prompt: large_string, lengths 70 to 991k
completion: large_string, lengths 0 to 1.02k
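
The two columns above describe a plain prompt/completion layout: each `prompt` holds a source file wrapped in fill-in-the-middle markers (`<|file_name|>`, `<|fim▁begin|>`, `<|fim▁hole|>`, `<|fim▁end|>`), and each `completion` holds the text that fills the hole. The sketch below shows how such rows could be loaded and inspected; it assumes the rows are published as a Hugging Face dataset, and the dataset identifier and split name are placeholders rather than the real ones.

```python
# Minimal sketch: load a prompt/completion dataset with the schema above and
# inspect one row. "org/fim-code-corpus" and "train" are placeholder names,
# not the actual dataset path or split.
from datasets import load_dataset

ds = load_dataset("org/fim-code-corpus", split="train")  # hypothetical identifier
row = ds[0]

# Column lengths correspond to the stats above: prompt strings run roughly
# 70 to 991k characters, completions 0 to ~1.02k.
print(len(row["prompt"]), len(row["completion"]))

# The prompt opens with the FIM markers; the completion is the span that
# belongs in <|fim▁hole|>.
print(row["prompt"][:200])
print(row["completion"])
```
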
<|file_name|>issue-15689-1.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #[derive(PartialEq)] enum Test<'a> { Slice(&'a int) }<|fim▁hole|><|fim▁end|>
fn main() { assert!(Test::Slice(&1) == Test::Slice(&1)) }
<|file_name|>trait-static-method-generic-inference.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // Issue #3902. We are (at least currently) unable to infer `Self` // based on `T`, even though there is only a single impl, because of // the possibility of associated types and other things (basically: no // constraints on `Self` here at all). mod base {<|fim▁hole|> } pub struct Foo { dummy: (), } impl HasNew<Foo> for Foo { fn new() -> Foo { Foo { dummy: () } } } } pub fn foo() { let _f: base::Foo = base::HasNew::new(); //~^ ERROR type annotations required } fn main() { }<|fim▁end|>
pub trait HasNew<T> { fn new() -> T; fn dummy(&self) { }
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Jetlibs documentation build configuration file, created by # sphinx-quickstart on Wed Dec 23 16:22:13 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Jetlibs' copyright = u'2015, Marius Messerschmidt' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '1.0' # The full version, including alpha/beta/rc tags. release = '1.0' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. 
If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True <|fim▁hole|># If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'Jetlibsdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'Jetlibs.tex', u'Jetlibs Documentation', u'Marius Messerschmidt', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. 
#latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'jetlibs', u'Jetlibs Documentation', [u'Marius Messerschmidt'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Jetlibs', u'Jetlibs Documentation', u'Marius Messerschmidt', 'Jetlibs', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # -- Options for Epub output ---------------------------------------------- # Bibliographic Dublin Core info. epub_title = u'Jetlibs' epub_author = u'Marius Messerschmidt' epub_publisher = u'Marius Messerschmidt' epub_copyright = u'2015, Marius Messerschmidt' # The basename for the epub file. It defaults to the project name. #epub_basename = u'Jetlibs' # The HTML theme for the epub output. Since the default themes are not optimized # for small screen space, using the same theme for HTML and epub output is # usually not wise. This defaults to 'epub', a theme designed to save visual # space. #epub_theme = 'epub' # The language of the text. It defaults to the language option # or en if the language is not set. #epub_language = '' # The scheme of the identifier. Typical schemes are ISBN or URL. #epub_scheme = '' # The unique identifier of the text. This can be a ISBN number # or the project homepage. #epub_identifier = '' # A unique identification for the text. #epub_uid = '' # A tuple containing the cover image and cover page html template filenames. #epub_cover = () # A sequence of (type, uri, title) tuples for the guide element of content.opf. #epub_guide = () # HTML files that should be inserted before the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_pre_files = [] # HTML files shat should be inserted after the pages created by sphinx. # The format is a list of tuples containing the path and title. #epub_post_files = [] # A list of files that should not be packed into the epub file. epub_exclude_files = ['search.html'] # The depth of the table of contents in toc.ncx. #epub_tocdepth = 3 # Allow duplicate toc entries. #epub_tocdup = True # Choose between 'default' and 'includehidden'. #epub_tocscope = 'default' # Fix unsupported image types using the PIL. #epub_fix_images = False # Scale large images. #epub_max_image_width = 0 # How to display URL addresses: 'footnote', 'no', or 'inline'. #epub_show_urls = 'inline' # If false, no index is generated. #epub_use_index = True<|fim▁end|>
<|file_name|>asset.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright (c) 2016, Frappe Technologies Pvt. Ltd. and contributors # For license information, please see license.txt from __future__ import unicode_literals import frappe from frappe import _ from frappe.utils import flt, add_months, cint, nowdate, getdate from frappe.model.document import Document from erpnext.accounts.doctype.purchase_invoice.purchase_invoice import get_fixed_asset_account from erpnext.accounts.doctype.asset.depreciation \ import get_disposal_account_and_cost_center, get_depreciation_accounts class Asset(Document): def validate(self): self.status = self.get_status() self.validate_item() self.set_missing_values() self.validate_asset_values() self.make_depreciation_schedule() self.set_accumulated_depreciation() if self.get("schedules"): self.validate_expected_value_after_useful_life() # Validate depreciation related accounts get_depreciation_accounts(self) def on_submit(self): self.set_status() def on_cancel(self): self.validate_cancellation() self.delete_depreciation_entries() self.set_status() def validate_item(self): item = frappe.db.get_value("Item", self.item_code, ["is_fixed_asset", "is_stock_item", "disabled"], as_dict=1) if not item: frappe.throw(_("Item {0} does not exist").format(self.item_code)) elif item.disabled: frappe.throw(_("Item {0} has been disabled").format(self.item_code)) elif not item.is_fixed_asset: frappe.throw(_("Item {0} must be a Fixed Asset Item").format(self.item_code)) elif item.is_stock_item: frappe.throw(_("Item {0} must be a non-stock item").format(self.item_code)) def set_missing_values(self): if self.item_code: item_details = get_item_details(self.item_code) for field, value in item_details.items(): if not self.get(field): self.set(field, value) self.value_after_depreciation = (flt(self.gross_purchase_amount) - flt(self.opening_accumulated_depreciation)) def validate_asset_values(self): if flt(self.expected_value_after_useful_life) >= flt(self.gross_purchase_amount): frappe.throw(_("Expected Value After Useful Life must be less than Gross Purchase Amount")) if not flt(self.gross_purchase_amount): frappe.throw(_("Gross Purchase Amount is mandatory"), frappe.MandatoryError) if not self.is_existing_asset: self.opening_accumulated_depreciation = 0 self.number_of_depreciations_booked = 0 if not self.next_depreciation_date: frappe.throw(_("Next Depreciation Date is mandatory for new asset")) else: depreciable_amount = flt(self.gross_purchase_amount) - flt(self.expected_value_after_useful_life) if flt(self.opening_accumulated_depreciation) > depreciable_amount: frappe.throw(_("Opening Accumulated Depreciation must be less than equal to {0}") .format(depreciable_amount)) if self.opening_accumulated_depreciation: if not self.number_of_depreciations_booked: frappe.throw(_("Please set Number of Depreciations Booked")) else: self.number_of_depreciations_booked = 0 if cint(self.number_of_depreciations_booked) > cint(self.total_number_of_depreciations): frappe.throw(_("Number of Depreciations Booked cannot be greater than Total Number of Depreciations")) if self.next_depreciation_date and getdate(self.next_depreciation_date) < getdate(nowdate()): frappe.msgprint(_("Next Depreciation Date is entered as past date"), title=_('Warning'), indicator='red') if self.next_depreciation_date and getdate(self.next_depreciation_date) < getdate(self.purchase_date): frappe.throw(_("Next Depreciation Date cannot be before Purchase Date")) if (flt(self.value_after_depreciation) > 
flt(self.expected_value_after_useful_life) and not self.next_depreciation_date): frappe.throw(_("Please set Next Depreciation Date")) def make_depreciation_schedule(self): if self.depreciation_method != 'Manual': self.schedules = [] if not self.get("schedules") and self.next_depreciation_date: value_after_depreciation = flt(self.value_after_depreciation) number_of_pending_depreciations = cint(self.total_number_of_depreciations) - \ cint(self.number_of_depreciations_booked) if number_of_pending_depreciations: for n in xrange(number_of_pending_depreciations): schedule_date = add_months(self.next_depreciation_date, n * cint(self.frequency_of_depreciation)) depreciation_amount = self.get_depreciation_amount(value_after_depreciation) value_after_depreciation -= flt(depreciation_amount) self.append("schedules", { "schedule_date": schedule_date, "depreciation_amount": depreciation_amount }) def set_accumulated_depreciation(self): accumulated_depreciation = flt(self.opening_accumulated_depreciation) value_after_depreciation = flt(self.value_after_depreciation) for i, d in enumerate(self.get("schedules")): depreciation_amount = flt(d.depreciation_amount, d.precision("depreciation_amount")) value_after_depreciation -= flt(depreciation_amount) if i==len(self.get("schedules"))-1 and self.depreciation_method == "Straight Line": depreciation_amount += flt(value_after_depreciation - flt(self.expected_value_after_useful_life), d.precision("depreciation_amount")) d.depreciation_amount = depreciation_amount accumulated_depreciation += d.depreciation_amount d.accumulated_depreciation_amount = flt(accumulated_depreciation, d.precision("accumulated_depreciation_amount")) def get_depreciation_amount(self, depreciable_value): if self.depreciation_method in ("Straight Line", "Manual"): depreciation_amount = (flt(self.value_after_depreciation) - flt(self.expected_value_after_useful_life)) / (cint(self.total_number_of_depreciations) - cint(self.number_of_depreciations_booked)) else: factor = 200.0 / self.total_number_of_depreciations depreciation_amount = flt(depreciable_value * factor / 100, 0) value_after_depreciation = flt(depreciable_value) - depreciation_amount if value_after_depreciation < flt(self.expected_value_after_useful_life): depreciation_amount = flt(depreciable_value) - flt(self.expected_value_after_useful_life) return depreciation_amount def validate_expected_value_after_useful_life(self): accumulated_depreciation_after_full_schedule = \ max([d.accumulated_depreciation_amount for d in self.get("schedules")]) asset_value_after_full_schedule = (flt(self.gross_purchase_amount) - flt(accumulated_depreciation_after_full_schedule)) <|fim▁hole|> def validate_cancellation(self): if self.status not in ("Submitted", "Partially Depreciated", "Fully Depreciated"): frappe.throw(_("Asset cannot be cancelled, as it is already {0}").format(self.status)) if self.purchase_invoice: frappe.throw(_("Please cancel Purchase Invoice {0} first").format(self.purchase_invoice)) def delete_depreciation_entries(self): for d in self.get("schedules"): if d.journal_entry: frappe.get_doc("Journal Entry", d.journal_entry).cancel() d.db_set("journal_entry", None) self.db_set("value_after_depreciation", (flt(self.gross_purchase_amount) - flt(self.opening_accumulated_depreciation))) def set_status(self, status=None): '''Get and update status''' if not status: status = self.get_status() self.db_set("status", status) def get_status(self): '''Returns status based on whether it is draft, submitted, scrapped or depreciated''' if 
self.docstatus == 0: status = "Draft" elif self.docstatus == 1: status = "Submitted" if self.journal_entry_for_scrap: status = "Scrapped" elif flt(self.value_after_depreciation) <= flt(self.expected_value_after_useful_life): status = "Fully Depreciated" elif flt(self.value_after_depreciation) < flt(self.gross_purchase_amount): status = 'Partially Depreciated' elif self.docstatus == 2: status = "Cancelled" return status @frappe.whitelist() def make_purchase_invoice(asset, item_code, gross_purchase_amount, company, posting_date): pi = frappe.new_doc("Purchase Invoice") pi.company = company pi.currency = frappe.db.get_value("Company", company, "default_currency") pi.set_posting_time = 1 pi.posting_date = posting_date pi.append("items", { "item_code": item_code, "is_fixed_asset": 1, "asset": asset, "expense_account": get_fixed_asset_account(asset), "qty": 1, "price_list_rate": gross_purchase_amount, "rate": gross_purchase_amount }) pi.set_missing_values() return pi @frappe.whitelist() def make_sales_invoice(asset, item_code, company): si = frappe.new_doc("Sales Invoice") si.company = company si.currency = frappe.db.get_value("Company", company, "default_currency") disposal_account, depreciation_cost_center = get_disposal_account_and_cost_center(company) si.append("items", { "item_code": item_code, "is_fixed_asset": 1, "asset": asset, "income_account": disposal_account, "cost_center": depreciation_cost_center, "qty": 1 }) si.set_missing_values() return si @frappe.whitelist() def transfer_asset(args): import json args = json.loads(args) movement_entry = frappe.new_doc("Asset Movement") movement_entry.update(args) movement_entry.insert() movement_entry.submit() frappe.db.commit() frappe.msgprint(_("Asset Movement record {0} created").format("<a href='#Form/Asset Movement/{0}'>{0}</a>".format(movement_entry.name))) @frappe.whitelist() def get_item_details(item_code): asset_category = frappe.db.get_value("Item", item_code, "asset_category") if not asset_category: frappe.throw(_("Please enter Asset Category in Item {0}").format(item_code)) ret = frappe.db.get_value("Asset Category", asset_category, ["depreciation_method", "total_number_of_depreciations", "frequency_of_depreciation"], as_dict=1) ret.update({ "asset_category": asset_category }) return ret<|fim▁end|>
if self.expected_value_after_useful_life < asset_value_after_full_schedule: frappe.throw(_("Expected value after useful life must be greater than or equal to {0}") .format(asset_value_after_full_schedule))
<|file_name|>index.js<|end_file_name|><|fim▁begin|>/** * Module dependencies. */ var util = require('sails-util'), uuid = require('node-uuid'), path = require('path'),<|fim▁hole|> ConnectSession = require('express/node_modules/connect').middleware.session.Session; module.exports = function(sails) { ////////////////////////////////////////////////////////////////////////////// // TODO: // // All of this craziness can be replaced by making the socket.io interpreter // 100% connect-compatible (it's close!). Then, the connect cookie parser // can be used directly with Sails' simulated req and res objects. // ////////////////////////////////////////////////////////////////////////////// /** * Prototype for the connect session store wrapper used by the sockets hook. * Includes a save() method to persist the session data. */ function SocketIOSession(options) { var sid = options.sid, data = options.data; this.save = function(cb) { if (!sid) { sails.log.error('Trying to save session, but could not determine session ID.'); sails.log.error('This probably means a requesting socket did not send a cookie.'); sails.log.error('Usually, this happens when a socket from an old browser tab ' + ' tries to reconnect.'); sails.log.error('(this can also occur when trying to connect a cross-origin socket.)'); if (cb) cb('Could not save session.'); return; } // Merge data directly into instance to allow easy access on `req.session` later util.defaults(this, data); // Persist session Session.set(sid, sails.util.cloneDeep(this), function(err) { if (err) { sails.log.error('Could not save session:'); sails.log.error(err); } if (cb) cb(err); }); }; // Set the data on this object, since it will be used as req.session util.extend(this, options.data); } // Session hook var Session = { defaults: { session: { adapter: 'memory', key: "sails.sid" } }, /** * Normalize and validate configuration for this hook. * Then fold any modifications back into `sails.config` */ configure: function() { // Validate config // Ensure that secret is specified if a custom session store is used if (sails.config.session) { if (!util.isObject(sails.config.session)) { throw new Error('Invalid custom session store configuration!\n' + '\n' + 'Basic usage ::\n' + '{ session: { adapter: "memory", secret: "someVerySecureString", /* ...if applicable: host, port, etc... */ } }' + '\n\nCustom usage ::\n' + '{ session: { store: { /* some custom connect session store instance */ }, secret: "someVerySecureString", /* ...custom settings.... 
*/ } }' ); } } // If session config is set, but secret is undefined, set a secure, one-time use secret if (!sails.config.session || !sails.config.session.secret) { sails.log.verbose('Session secret not defined-- automatically generating one for now...'); if (sails.config.environment === 'production') { sails.log.warn('Session secret must be identified!'); sails.log.warn('Automatically generating one for now...'); sails.log.error('This generated session secret is NOT OK for production!'); sails.log.error('It will change each time the server starts and break multi-instance deployments.'); sails.log.blank(); sails.log.error('To set up a session secret, add or update it in `config/session.js`:'); sails.log.error('module.exports.session = { secret: "keyboardcat" }'); sails.log.blank(); } sails.config.session.secret = generateSecret(); } // Backwards-compatibility / shorthand notation // (allow mongo or redis session stores to be specified directly) if (sails.config.session.adapter === 'redis') { sails.config.session.adapter = 'connect-redis'; } else if (sails.config.session.adapter === 'mongo') { sails.config.session.adapter = 'connect-mongo'; } }, /** * Create a connection to the configured session store * and keep it around * * @api private */ initialize: function(cb) { var sessionConfig = sails.config.session; // Intepret session adapter config and "new up" a session store if (util.isObject(sessionConfig) && !util.isObject(sessionConfig.store)) { // Unless the session is explicitly disabled, require the appropriate adapter if (sessionConfig.adapter) { // 'memory' is a special case if (sessionConfig.adapter === 'memory') { var MemoryStore = require('express').session.MemoryStore; sessionConfig.store = new MemoryStore(); } // Try and load the specified adapter from the local sails project, // or catch and return error: else { var COULD_NOT_REQUIRE_CONNECT_ADAPTER_ERR = function (adapter, packagejson, e) { var errMsg; if (e && typeof e === 'object' && e instanceof Error) { errMsg = e.stack; } else { errMsg = util.inspect(e); } var output = 'Could not load Connect session adapter :: ' + adapter + '\n'; if (packagejson && !packagejson.main) { output+='(If this is your module, make sure that the module has a "main" configuration in its package.json file)'; } output+='\nError from adapter:\n'+ errMsg+'\n\n'; // Recommend installation of the session adapter: output += 'Do you have the Connect session adapter installed in this project?\n'; output += 'Try running the following command in your project\'s root directory:\n'; var installRecommendation = 'npm install '; if (adapter === 'connect-redis') { installRecommendation += '[email protected]'; installRecommendation += '\n(Note that `[email protected]` introduced breaking changes- make sure you have v1.4.5 installed!)'; } else { installRecommendation += adapter; installRecommendation +='\n(Note: Make sure the version of the Connect adapter you install is compatible with Express 3/Sails v0.10)'; } installRecommendation += '\n'; output += installRecommendation; return output; }; try { // Determine the path to the adapter by using the "main" described in its package.json file: var pathToAdapterDependency; var pathToAdapterPackage = path.resolve(sails.config.appPath, 'node_modules', sessionConfig.adapter ,'package.json'); var adapterPackage; try { adapterPackage = require(pathToAdapterPackage); pathToAdapterDependency = path.resolve(sails.config.appPath, 'node_modules', sessionConfig.adapter, adapterPackage.main); } catch (e) { return 
cb(COULD_NOT_REQUIRE_CONNECT_ADAPTER_ERR(sessionConfig.adapter, adapterPackage, e)); } var SessionAdapter = require(pathToAdapterDependency); var CustomStore = SessionAdapter(require('express')); sessionConfig.store = new CustomStore(sessionConfig); } catch (e) { // TODO: negotiate error return cb(COULD_NOT_REQUIRE_CONNECT_ADAPTER_ERR(sessionConfig.adapter, adapterPackage, e)); } } } } // Save reference in `sails.session` sails.session = Session; return cb(); }, /** * Create a new sid and build an empty session for it. * * @param {Object} handshake - a socket "handshake" -- basically, this is like `req` * @param {Function} cb * @returns live session, with `id` property === new sid */ generate: function(handshake, cb) { // Generate a session object w/ sid // This is important since we need this sort of object as the basis for the data // we'll save directly into the session store. // (handshake is a pretend `req` object, and 2nd argument is cookie config) var session = new ConnectSession(handshake, { cookie: { // Prevent access from client-side javascript httpOnly: true, // Restrict to path path: '/' } }); // Next, persist the new session Session.set(session.id, session, function(err) { if (err) return cb(err); sails.log.verbose('Generated new session (', session.id, ') for socket....'); // Pass back final session object return cb(null, session); }); }, /** * @param {String} sessionId * @param {Function} cb * * @api private */ get: function(sessionId, cb) { if (!util.isFunction(cb)) { throw new Error('Invalid usage :: `Session.get(sessionId, cb)`'); } return sails.config.session.store.get(sessionId, cb); }, /** * @param {String} sessionId * @param {} data * @param {Function} [cb] - optional * * @api private */ set: function(sessionId, data, cb) { cb = util.optional(cb); return sails.config.session.store.set(sessionId, data, cb); }, /** * Create a session transaction * * Load the Connect session data using the sessionID in the socket.io handshake object * Mix-in the session.save() method for persisting the data back to the session store. * * Functionally equivalent to connect's sessionStore middleware. */ fromSocket: function(socket, cb) { // If a socket makes it here, even though its associated session is not specified, // it's authorized as far as the app is concerned, so no need to do that again. // Instead, use the cookie to look up the sid, and then the sid to look up the session data // If sid doesn't exit in socket, we have to do a little work first to get it // (or generate a new one-- and therefore a new empty session as well) if (!socket.handshake.sessionID && !socket.handshake.headers.cookie) { // If no cookie exists, generate a random one (this will create a new session!) var generatedCookie = sails.config.session.key + '=' + uuid.v1(); socket.handshake.headers.cookie = generatedCookie; sails.log.verbose('Could not fetch session, since connecting socket (', socket.id, ') has no cookie.'); sails.log.verbose('Is this a cross-origin socket..?)'); sails.log.verbose('Generated a one-time-use cookie:', generatedCookie); sails.log.verbose('This will result in an empty session, i.e. 
(req.session === {})'); // Convert cookie into `sid` using session secret // Maintain sid in socket so that the session can be queried before processing each incoming message socket.handshake.cookie = cookie.parse(generatedCookie); // Parse and decrypt cookie and save it in the socket.handshake socket.handshake.sessionID = parseSignedCookie(socket.handshake.cookie[sails.config.session.key], sails.config.session.secret); // Generate and persist a new session in the store Session.generate(socket.handshake, function(err, sessionData) { if (err) return cb(err); sails.log.silly('socket.handshake.sessionID is now :: ', socket.handshake.sessionID); // Provide access to adapter-agnostic `.save()` return cb(null, new SocketIOSession({ sid: sessionData.id, data: sessionData })); }); return; } try { // Convert cookie into `sid` using session secret // Maintain sid in socket so that the session can be queried before processing each incoming message socket.handshake.cookie = cookie.parse(socket.handshake.headers.cookie); // Parse and decrypt cookie and save it in the socket.handshake socket.handshake.sessionID = parseSignedCookie(socket.handshake.cookie[sails.config.session.key], sails.config.session.secret); } catch (e) { sails.log.error('Could not load session for socket #' + socket.id); sails.log.error('The socket\'s cookie could not be parsed into a sessionID.'); sails.log.error('Unless you\'re overriding the `authorization` function, make sure ' + 'you pass in a valid `' + sails.config.session.key + '` cookie'); sails.log.error('(or omit the cookie altogether to have a new session created and an ' + 'encrypted cookie sent in the response header to your socket.io upgrade request)'); return cb(e); } // If sid DOES exist, it's easy to look up in the socket var sid = socket.handshake.sessionID; // Cache the handshake in case it gets wiped out during the call to Session.get var handshake = socket.handshake; // Retrieve session data from store Session.get(sid, function(err, sessionData) { if (err) { sails.log.error('Error retrieving session from socket.'); return cb(err); } // sid is not known-- the session secret probably changed // Or maybe server restarted and it was: // (a) using an auto-generated secret, or // (b) using the session memory store // and so it doesn't recognize the socket's session ID. else if (!sessionData) { sails.log.verbose('A socket (' + socket.id + ') is trying to connect with an invalid or expired session ID (' + sid + ').'); sails.log.verbose('Regnerating empty session...'); Session.generate(handshake, function(err, sessionData) { if (err) return cb(err); // Provide access to adapter-agnostic `.save()` return cb(null, new SocketIOSession({ sid: sessionData.id, data: sessionData })); }); } // Otherwise session exists and everything is ok. // Instantiate SocketIOSession (provides .save() method) // And extend it with session data else return cb(null, new SocketIOSession({ data: sessionData, sid: sid })); }); } }; return Session; };<|fim▁end|>
generateSecret = require('./generateSecret'), cookie = require('express/node_modules/cookie'), parseSignedCookie = require('cookie-parser').signedCookie,
<|file_name|>setup.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Werkzeug ======== Werkzeug started as simple collection of various utilities for WSGI applications and has become one of the most advanced WSGI utility modules. It includes a powerful debugger, full featured request and response objects, HTTP utilities to handle entity tags, cache control headers, HTTP dates, cookie handling, file uploads, a powerful URL routing system and a bunch of community contributed addon modules. Werkzeug is unicode aware and doesn't enforce a specific template engine, database adapter or anything else. It doesn't even enforce a specific way of handling requests and leaves all that up to the developer. It's most useful for end user applications which should work on as many server environments as possible (such as blogs, wikis, bulletin boards, etc.). Details and example applications are available on the `Werkzeug website <http://werkzeug.pocoo.org/>`_. Features -------- - unicode awareness - request and response objects - various utility functions for dealing with HTTP headers such as `Accept` and `Cache-Control` headers. - thread local objects with proper cleanup at request end - an interactive debugger - A simple WSGI server with support for threading and forking with an automatic reloader. - a flexible URL routing system with REST support. - fully WSGI compatible Development Version ------------------- The Werkzeug development version can be installed by cloning the git repository from `github`_:: git clone [email protected]:mitsuhiko/werkzeug.git .. _github: http://github.com/mitsuhiko/werkzeug """ try: from setuptools import setup except ImportError: from distutils.core import setup setup( name='Werkzeug', version='0.10-dev', url='http://werkzeug.pocoo.org/', license='BSD', author='Armin Ronacher', author_email='[email protected]', description='The Swiss Army knife of Python web development', long_description=__doc__, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 3', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules' ], packages=['werkzeug', 'werkzeug.debug', 'werkzeug.contrib', 'werkzeug.testsuite', 'werkzeug.testsuite.contrib'], include_package_data=True, test_suite='werkzeug.testsuite.suite',<|fim▁hole|>)<|fim▁end|>
zip_safe=False, platforms='any'
<|file_name|>lifx_cloud.py<|end_file_name|><|fim▁begin|>""" Support for LIFX Cloud scenes. For more details about this component, please refer to the documentation at https://home-assistant.io/components/scene.lifx_cloud/ """ import asyncio import logging import voluptuous as vol import aiohttp import async_timeout from homeassistant.components.scene import Scene from homeassistant.const import (CONF_PLATFORM, CONF_TOKEN, CONF_TIMEOUT) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.aiohttp_client import (async_get_clientsession) _LOGGER = logging.getLogger(__name__) <|fim▁hole|>LIFX_API_URL = 'https://api.lifx.com/v1/{0}' DEFAULT_TIMEOUT = 10 PLATFORM_SCHEMA = vol.Schema({ vol.Required(CONF_PLATFORM): 'lifx_cloud', vol.Required(CONF_TOKEN): cv.string, vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int, }) # pylint: disable=unused-argument @asyncio.coroutine def async_setup_platform(hass, config, async_add_devices, discovery_info=None): """Set up the scenes stored in the LIFX Cloud.""" token = config.get(CONF_TOKEN) timeout = config.get(CONF_TIMEOUT) headers = { "Authorization": "Bearer %s" % token, } url = LIFX_API_URL.format('scenes') try: httpsession = async_get_clientsession(hass) with async_timeout.timeout(timeout, loop=hass.loop): scenes_resp = yield from httpsession.get(url, headers=headers) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.exception("Error on %s", url) return False status = scenes_resp.status if status == 200: data = yield from scenes_resp.json() devices = [] for scene in data: devices.append(LifxCloudScene(hass, headers, timeout, scene)) async_add_devices(devices) return True elif status == 401: _LOGGER.error("Unauthorized (bad token?) on %s", url) return False _LOGGER.error("HTTP error %d on %s", scenes_resp.status, url) return False class LifxCloudScene(Scene): """Representation of a LIFX Cloud scene.""" def __init__(self, hass, headers, timeout, scene_data): """Initialize the scene.""" self.hass = hass self._headers = headers self._timeout = timeout self._name = scene_data["name"] self._uuid = scene_data["uuid"] @property def name(self): """Return the name of the scene.""" return self._name @asyncio.coroutine def async_activate(self): """Activate the scene.""" url = LIFX_API_URL.format('scenes/scene_id:%s/activate' % self._uuid) try: httpsession = async_get_clientsession(self.hass) with async_timeout.timeout(self._timeout, loop=self.hass.loop): yield from httpsession.put(url, headers=self._headers) except (asyncio.TimeoutError, aiohttp.ClientError): _LOGGER.exception("Error on %s", url)<|fim▁end|>
<|file_name|>array.js<|end_file_name|><|fim▁begin|>Astro.createValidator({ name: 'array', validate: _.isArray, events: { validationerror: function(e) { var fieldName = e.data.field; e.data.message = 'The "' + fieldName + '" field\'s value has to be an array';<|fim▁hole|>});<|fim▁end|>
} }
<|file_name|>core.py<|end_file_name|><|fim▁begin|>""" Core classes for the XBlock family. This code is in the Runtime layer, because it is authored once by edX and used by all runtimes. """ import inspect import pkg_resources import warnings from collections import defaultdict from xblock.exceptions import DisallowedFileError from xblock.fields import String, List, Scope from xblock.internal import class_lazy import xblock.mixins from xblock.mixins import ( ScopedStorageMixin, HierarchyMixin, RuntimeServicesMixin, HandlersMixin, XmlSerializationMixin, IndexInfoMixin, ViewsMixin, ) from xblock.plugin import Plugin from xblock.validation import Validation # exposing XML_NAMESPACES as a member of core, in order to avoid importing mixins where # XML_NAMESPACES are needed (e.g. runtime.py). XML_NAMESPACES = xblock.mixins.XML_NAMESPACES # __all__ controls what classes end up in the docs. __all__ = ['XBlock'] UNSET = object() class XBlockMixin(ScopedStorageMixin): """ Base class for XBlock Mixin classes. XBlockMixin classes can add new fields and new properties to all XBlocks created by a particular runtime. """ pass class SharedBlockBase(Plugin): """ Behaviors and attrs which all XBlock like things should share """ @classmethod def open_local_resource(cls, uri): """Open a local resource. The container calls this method when it receives a request for a resource on a URL which was generated by Runtime.local_resource_url(). It will pass the URI from the original call to local_resource_url() back to this method. The XBlock must parse this URI and return an open file-like object for the resource. For security reasons, the default implementation will return only a very restricted set of file types, which must be located in a folder called "public". XBlock authors who want to override this behavior will need to take care to ensure that the method only serves legitimate public resources. At the least, the URI should be matched against a whitelist regex to ensure that you do not serve an unauthorized resource. """ # Verify the URI is in whitelisted form before opening for serving. # URI must begin with public/, and no file path component can start # with a dot, which prevents ".." and ".hidden" files. if not uri.startswith("public/"): raise DisallowedFileError("Only files from public/ are allowed: %r" % uri) if "/." in uri: raise DisallowedFileError("Only safe file names are allowed: %r" % uri) return pkg_resources.resource_stream(cls.__module__, uri) # -- Base Block class XBlock(XmlSerializationMixin, HierarchyMixin, ScopedStorageMixin, RuntimeServicesMixin, HandlersMixin, IndexInfoMixin, ViewsMixin, SharedBlockBase): """Base class for XBlocks. Derive from this class to create a new kind of XBlock. There are no required methods, but you will probably need at least one view. Don't provide the ``__init__`` method when deriving from this class. """ entry_point = 'xblock.v1' name = String(help="Short name for the block", scope=Scope.settings) tags = List(help="Tags for this block", scope=Scope.settings) @class_lazy def _class_tags(cls): # pylint: disable=no-self-argument """ Collect the tags from all base classes. 
""" class_tags = set() for base in cls.mro()[1:]: # pylint: disable=no-member class_tags.update(getattr(base, '_class_tags', set())) <|fim▁hole|> @staticmethod def tag(tags): """Returns a function that adds the words in `tags` as class tags to this class.""" def dec(cls): """Add the words in `tags` as class tags to this class.""" # Add in this class's tags cls._class_tags.update(tags.replace(",", " ").split()) # pylint: disable=protected-access return cls return dec @classmethod def load_tagged_classes(cls, tag, fail_silently=True): """ Produce a sequence of all XBlock classes tagged with `tag`. fail_silently causes the code to simply log warnings if a plugin cannot import. The goal is to be able to use part of libraries from an XBlock (and thus have it installed), even if the overall XBlock cannot be used (e.g. depends on Django in a non-Django application). There is diagreement about whether this is a good idea, or whether we should see failures early (e.g. on startup or first page load), and in what contexts. Hence, the flag. """ # Allow this method to access the `_class_tags` # pylint: disable=W0212 for name, class_ in cls.load_classes(fail_silently): if tag in class_._class_tags: yield name, class_ def __init__(self, runtime, field_data=None, scope_ids=UNSET, *args, **kwargs): """ Construct a new XBlock. This class should only be instantiated by runtimes. Arguments: runtime (:class:`.Runtime`): Use it to access the environment. It is available in XBlock code as ``self.runtime``. field_data (:class:`.FieldData`): Interface used by the XBlock fields to access their data from wherever it is persisted. Deprecated. scope_ids (:class:`.ScopeIds`): Identifiers needed to resolve scopes. """ if scope_ids is UNSET: raise TypeError('scope_ids are required') # Provide backwards compatibility for external access through _field_data super(XBlock, self).__init__(runtime=runtime, scope_ids=scope_ids, field_data=field_data, *args, **kwargs) def render(self, view, context=None): """Render `view` with this block's runtime and the supplied `context`""" return self.runtime.render(self, view, context) def validate(self): """ Ask this xblock to validate itself. Subclasses are expected to override this method, as there is currently only a no-op implementation. Any overriding method should call super to collect validation results from its superclasses, and then add any additional results as necessary. """ return Validation(self.scope_ids.usage_id) class XBlockAside(XmlSerializationMixin, ScopedStorageMixin, RuntimeServicesMixin, HandlersMixin, SharedBlockBase): """ This mixin allows Xblock-like class to declare that it provides aside functionality. """ entry_point = "xblock_asides.v1" @classmethod def aside_for(cls, view_name): """ A decorator to indicate a function is the aside view for the given view_name. Aside views should have a signature like: @XBlockAside.aside_for('student_view') def student_aside(self, block, context=None): ... return Fragment(...) """ # pylint: disable=protected-access def _decorator(func): # pylint: disable=missing-docstring if not hasattr(func, '_aside_for'): func._aside_for = [] func._aside_for.append(view_name) # pylint: disable=protected-access return func return _decorator @class_lazy def _combined_asides(cls): # pylint: disable=no-self-argument """ A dictionary mapping XBlock view names to the aside method that decorates them (or None, if there is no decorator for the specified view). """ # The method declares what views it decorates. 
We rely on `dir` # to handle subclasses and overrides. combined_asides = defaultdict(None) for _view_name, view_func in inspect.getmembers(cls, lambda attr: hasattr(attr, '_aside_for')): aside_for = getattr(view_func, '_aside_for', []) for view in aside_for: combined_asides[view] = view_func.__name__ return combined_asides def aside_view_declaration(self, view_name): """ Find and return a function object if one is an aside_view for the given view_name Aside methods declare their view provision via @XBlockAside.aside_for(view_name) This function finds those declarations for a block. Arguments: view_name (string): the name of the view requested. Returns: either the function or None """ if view_name in self._combined_asides: return getattr(self, self._combined_asides[view_name]) else: return None def needs_serialization(self): """ Return True if the aside has any data to serialize to XML. If all of the aside's data is empty or a default value, then the aside shouldn't be serialized as XML at all. """ return any([field.is_set_on(self) for field in self.fields.itervalues()]) # Maintain backwards compatibility import xblock.exceptions class KeyValueMultiSaveError(xblock.exceptions.KeyValueMultiSaveError): """ Backwards compatibility class wrapper around :class:`.KeyValueMultiSaveError`. """ def __init__(self, *args, **kwargs): warnings.warn("Please use xblock.exceptions.KeyValueMultiSaveError", DeprecationWarning, stacklevel=2) super(KeyValueMultiSaveError, self).__init__(*args, **kwargs) class XBlockSaveError(xblock.exceptions.XBlockSaveError): """ Backwards compatibility class wrapper around :class:`.XBlockSaveError`. """ def __init__(self, *args, **kwargs): warnings.warn("Please use xblock.exceptions.XBlockSaveError", DeprecationWarning, stacklevel=2) super(XBlockSaveError, self).__init__(*args, **kwargs)<|fim▁end|>
return class_tags
<|file_name|>LuaExecuteScriptTests.cpp<|end_file_name|><|fim▁begin|>/** * * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #define CATCH_CONFIG_MAIN #include <memory> #include <string> #include <set> #include "../TestBase.h" #include <ExecuteScript.h> #include "processors/LogAttribute.h" #include "processors/GetFile.h" #include "processors/PutFile.h" TEST_CASE("Lua: Test Log", "[executescriptLuaLog]") { // NOLINT TestController testController; LogTestController &logTestController = LogTestController::getInstance(); logTestController.setDebug<TestPlan>(); logTestController.setDebug<minifi::processors::LogAttribute>(); logTestController.setDebug<minifi::processors::ExecuteScript>(); auto plan = testController.createPlan(); auto getFile = plan->addProcessor("GetFile", "getFile"); auto executeScript = plan->addProcessor("ExecuteScript", "executeScript", core::Relationship("success", "description"), true); plan->setProperty(executeScript, processors::ExecuteScript::ScriptEngine.getName(), "lua"); plan->setProperty(executeScript, processors::ExecuteScript::ScriptBody.getName(), R"( function onTrigger(context, session) log:info('hello from lua') end )"); char getFileDirFmt[] = "/tmp/ft.XXXXXX"; char *getFileDir = testController.createTempDirectory(getFileDirFmt); plan->setProperty(getFile, processors::GetFile::Directory.getName(), getFileDir); std::fstream file; std::stringstream ss; ss << getFileDir << "/" << "tstFile.ext"; file.open(ss.str(), std::ios::out); file << "tempFile"; file.close(); plan->reset(); testController.runSession(plan, false); testController.runSession(plan, false); REQUIRE(LogTestController::getInstance().contains( "[org::apache::nifi::minifi::processors::ExecuteScript] [info] hello from lua")); logTestController.reset(); } TEST_CASE("Lua: Test Read File", "[executescriptLuaRead]") { // NOLINT TestController testController; LogTestController &logTestController = LogTestController::getInstance(); logTestController.setDebug<TestPlan>(); logTestController.setDebug<minifi::processors::LogAttribute>(); logTestController.setDebug<minifi::processors::ExecuteScript>(); auto plan = testController.createPlan(); auto getFile = plan->addProcessor("GetFile", "getFile"); auto logAttribute = plan->addProcessor("LogAttribute", "logAttribute", core::Relationship("success", "description"), true); auto executeScript = plan->addProcessor("ExecuteScript", "executeScript", core::Relationship("success", "description"), true); auto putFile = plan->addProcessor("PutFile", "putFile", core::Relationship("success", "description"), true); plan->setProperty(executeScript, processors::ExecuteScript::ScriptEngine.getName(), "lua"); plan->setProperty(executeScript, processors::ExecuteScript::ScriptBody.getName(), R"( read_callback = {} function read_callback.process(self, input_stream) 
content = input_stream:read() log:info('file content: ' .. content) return #content end function onTrigger(context, session) flow_file = session:get() if flow_file ~= nil then log:info('got flow file: ' .. flow_file:getAttribute('filename')) session:read(flow_file, read_callback) session:transfer(flow_file, REL_SUCCESS) end end )"); char getFileDirFmt[] = "/tmp/ft.XXXXXX"; char *getFileDir = testController.createTempDirectory(getFileDirFmt); plan->setProperty(getFile, processors::GetFile::Directory.getName(), getFileDir); char putFileDirFmt[] = "/tmp/ft.XXXXXX"; char *putFileDir = testController.createTempDirectory(putFileDirFmt); plan->setProperty(putFile, processors::PutFile::Directory.getName(), putFileDir); testController.runSession(plan, false); auto records = plan->getProvenanceRecords(); std::shared_ptr<core::FlowFile> record = plan->getCurrentFlowFile(); REQUIRE(record == nullptr); REQUIRE(records.empty()); std::fstream file; std::stringstream ss; ss << getFileDir << "/" << "tstFile.ext"; file.open(ss.str(), std::ios::out); file << "tempFile"; file.close(); plan->reset(); testController.runSession(plan, false); testController.runSession(plan, false); testController.runSession(plan, false); records = plan->getProvenanceRecords(); record = plan->getCurrentFlowFile(); testController.runSession(plan, false); unlink(ss.str().c_str()); REQUIRE(logTestController.contains("[info] file content: tempFile")); // Verify that file content was preserved REQUIRE(!std::ifstream(ss.str()).good()); std::stringstream movedFile; movedFile << putFileDir << "/" << "tstFile.ext"; REQUIRE(std::ifstream(movedFile.str()).good()); file.open(movedFile.str(), std::ios::in); std::string contents((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>()); REQUIRE("tempFile" == contents); file.close(); logTestController.reset(); } TEST_CASE("Lua: Test Write File", "[executescriptLuaWrite]") { // NOLINT TestController testController; LogTestController &logTestController = LogTestController::getInstance(); logTestController.setDebug<TestPlan>(); logTestController.setDebug<minifi::processors::LogAttribute>(); logTestController.setDebug<minifi::processors::ExecuteScript>(); auto plan = testController.createPlan(); auto getFile = plan->addProcessor("GetFile", "getFile"); auto logAttribute = plan->addProcessor("LogAttribute", "logAttribute", core::Relationship("success", "description"), true); auto executeScript = plan->addProcessor("ExecuteScript", "executeScript", core::Relationship("success", "description"), true); auto putFile = plan->addProcessor("PutFile", "putFile", core::Relationship("success", "description"), true); plan->setProperty(executeScript, processors::ExecuteScript::ScriptEngine.getName(), "lua"); plan->setProperty(executeScript, processors::ExecuteScript::ScriptBody.getName(), R"( write_callback = {} function write_callback.process(self, output_stream) new_content = 'hello 2' output_stream:write(new_content) return #new_content end function onTrigger(context, session) flow_file = session:get() if flow_file ~= nil then log:info('got flow file: ' .. 
flow_file:getAttribute('filename')) session:write(flow_file, write_callback) session:transfer(flow_file, REL_SUCCESS) end end )"); char getFileDirFmt[] = "/tmp/ft.XXXXXX"; char *getFileDir = testController.createTempDirectory(getFileDirFmt); plan->setProperty(getFile, processors::GetFile::Directory.getName(), getFileDir); char putFileDirFmt[] = "/tmp/ft.XXXXXX"; char *putFileDir = testController.createTempDirectory(putFileDirFmt); plan->setProperty(putFile, processors::PutFile::Directory.getName(), putFileDir); testController.runSession(plan, false); auto records = plan->getProvenanceRecords(); std::shared_ptr<core::FlowFile> record = plan->getCurrentFlowFile(); REQUIRE(record == nullptr); REQUIRE(records.empty()); std::fstream file; std::stringstream ss; ss << getFileDir << "/" << "tstFile.ext"; file.open(ss.str(), std::ios::out); file << "tempFile"; file.close(); plan->reset(); testController.runSession(plan, false); testController.runSession(plan, false); testController.runSession(plan, false); records = plan->getProvenanceRecords(); record = plan->getCurrentFlowFile(); testController.runSession(plan, false); unlink(ss.str().c_str()); // Verify new content was written REQUIRE(!std::ifstream(ss.str()).good()); std::stringstream movedFile; movedFile << putFileDir << "/" << "tstFile.ext"; REQUIRE(std::ifstream(movedFile.str()).good()); file.open(movedFile.str(), std::ios::in); std::string contents((std::istreambuf_iterator<char>(file)), std::istreambuf_iterator<char>()); REQUIRE("hello 2" == contents); file.close(); logTestController.reset(); } TEST_CASE("Lua: Test Update Attribute", "[executescriptLuaUpdateAttribute]") { // NOLINT TestController testController; LogTestController &logTestController = LogTestController::getInstance(); logTestController.setDebug<TestPlan>(); logTestController.setDebug<minifi::processors::LogAttribute>(); logTestController.setDebug<minifi::processors::ExecuteScript>(); auto plan = testController.createPlan(); auto getFile = plan->addProcessor("GetFile", "getFile"); auto executeScript = plan->addProcessor("ExecuteScript", "executeScript", core::Relationship("success", "description"), true); auto logAttribute = plan->addProcessor("LogAttribute", "logAttribute", core::Relationship("success", "description"), true); plan->setProperty(executeScript, processors::ExecuteScript::ScriptEngine.getName(), "lua"); plan->setProperty(executeScript, processors::ExecuteScript::ScriptBody.getName(), R"( function onTrigger(context, session) flow_file = session:get() if flow_file ~= nil then log:info('got flow file: ' .. flow_file:getAttribute('filename')) flow_file:addAttribute('test_attr', '1') attr = flow_file:getAttribute('test_attr') log:info('got flow file attr \'test_attr\': ' .. 
attr) flow_file:updateAttribute('test_attr', attr + 1) session:transfer(flow_file, REL_SUCCESS) end end )"); char getFileDirFmt[] = "/tmp/ft.XXXXXX"; char *getFileDir = testController.createTempDirectory(getFileDirFmt); plan->setProperty(getFile, processors::GetFile::Directory.getName(), getFileDir); std::fstream file; std::stringstream ss; ss << getFileDir << "/" << "tstFile.ext"; file.open(ss.str(), std::ios::out); file << "tempFile"; file.close(); plan->reset(); testController.runSession(plan, false); testController.runSession(plan, false); testController.runSession(plan, false); REQUIRE(LogTestController::getInstance().contains("key:test_attr value:2")); logTestController.reset(); } TEST_CASE("Lua: Test Create", "[executescriptLuaCreate]") { // NOLINT TestController testController; LogTestController &logTestController = LogTestController::getInstance(); logTestController.setDebug<TestPlan>(); logTestController.setDebug<minifi::processors::ExecuteScript>(); auto plan = testController.createPlan(); auto executeScript = plan->addProcessor("ExecuteScript", "executeScript"); plan->setProperty(executeScript, processors::ExecuteScript::ScriptEngine.getName(), "lua"); plan->setProperty(executeScript, processors::ExecuteScript::ScriptBody.getName(), R"( function onTrigger(context, session) flow_file = session:create(nil) if flow_file ~= nil then log:info('created flow file: ' .. flow_file:getAttribute('filename')) session:transfer(flow_file, REL_SUCCESS) end end<|fim▁hole|> testController.runSession(plan, false); REQUIRE(LogTestController::getInstance().contains("[info] created flow file:")); logTestController.reset(); } TEST_CASE("Lua: Test Require", "[executescriptLuaRequire]") { // NOLINT TestController testController; LogTestController &logTestController = LogTestController::getInstance(); logTestController.setDebug<TestPlan>(); logTestController.setDebug<minifi::processors::ExecuteScript>(); auto plan = testController.createPlan(); auto executeScript = plan->addProcessor("ExecuteScript", "executeScript"); plan->setProperty(executeScript, processors::ExecuteScript::ScriptEngine.getName(), "lua"); plan->setProperty(executeScript, processors::ExecuteScript::ScriptBody.getName(), R"( require 'os' require 'coroutine' require 'math' require 'io' require 'string' require 'table' require 'package' log:info('OK') )"); plan->reset(); testController.runSession(plan, false); REQUIRE(LogTestController::getInstance().contains("[info] OK")); logTestController.reset(); }<|fim▁end|>
)"); plan->reset();
<|file_name|>test_institution_nodes_list.py<|end_file_name|><|fim▁begin|>from nose.tools import *

from tests.base import ApiTestCase
from tests.factories import InstitutionFactory, AuthUserFactory, NodeFactory

from framework.auth import Auth
from api.base.settings.defaults import API_BASE


class TestInstitutionNodeList(ApiTestCase):
    def setUp(self):
        super(TestInstitutionNodeList, self).setUp()
        self.institution = InstitutionFactory()
        self.node1 = NodeFactory(is_public=True)
        self.node1.primary_institution = self.institution
        self.node1.save()
        self.user1 = AuthUserFactory()
        self.user2 = AuthUserFactory()
        self.node2 = NodeFactory(creator=self.user1, is_public=False)
        self.node2.primary_institution = self.institution
        self.node2.add_contributor(self.user2, auth=Auth(self.user1))
        self.node2.save()
        self.node3 = NodeFactory(creator=self.user2, is_public=False)
        self.node3.primary_institution = self.institution
        self.node3.save()
        self.institution_node_url = '/{0}institutions/{1}/nodes/'.format(API_BASE, self.institution._id)

    def test_return_all_public_nodes(self):
        res = self.app.get(self.institution_node_url)
        assert_equal(res.status_code, 200)<|fim▁hole|>
        assert_in(self.node1._id, ids)
        assert_not_in(self.node2._id, ids)
        assert_not_in(self.node3._id, ids)

    def test_return_private_nodes_with_auth(self):
        res = self.app.get(self.institution_node_url, auth=self.user1.auth)
        assert_equal(res.status_code, 200)
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.node1._id, ids)
        assert_in(self.node2._id, ids)
        assert_not_in(self.node3._id, ids)

    def test_return_private_nodes_mixed_auth(self):
        res = self.app.get(self.institution_node_url, auth=self.user2.auth)
        assert_equal(res.status_code, 200)
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.node1._id, ids)
        assert_in(self.node2._id, ids)
        assert_in(self.node3._id, ids)<|fim▁end|>
ids = [each['id'] for each in res.json['data']]
<|file_name|>ext.py<|end_file_name|><|fim▁begin|>import json
import logging

from foxglove import glove
from httpx import Response

from .settings import Settings

logger = logging.getLogger('ext')


def lenient_json(v):
    if isinstance(v, (str, bytes)):
        try:
            return json.loads(v)
        except (ValueError, TypeError):
            pass
    return v


class ApiError(RuntimeError):
    def __init__(self, method, url, status, response_text):
        self.method = method
        self.url = url
        self.status = status
        self.body = response_text

    def __str__(self):
        return f'{self.method} {self.url}, unexpected response {self.status}'


class ApiSession:
    def __init__(self, root_url, settings: Settings):
        self.settings = settings
        self.root = root_url.rstrip('/') + '/'

    async def get(self, uri, *, allowed_statuses=(200,), **data) -> Response:
        return await self._request('GET', uri, allowed_statuses=allowed_statuses, **data)

    async def delete(self, uri, *, allowed_statuses=(200,), **data) -> Response:
        return await self._request('DELETE', uri, allowed_statuses=allowed_statuses, **data)

    async def post(self, uri, *, allowed_statuses=(200, 201), **data) -> Response:
        return await self._request('POST', uri, allowed_statuses=allowed_statuses, **data)

    async def put(self, uri, *, allowed_statuses=(200, 201), **data) -> Response:
        return await self._request('PUT', uri, allowed_statuses=allowed_statuses, **data)

    async def _request(self, method, uri, allowed_statuses=(200, 201), **data) -> Response:
        method, url, data = self._modify_request(method, self.root + str(uri).lstrip('/'), data)
        kwargs = {}
        headers = data.pop('headers_', None)
        if headers is not None:
            kwargs['headers'] = headers
        if timeout := data.pop('timeout_', None):
            kwargs['timeout'] = timeout
        r = await glove.http.request(method, url, json=data or None, **kwargs)
        if isinstance(allowed_statuses, int):
            allowed_statuses = (allowed_statuses,)
        if allowed_statuses != '*' and r.status_code not in allowed_statuses:
            data = {
                'request_real_url': str(r.request.url),
                'request_headers': dict(r.request.headers),
                'request_data': data,
                'response_headers': dict(r.headers),
                'response_content': lenient_json(r.text),
            }
            logger.warning(<|fim▁hole|>
                r.status_code,
                extra={'data': data} if self.settings.verbose_http_errors else {},
            )
            raise ApiError(method, url, r.status_code, r.text)
        else:
            logger.debug('%s /%s -> %s', method, uri, r.status_code)
        return r

    def _modify_request(self, method, url, data):
        return method, url, data


class Mandrill(ApiSession):
    def __init__(self, settings):
        super().__init__(settings.mandrill_url, settings)

    def _modify_request(self, method, url, data):
        data['key'] = self.settings.mandrill_key
        return method, url, data


class MessageBird(ApiSession):
    def __init__(self, settings):
        super().__init__(settings.messagebird_url, settings)

    def _modify_request(self, method, url, data):
        data['headers_'] = {'Authorization': f'AccessKey {self.settings.messagebird_key}'}
        return method, url, data<|fim▁end|>
'%s unexpected response %s /%s -> %s', self.__class__.__name__, method, uri,
<|file_name|>Test_Kivy.py<|end_file_name|><|fim▁begin|>import os<|fim▁hole|>
class TestApp(App):
    pass

if __name__ == '__main__':
    TestApp().run()<|fim▁end|>
os.environ['KIVY_GL_BACKEND'] = 'gl' #need this to fix a kivy segfault that occurs with python3 for some reason
from kivy.app import App
<|file_name|>binarysearch.go<|end_file_name|><|fim▁begin|>package main

import (
	"fmt"
)

func binSearch(searchspace []int, key int) int {
	var min, max int
	min = searchspace[0]
	max = searchspace[len(searchspace)-1]
	for {
		if max < min {
			return -1
		}
		m := (min + max) / 2
		if searchspace[m] < key {
			min = m + 1
		} else if searchspace[m] > key {<|fim▁hole|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|><|fim▁end|>
"""Test package."""
<|file_name|>antecedent.ts<|end_file_name|><|fim▁begin|>/** * Created by Maazouza on 07/05/2017.<|fim▁hole|> idAnt: number; idDos: number; dateAnt: string; descriptionAnt: string; commentaireAnt: string; sujetAnt: string; dateCreationAnt: string; }<|fim▁end|>
*/ export class Antecedent {
<|file_name|>exceptions.py<|end_file_name|><|fim▁begin|>AuthorizedException = ( BufferError, ArithmeticError, AssertionError, AttributeError, EnvironmentError, EOFError, LookupError, MemoryError,<|fim▁hole|> RuntimeError, SystemError, TypeError, ValueError )<|fim▁end|>
ReferenceError,
<|file_name|>transaction_generator.cpp<|end_file_name|><|fim▁begin|>// Copyright (C) 2018 go-nebulas authors // // This file is part of the go-nebulas library. // // the go-nebulas library is free software: you can redistribute it and/or // modify // it under the terms of the GNU General Public License as published by<|fim▁hole|>// but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with the go-nebulas library. If not, see // <http://www.gnu.org/licenses/>. // #include "cmd/dummy_neb/generator/transaction_generator.h" transaction_generator::transaction_generator(all_accounts *accounts, generate_block *block, int new_account_num, int tx_num) : generator_base(accounts, block, new_account_num, tx_num), m_new_addresses(), m_last_used_address_index(0) {} transaction_generator::~transaction_generator() {} std::shared_ptr<corepb::Account> transaction_generator::gen_account() { auto ret = m_block->gen_user_account(100_nas); m_new_addresses.push_back(neb::to_address(ret->address())); return ret; } std::shared_ptr<corepb::Transaction> transaction_generator::gen_tx() { auto from_addr = neb::to_address(m_all_accounts->random_user_account()->address()); address_t to_addr; if (m_last_used_address_index < m_new_addresses.size()) { to_addr = m_new_addresses[m_last_used_address_index]; m_last_used_address_index++; } else { to_addr = neb::to_address(m_all_accounts->random_user_account()->address()); } return m_block->add_binary_transaction(from_addr, to_addr, 1_nas); } checker_tasks::task_container_ptr_t transaction_generator::gen_tasks() { return nullptr; }<|fim▁end|>
// the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // the go-nebulas library is distributed in the hope that it will be useful,
<|file_name|>map.js<|end_file_name|><|fim▁begin|>/*global $, google, InfoBox */ var simulation_manager = (function(){ google.maps.visualRefresh = true; var ua_is_mobile = navigator.userAgent.indexOf('iPhone') !== -1 || navigator.userAgent.indexOf('Android') !== -1; var config = (function(){ var params = {}; return { init: function() { $.ajax({ url: 'static/js/config.js', dataType: 'json', async: false, success: function(config_params) { params = config_params; // Override config with the QS params var url_parts = window.location.href.split('?'); if (url_parts.length === 2) { var qs_groups = url_parts[1].split('&'); $.each(qs_groups, function(index, qs_group){ var qs_parts = qs_group.split('='); var key = qs_parts[0]; // Backwards compatibility switch (key) { case 'zoom': key = 'zoom.start'; break; case 'x': key = 'center.x'; break; case 'y': key = 'center.y'; break; } var param_value = decodeURIComponent(qs_parts[1]); params[key] = param_value; }); } }, error: function(a) { alert('Broken JSON in static/js/config.js'); } }); }, getParam: function(key) { return (typeof params[key] === 'undefined') ? null : params[key]; } }; })(); var map = null; var simulation_vehicles = {}; var listener_helpers = (function(){ var listeners = {}; function notify(type) { if (typeof listeners[type] === 'undefined') { return; } $.each(listeners[type], function(i, fn){ fn(); }); } function subscribe(type, fn) { if (typeof listeners[type] === 'undefined') { listeners[type] = []; } listeners[type].push(fn); } return { notify: notify, subscribe: subscribe }; })(); var stationsPool = (function(){ var stations = {}; function get(id) { return (typeof stations[id]) === 'undefined' ? '' : stations[id].get('name'); } function location_get(id) { return (typeof stations[id]) === 'undefined' ? '' : stations[id].get('location'); } function addFeatures(features) { $.each(features, function(index, feature) { var station_id, station_name; if (typeof(feature.properties.stop_id) === 'undefined') { // Custom GeoJSON support station_id = feature.properties.station_id; station_name = feature.properties.name; } else { // GTFS support station_id = feature.properties.stop_id; station_name = feature.properties.stop_name; } var station = new google.maps.MVCObject(); station.set('name', station_name); var station_x = parseFloat(feature.geometry.coordinates[0]); var station_y = parseFloat(feature.geometry.coordinates[1]); station.set('location', new google.maps.LatLng(station_y, station_x)); stations[station_id] = station; }); } return { get: get, addFeatures: addFeatures, location_get: location_get }; })(); // Routes manager. // Roles: // - keep a reference for the routes between stations // i.e. 
(Zürich HB-Bern, Zürich HB-Olten, Olten-Bern) // Note: one route can contain one or more edges (the low-level entity in the simulation graph) // - interpolate position at given percent along a route var linesPool = (function() { var network_lines = {}; var routes = {}; var route_highlight = new google.maps.Polyline({ path: [], strokeColor: "white", strokeOpacity: 0.8, strokeWeight: 3, map: null, icons: [{ icon: { path: 'M 0,-2 0,2', strokeColor: 'black', strokeOpacity: 1.0 }, repeat: '40px' }], timer: null }); // TODO - that can be a nice feature request for google.maps.geometry lib function positionOnRouteAtPercentGet(ab_edges, percent) { function routeIsDetailedAtPercent() { for (var k=0; k<route.detailed_parts.length; k++) { if ((percent >= route.detailed_parts[k].start) && (percent < route.detailed_parts[k].end)) { return true; } } return false; } var route = routes[ab_edges]; var dAC = route.length*percent; var is_detailed = map_helpers.isDetailView() ? routeIsDetailedAtPercent() : false; var position_data = positionDataGet(route, dAC, is_detailed); if (position_data !== null) { position_data.is_detailed = is_detailed; } return position_data; } function routeAdd(ab_edges) { if (typeof routes[ab_edges] !== 'undefined') { return; } var edges = ab_edges.split(','); var routePoints = []; var dAB = 0; $.each(edges, function(k, edgeID) { if (edgeID.substr(0, 1) === '-') { edgeID = edgeID.substr(1); var points = network_lines[edgeID]?network_lines[edgeID].points.slice().reverse():[]; } else { var points = network_lines[edgeID]?network_lines[edgeID].points: []; } routePoints = routePoints.concat(points); dAB += network_lines[edgeID]? network_lines[edgeID].length: 0; }); var routeDetailedParts = []; var routeDetailedParts_i = 0; var is_detailed_prev = false; var dAC = 0; $.each(edges, function(k, edgeID) { if (edgeID.substr(0, 1) === '-') { edgeID = edgeID.substr(1); } var is_detailed = network_lines[edgeID] && network_lines[edgeID].is_detailed; if (is_detailed) { if (is_detailed_prev === false) { routeDetailedParts[routeDetailedParts_i] = { start: dAC / dAB, end: 1 }; } } else { if (is_detailed_prev) { routeDetailedParts[routeDetailedParts_i].end = dAC / dAB; routeDetailedParts_i += 1; } } is_detailed_prev = is_detailed; dAC += network_lines[edgeID] ? 
network_lines[edgeID].length :0; }); var route = { points: routePoints, length: dAB, detailed_parts: routeDetailedParts }; routes[ab_edges] = route; } function lengthGet(ab_edges) { return routes[ab_edges].length; } function routeHighlight(vehicle) { var points = []; if (vehicle.source === 'gtfs') { points = routes[vehicle.shape_id].points; } else { $.each(vehicle.edges, function(k, ab_edges){ if (k === 0) { return; } points = points.concat(routes[ab_edges].points); }); } route_highlight.setPath(points); route_highlight.setMap(map); var icon_offset = 0; route_highlight.set('timer', setInterval(function(){ if (icon_offset > 39) { icon_offset = 0; } else { icon_offset += 2; } var icons = route_highlight.get('icons'); icons[0].offset = icon_offset + 'px'; route_highlight.set('icons', icons); }, 20)); } function routeHighlightRemove() { route_highlight.setMap(null); clearInterval(route_highlight.get('timer')); } function loadEncodedEdges(edges) { $.each(edges, function(edge_id, encoded_edge) { network_lines[edge_id] = { points: google.maps.geometry.encoding.decodePath(encoded_edge), is_detailed: false }; }); } function loadGeoJSONEdges(features) { $.each(features, function(index, feature) { var edge_coords = []; $.each(feature.geometry.coordinates, function(i2, feature_coord){ edge_coords.push(new google.maps.LatLng(feature_coord[1], feature_coord[0])); }); var edge_id = feature.properties.edge_id || feature.properties.shape_id; network_lines[edge_id] = { points: edge_coords, is_detailed: feature.properties.detailed === 'yes', length: parseFloat(google.maps.geometry.spherical.computeLength(edge_coords).toFixed(3)) }; }); } function loadGeoJSONShapes(features) { $.each(features, function(index, feature) { var shape_id = feature.properties.shape_id; var points = []; $.each(feature.geometry.coordinates, function(i2, feature_coord){ points.push(new google.maps.LatLng(feature_coord[1], feature_coord[0])); }); var dAB = parseFloat(google.maps.geometry.spherical.computeLength(points).toFixed(3)); var route = { points: points, length: dAB, detailed_parts: [] }; routes[shape_id] = route; }); } function positionDataGet(route, dAC, is_detailed) { var dC = 0; for (var i=1; i<route.points.length; i++) { var pA = route.points[i-1]; var pB = route.points[i]; var d12 = google.maps.geometry.spherical.computeDistanceBetween(pA, pB); if ((dC + d12) > dAC) { var data = { position: google.maps.geometry.spherical.interpolate(pA, pB, (dAC - dC)/d12) }; if (is_detailed) { data.heading = google.maps.geometry.spherical.computeHeading(pA, pB); } return data; } dC += d12; } return null; } function projectDistanceAlongRoute(ab_edges, dAC) { var route = routes[ab_edges]; return positionDataGet(route, dAC, true); } return { positionGet: positionOnRouteAtPercentGet, routeAdd: routeAdd, lengthGet: lengthGet, routeHighlight: routeHighlight, routeHighlightRemove: routeHighlightRemove, loadEncodedEdges: loadEncodedEdges, loadGeoJSONEdges: loadGeoJSONEdges, loadGeoJSONShapes: loadGeoJSONShapes, projectDistanceAlongRoute: projectDistanceAlongRoute }; })(); // Time manager // Roles: // - manages the current number of seconds that passed since midnight var timer = (function(){ var timer_refresh = 100; var ts_midnight = null; var ts_now = null; var ts_minute = null; var seconds_multiply = null; function init() { (function(){ var d = new Date(); var hms = config.getParam('hms'); // Demo data is set for 9 AM if (config.getParam('api_paths.trips') === 'api/demo/trips.json') { hms = '09:00:00'; } if (hms !== null) { var hms_matches = 
hms.match(/^([0-9]{2}):([0-9]{2}):([0-9]{2})$/); if (hms_matches) { d.setHours(parseInt(hms_matches[1], 10)); d.setMinutes(parseInt(hms_matches[2], 10)); d.setSeconds(parseInt(hms_matches[3], 10)); } } ts_now = d.getTime() / 1000; d.setHours(0); d.setMinutes(0); d.setSeconds(0); d.setMilliseconds(0); ts_midnight = d.getTime() / 1000; })(); seconds_multiply = parseFloat($('#time_multiply').val()); $('#time_multiply').change(function(){ seconds_multiply = parseInt($(this).val(), 10); }); var timeContainer = $('#day_time'); function timeIncrement() { var d_now = new Date(ts_now * 1000); var ts_minute_new = d_now.getMinutes(); if (ts_minute !== ts_minute_new) { if (ts_minute !== null) { listener_helpers.notify('minute_changed'); } ts_minute = ts_minute_new; } timeContainer.text(getHMS()); ts_now += (timer_refresh / 1000) * seconds_multiply; setTimeout(timeIncrement, timer_refresh); } timeIncrement(); } function pad2Dec(what) { return (what < 10 ? '0' + what : what); } function getHMS(ts) { ts = ts || ts_now; var d = new Date(ts * 1000); var hours = pad2Dec(d.getHours()); var minutes = pad2Dec(d.getMinutes()); var seconds = pad2Dec(d.getSeconds()); return hours + ':' + minutes + ':' + seconds; } return { init: init, getTS: function(ts) { return ts_now; }, getHM: function(ts) { var hms = getHMS(ts); return hms.substring(0, 2) + ':' + hms.substring(3, 5); }, getTSMidnight: function() { return ts_midnight; }, getRefreshValue: function() { return timer_refresh; }, getHMS2TS: function(hms) { var hms_parts = hms.split(':'); var hours = parseInt(hms_parts[0], 10); var minutes = parseInt(hms_parts[1], 10); var seconds = parseInt(hms_parts[2], 10); return ts_midnight + hours * 3600 + minutes * 60 + seconds; } }; })(); var simulation_panel = (function(){ var selected_vehicle = null; function Toggler(el_id) { var el = $(el_id); el.attr('data-value-original', el.val()); var subscriber_types = { 'enable': [function(){ el.addClass('toggled'); el.val(el.attr('data-value-toggle')); }], 'disable': [function(){ el.removeClass('toggled'); el.val(el.attr('data-value-original')); }] }; this.subscriber_types = subscriber_types; el.click(function(){ var subscribers = el.hasClass('toggled') ? 
subscriber_types.disable : subscriber_types.enable; $.each(subscribers, function(index, fn){ fn(); }); }); } Toggler.prototype.subscribe = function(type, fn) { this.subscriber_types[type].push(fn); }; Toggler.prototype.trigger = function(type) { $.each(this.subscriber_types[type], function(index, fn){ fn(); }); }; var vehicle_follow = (function(){ listener_helpers.subscribe('map_init', function(){ function stop_following() { if (selected_vehicle === null) { return; } stop(); } google.maps.event.addListener(map, 'dragstart', stop_following); google.maps.event.addListener(map, 'click', stop_following); google.maps.event.addListener(map, 'dblclick', stop_following); }); var toggler; function init() { toggler = new Toggler('#follow_trigger'); toggler.subscribe('enable', function(){ selected_vehicle.marker.set('follow', 'yes-init'); }); toggler.subscribe('disable', function(){ if (selected_vehicle) { selected_vehicle.marker.set('follow', 'no'); } map.unbind('center'); }); } function start(vehicle) { selected_vehicle = vehicle; toggler.trigger('enable'); } function stop() { toggler.trigger('disable'); } return { init: init, start: start, stop: stop }; })(); var vehicle_route = (function(){ var toggler; function init() { toggler = new Toggler('#route_show_trigger'); toggler.subscribe('enable', function(){ linesPool.routeHighlight(selected_vehicle); }); toggler.subscribe('disable', function(){ linesPool.routeHighlightRemove(); }); } function hide() { toggler.trigger('disable'); } return { init: init, hide: hide }; })(); function station_info_hide() { $('#station_info').addClass('hidden'); } function vehicle_info_display(vehicle) { if ((selected_vehicle !== null) && (selected_vehicle.id === vehicle.id)) { if (selected_vehicle.marker.get('follow') === 'no') { vehicle_follow.start(selected_vehicle); } if (selected_vehicle.marker.get('follow') === 'yes') { vehicle_follow.stop(); } return; } selected_vehicle = vehicle; vehicle_follow.stop(); station_info_hide(); vehicle_route.hide(); $('.vehicle_name', $('#vehicle_info')).text(vehicle.name + ' (' + vehicle.id + ')'); var route_config = config.getParam('routes')[vehicle.route_icon]; if (route_config) { $('.vehicle_name', $('#vehicle_info')).css('background-color', route_config.route_color); $('.vehicle_name', $('#vehicle_info')).css('color', route_config.route_text_color); } var ts = timer.getTS(); var html_rows = []; $.each(vehicle.stations, function(index, stop_id) { var s_dep = (typeof vehicle.depS[index] === 'undefined') ? "n/a" : vehicle.depS[index]; var html_row = '<tr data-dep-sec="' + s_dep + '"><td>' + (index + 1) + '.</td>'; var station_location = stationsPool.location_get(stop_id); if (station_location === null) { html_row += '<td>' + stationsPool.get(stop_id) + '</td>'; } else { html_row += '<td><a href="#station_id=' + stop_id + '" data-station-id="' + stop_id + '">' + stationsPool.get(stop_id) + '</a></td>'; } var hm_arr = (typeof vehicle.arrS[index - 1] === 'undefined') ? '' : timer.getHM(vehicle.arrS[index - 1]); html_row += '<td>' + hm_arr + '</td>'; var hm_dep = (typeof vehicle.depS[index] === 'undefined') ? 
'' : timer.getHM(vehicle.depS[index]); html_row += '<td>' + hm_dep + '</td></tr>'; html_rows.push(html_row); }); $('#vehicle_timetable > tbody').html(html_rows.join('')); $('#vehicle_timetable tbody tr').each(function(){ var row_dep_sec = $(this).attr('data-dep-sec'); if (row_dep_sec === "n/a") { return; } if (row_dep_sec < ts) { $(this).addClass('passed'); } }); $('#vehicle_info').removeClass('hidden'); } function vehicle_info_hide() { vehicle_follow.stop(); vehicle_route.hide(); selected_vehicle = null; $('#vehicle_info').addClass('hidden'); } function station_info_display(station_id) { var hm = timer.getHM(); var url = config.getParam('api_paths.departures'); if (url === null) { return; } url = url.replace(/\[station_id\]/, station_id); url = url.replace(/\[hhmm\]/, hm.replace(':', '')); $.ajax({ url: url, dataType: 'json', success: function(vehicles) { vehicle_info_hide(); var html_rows = []; $.each(vehicles, function(index, vehicle) { var html_row = '<tr><td>' + (index + 1) + '.</td>'; if (typeof simulation_vehicles[vehicle.id] === 'undefined') { html_row += '<td>' + vehicle.name + '</td>'; } else { html_row += '<td><a href="#vehicle_id=' + vehicle.id + '" data-vehicle-id="' + vehicle.id + '">' + vehicle.name + '</a></td>'; } html_row += '<td>' + stationsPool.get(vehicle.st_b) + '</td>'; html_row += '<td>' + timer.getHM(vehicle.dep) + '</td>'; html_rows.push(html_row); }); $('#station_departures > tbody').html(html_rows.join('')); $('#station_info').removeClass('hidden'); $('.station_name', $('#station_info')).text(stationsPool.get(station_id)); } }); } function init() { vehicle_follow.init(); vehicle_route.init(); $(document).on("click", '#station_departures tbody tr a', function(){ var vehicle_id = $(this).attr('data-vehicle-id'); var vehicle = simulation_vehicles[vehicle_id]; simulation_panel.displayVehicle(vehicle); simulation_panel.followVehicle(vehicle); return false; }); $(document).on("click", '#vehicle_timetable tbody tr a', function(){ var station_id = $(this).attr('data-station-id'); var station_location = stationsPool.location_get(station_id); if (station_location === null) { return false; } map.setCenter(station_location); if (map.getZoom() < config.getParam('zoom.to_stops')) { map.setZoom(config.getParam('zoom.to_stops')); } vehicle_info_hide(); station_info_display(station_id); return false; }); (function(){ var location_el = $('#user_location'); var geolocation_marker = new google.maps.Marker({ icon: { url: 'static/images/geolocation-bluedot.png', size: new google.maps.Size(17, 17), origin: new google.maps.Point(0, 0), anchor: new google.maps.Point(8, 8) }, map: null, position: new google.maps.LatLng(0, 0) }); var geocoder = new google.maps.Geocoder(); function zoom_to_geometry(geometry) { if (geometry.viewport) { map.fitBounds(geometry.viewport); } else { map.setCenter(geometry.location); map.setZoom(15); } } $('#geolocation_click').click(function(){ if (navigator.geolocation) { location_el.val('...getting location'); navigator.geolocation.getCurrentPosition(function (position) { var latlng = new google.maps.LatLng(position.coords.latitude, position.coords.longitude); zoom_to_geometry({location: latlng}); geolocation_marker.setPosition(latlng); if (geolocation_marker.getMap() === null) { geolocation_marker.setMap(map); } geocoder.geocode({latLng: latlng}, function(results, status) { if (status === google.maps.GeocoderStatus.OK) { location_el.val(results[0].formatted_address); } }); }); } }); var autocomplete = new 
google.maps.places.Autocomplete($('#user_location')[0], { types: ['geocode'] }); autocomplete.bindTo('bounds', map); google.maps.event.addListener(autocomplete, 'place_changed', function(){ var place = autocomplete.getPlace(); if (place.geometry) { zoom_to_geometry(place.geometry); } else { geocoder.geocode({address: place.name}, function(results, status) { if (status === google.maps.GeocoderStatus.OK) { zoom_to_geometry(results[0].geometry); location_el.val(results[0].formatted_address); } }); } }); })(); $('input.panel_collapsible').click(function() { var panel_content = $(this).closest('div[data-type="panel"]').children('div.panel_content'); if ($(this).hasClass('expanded')) { $(this).removeClass('expanded'); panel_content.addClass('hidden'); } else { $(this).addClass('expanded'); panel_content.removeClass('hidden'); } }); } return { init: init, displayVehicle: vehicle_info_display, followVehicle: vehicle_follow.start, displayStation: station_info_display }; })(); var map_helpers = (function(){ var has_detail_view = false; var extended_bounds = null; function init(){ var mapStyles = [ { featureType: "poi.business", stylers: [ { visibility: "off" } ] },{ featureType: "road", elementType: "labels", stylers: [ { visibility: "off" } ] },{ featureType: "road", elementType: "labels", stylers: [ { visibility: "off" } ] },{ featureType: "road", elementType: "geometry", stylers: [ { visibility: "simplified" }, { lightness: 70 } ] },{ featureType: "transit.line", stylers: [ { visibility: "off" } ] },{ featureType: "transit.station.bus", stylers: [ { visibility: "off" } ] } ]; var map_inited = false; var map_options = { zoom: parseInt(config.getParam('zoom.start'), 10), center: new google.maps.LatLng(parseFloat(config.getParam('center.y')), parseFloat(config.getParam('center.x'))), mapTypeId: config.getParam('map_type_id'), styles: mapStyles, disableDefaultUI: true, zoomControl: true, scaleControl: true, streetViewControl: true, overviewMapControl: true, rotateControl: true, mapTypeControl: true, mapTypeControlOptions: { position: google.maps.ControlPosition.TOP_LEFT, mapTypeIds: [google.maps.MapTypeId.ROADMAP, google.maps.MapTypeId.TERRAIN, google.maps.MapTypeId.SATELLITE, 'stamen'] } }; if (config.getParam('tilt') !== null) { map_options.tilt = parseInt(config.getParam('tilt'), 10); } if (config.getParam('zoom.min') !== null) { map_options.minZoom = parseInt(config.getParam('zoom.min'), 10); } if (config.getParam('zoom.max') !== null) { map_options.maxZoom = parseInt(config.getParam('zoom.max'), 10); } map = new google.maps.Map(document.getElementById("map_canvas"), map_options); /* var kmlLayer = new google.maps.KmlLayer(); var kmlUrl = 'http://beta.ctweb.inweb.org.br/publico/stops.kml'; var kmlOptions = { //suppressInfoWindows: true, //preserveViewport: false, map: map, url: kmlUrl }; var kmlLayer = new google.maps.KmlLayer(kmlOptions); */ var stamen_map = new google.maps.StamenMapType('watercolor'); stamen_map.set('name', 'Stamen watercolor'); map.mapTypes.set('stamen', stamen_map); function map_layers_add(){ var edges_layer; var stations_layer; var ft_id; // Graph topology layers - EDGES ft_id = config.getParam('ft_layer_ids.topology_edges'); if (ft_id !== null) { edges_layer = new google.maps.FusionTablesLayer({ query: { select: 'geometry', from: ft_id }, clickable: false, map: map, styles: [ { polylineOptions: { strokeColor: "#FF0000", strokeWeight: 2 } },{ where: "type = 'tunnel'", polylineOptions: { strokeColor: "#FAAFBE", strokeWeight: 1.5 } } ] }); } // Graph topology layers - STATIONS 
ft_id = config.getParam('ft_layer_ids.topology_stations'); if (ft_id !== null) { stations_layer = new google.maps.FusionTablesLayer({ query: { select: 'geometry', from: ft_id }, suppressInfoWindows: true, map: map }); google.maps.event.addListener(stations_layer, 'click', function(ev){ var station_id = ev.row.id.value; simulation_panel.displayStation(station_id); }); } // GTFS layers - shapes.txt ft_id = config.getParam('ft_layer_ids.gtfs_shapes'); if (ft_id !== null) { edges_layer = new google.maps.FusionTablesLayer({ query: { select: 'geometry', from: ft_id }, clickable: false, map: map }); } // GTFS layers - stops.txt ft_id = config.getParam('ft_layer_ids.gtfs_stops'); if (ft_id !== null) { stations_layer = new google.maps.FusionTablesLayer({ query: { select: 'geometry', from: ft_id }, suppressInfoWindows: true, map: map }); google.maps.event.addListener(stations_layer, 'click', function(ev){ var station_id = ev.row.stop_id.value; simulation_panel.displayStation(station_id); }); } // Area mask ft_id = config.getParam('ft_layer_ids.mask'); if (ft_id !== null) { var layer = new google.maps.FusionTablesLayer({ query: { select: 'geometry', from: ft_id }, clickable: false, map: map }); } function trigger_toggleLayerVisibility() { if (config.getParam('debug') !== null) { console.log('Center: ' + map.getCenter().toUrlValue()); console.log('Zoom: ' + map.getZoom()); } function toggleLayerVisibility(layer, hide) { if ((typeof layer) === 'undefined') { return; } if (hide) { if (layer.getMap() !== null) { layer.setMap(null); } } else { if (layer.getMap() === null) { layer.setMap(map); } } } var map_type_id = map.getMapTypeId(); var is_satellite = (map_type_id === google.maps.MapTypeId.SATELLITE) && (map.getTilt() === 0); var config_preffix = is_satellite ? 'zoom.satellite' : 'zoom.roadmap'; var zoom = map.getZoom(); $.each(['stops', 'shapes'], function(k, layer_type){ var zoom_min = config.getParam(config_preffix + '.' + layer_type + '_min'); if (zoom_min === null) { zoom_min = 0; } var zoom_max = config.getParam(config_preffix + '.' + layer_type + '_max'); if (zoom_max === null) { zoom_max = 30; } var hide_layer = (zoom < zoom_min) || (zoom > zoom_max); if (layer_type === 'stops') { toggleLayerVisibility(stations_layer, hide_layer); } if (layer_type === 'shapes') { toggleLayerVisibility(edges_layer, hide_layer); } }); } google.maps.event.addListener(map, 'idle', trigger_toggleLayerVisibility); google.maps.event.addListener(map, 'maptypeid_changed', trigger_toggleLayerVisibility); trigger_toggleLayerVisibility(); } google.maps.event.addListener(map, 'idle', function() { if (map_inited === false) { // TODO - FIXME later ? 
// Kind of a hack, getBounds is ready only after a while since loading, so we hook in the 'idle' event map_inited = true; map_layers_add(); listener_helpers.notify('map_init'); function update_detail_view_state() { if (map.getMapTypeId() !== google.maps.MapTypeId.SATELLITE) { has_detail_view = false; return; } if (map.getZoom() < 17) { has_detail_view = false; return; } if (map.getTilt() !== 0) { has_detail_view = false; return; } has_detail_view = true; } google.maps.event.addListener(map, 'zoom_changed', update_detail_view_state); google.maps.event.addListener(map, 'tilt_changed', update_detail_view_state); google.maps.event.addListener(map, 'maptypeid_changed', update_detail_view_state); update_detail_view_state(); function update_extended_bounds() { var map_bounds = map.getBounds(); var bounds_point = map_bounds.getSouthWest(); var new_bounds_sw = new google.maps.LatLng(bounds_point.lat() - map_bounds.toSpan().lat(), bounds_point.lng() - map_bounds.toSpan().lng()); bounds_point = map_bounds.getNorthEast(); var new_bounds_ne = new google.maps.LatLng(bounds_point.lat() + map_bounds.toSpan().lat(), bounds_point.lng() + map_bounds.toSpan().lng()); extended_bounds = new google.maps.LatLngBounds(new_bounds_sw, new_bounds_ne); } google.maps.event.addListener(map, 'bounds_changed', update_extended_bounds); update_extended_bounds(); } }); } return { init: init, isDetailView: function() { return has_detail_view; }, getExtendedBounds: function() { return extended_bounds; } }; })(); // Vehicle helpers // Roles: // - check backend for new vehicles // - manages vehicle objects(class Vehicle) and animates them (see Vehicle.render method) var vehicle_helpers = (function(){ var vehicle_detect = (function(){ var track_vehicle_name = null; var track_vehicle_id = null; function match_by_name(vehicle_name) { if (track_vehicle_name === null) { return false; } vehicle_name = vehicle_name.replace(/[^A-Z0-9]/i, ''); if (track_vehicle_name !== vehicle_name) { return false; } return true; } function match(vehicle_name, vehicle_id) { if (track_vehicle_id === null) { track_vehicle_name = config.getParam('vehicle_name'); if (track_vehicle_name !== null) { track_vehicle_name = track_vehicle_name.replace(/[^A-Z0-9]/i, ''); } track_vehicle_id = config.getParam('vehicle_id'); } if (track_vehicle_id === vehicle_id) { return true; } return match_by_name(vehicle_name); } listener_helpers.subscribe('vehicles_load', function(){ if (config.getParam('action') !== 'vehicle_add') { return; } function str_hhmm_2_sec_ar(str_hhmm) { var sec_ar = []; $.each(str_hhmm.split('_'), function(index, hhmm){ var hhmm_matches = hhmm.match(/([0-9]{2})([0-9]{2})/); sec_ar.push(timer.getHMS2TS(hhmm_matches[1] + ':' + hhmm_matches[2] + ':00')); }); return sec_ar; } var station_ids = config.getParam('station_ids').split('_'); $.each(station_ids, function(index, station_id_s){ station_ids[index] = station_id_s; }); var vehicle_data = { arrs: str_hhmm_2_sec_ar(config.getParam('arrs')), deps: str_hhmm_2_sec_ar(config.getParam('deps')), id: 'custom_vehicle', name: decodeURIComponent(config.getParam('vehicle_name')), sts: station_ids, type: config.getParam('vehicle_type'), edges: [] }; var v = new Vehicle(vehicle_data); simulation_vehicles[vehicle_data.id] = v; v.render(); simulation_panel.displayVehicle(v); simulation_panel.followVehicle(v); }); return { match: match }; })(); // Vehicle icons manager. // Roles: // - keep a reference for each vehicle type (IC, ICE, etc..) 
var imagesPool = (function(){ var icons = {}; function iconGet(type) { if (icons[type]) { return icons[type]; } var routes_config = config.getParam('routes'); if ((typeof routes_config[type]) === 'undefined') { return null; } if (routes_config[type].icon === false) { return null; } var url = routes_config[type].icon; var icon = { url: url, size: new google.maps.Size(20, 20), origin: new google.maps.Point(0, 0), anchor: new google.maps.Point(10, 10) }; icons[type] = icon; return icon; } var vehicle_detail_base_zoom = 17; var vehicle_detail_config = { "s-bahn-rear": { base_zoom_width: 33, width: 228 }, "s-bahn-middle": { base_zoom_width: 33, width: 247 }, "s-bahn-front": { base_zoom_width: 33, width: 239 }, "s-bahn_old-rear": { base_zoom_width: 26, width: 153 }, "s-bahn_old-middle": { base_zoom_width: 35, width: 228 }, "s-bahn_old-front": { base_zoom_width: 35, width: 224 }, "ic-loco-c2": { base_zoom_width: 36, width: 225 }, "ic-coach": { base_zoom_width: 36, width: 254 }, "ic-loco": { base_zoom_width: 19, width: 126 }, "icn-rear": { base_zoom_width: 32, width: 207 }, "icn-middle": { base_zoom_width: 32, width: 218 }, "icn-front": { base_zoom_width: 32, width: 207 }, "ir-coach": { base_zoom_width: 32, width: 223 } }; var vehicle_detail_icons = {}; var service_parts = { s: { offsets: [-40, -13, 14, 41], vehicles: ['s-bahn-rear', 's-bahn-middle', 's-bahn-middle', 's-bahn-front'] }, sbahn_old: { offsets: [-39, -14, 15, 44], vehicles: ['s-bahn_old-rear', 's-bahn_old-middle', 's-bahn_old-middle', 's-bahn_old-front'] }, ic: { offsets: [-110, -87, -58, -29, 0, 29, 58, 87], vehicles: ['ic-loco', 'ic-coach', 'ic-coach', 'ic-coach', 'ic-coach', 'ic-coach', 'ic-coach', 'ic-loco-c2'] }, icn: { offsets: [-78, -52, -26, 0, 26, 52, 78], vehicles: ['icn-rear', 'icn-middle', 'icn-middle', 'icn-middle', 'icn-middle', 'icn-middle', 'icn-front'] }, ir: { offsets: [-93, -67, -41, -15, 11, 37, 63, 84], vehicles: ['ir-coach', 'ir-coach', 'ir-coach', 'ir-coach', 'ir-coach', 'ir-coach', 'ir-coach', 'ic-loco'] } }; function getVehicleIcon(zoom, type, heading) { var key = zoom + '_' + type + '_' + heading; if (typeof vehicle_detail_icons[key] === 'undefined') { var original_width = vehicle_detail_config[type].width; var icon_width = vehicle_detail_config[type].base_zoom_width * Math.pow(2, parseInt(zoom - vehicle_detail_base_zoom, 10)); var base_url = 'http://static.vasile.ch/simcity/service-vehicle-detail'; var icon = { url: base_url + '/' + type + '/' + heading + '.png', size: new google.maps.Size(original_width, original_width), origin: new google.maps.Point(0, 0), scaledSize: new google.maps.Size(icon_width, icon_width), anchor: new google.maps.Point(parseInt(icon_width/2, 10), parseInt(icon_width/2, 10)) }; vehicle_detail_icons[key] = icon; } return vehicle_detail_icons[key]; } return { iconGet: iconGet, getServicePartsConfig: function(key) { if ((typeof service_parts[key]) === 'undefined') { key = 's'; } return service_parts[key]; }, getVehicleIcon: getVehicleIcon }; })(); var vehicle_ib = new InfoBox({ disableAutoPan: true, pixelOffset: new google.maps.Size(10, 10), vehicle_id: 0, closeBoxURL: '' }); var vehicleIDs = []; function Vehicle(params) { function parseTimes(times) { var time_ar = []; $.each(times, function(k, time){ // 32855 = 9 * 3600 + 7 * 60 + 35 if ((typeof time) === 'number') { if (time < (2 * 24 * 3600)) { time += timer.getTSMidnight(); } time_ar.push(time); return; } // 09:07:35 if (time.match(/^[0-9]{2}:[0-9]{2}:[0-9]{2}$/) !== null) { time = timer.getHMS2TS(time); time_ar.push(time); return; 
} // 09:07 if (time.match(/^[0-9]{2}:[0-9]{2}$/) !== null) { var hms = time + ':00'; time = timer.getHMS2TS(hms); time_ar.push(time); return; } }); return time_ar; } if ((typeof params.trip_id) === 'undefined') { this.source = 'custom'; this.id = params.id; this.name = params.name; this.stations = params.sts; this.edges = params.edges; this.depS = parseTimes(params.deps); this.arrS = parseTimes(params.arrs); this.route_icon = params.type; this.service_type = params.service_type; $.each(params.edges, function(k, edges) { if (k === 0) { return; } linesPool.routeAdd(edges); }); } else { // GTFS approach this.source = 'gtfs'; this.id = params.trip_id; this.name = params.route_short_name; this.service_type = ''; this.edges = []; this.shape_id = params.shape_id; var departures = []; var arrivals = []; var stations = []; var shape_percent = []; $.each(params.stops, function(k, stop){ if (k < (params.stops.length - 1)) { departures.push(stop.departure_time); } if (k > 0) { arrivals.push(stop.arrival_time); } stations.push(stop.stop_id); shape_percent.push(stop.stop_shape_percent); }); this.stations = stations; this.depS = parseTimes(departures); this.arrS = parseTimes(arrivals); this.shape_percent = shape_percent; this.route_icon = params.type; } var marker = new google.maps.Marker({ position: new google.maps.LatLng(0, 0), map: null, speed: null, status: 'not on map' }); var icon = imagesPool.iconGet(this.route_icon); if (icon !== null) { marker.setIcon(icon); } this.marker = marker; this.detail_markers = []; // TODO - FIXME .apply var that = this; google.maps.event.addListener(marker, 'click', function() { simulation_panel.displayVehicle(that); }); this.mouseOverMarker = function() { if (map.getZoom() < config.getParam('zoom.vehicle_mouseover_min')) { return; } if (vehicle_ib.get('vehicle_id') === that.id) { return; } vehicle_ib.set('vehicle_id', that.id); vehicle_ib.close(); var popup_div = $('#vehicle_popup'); $('span.vehicle_name', popup_div).text(that.name); var route_config = config.getParam('routes')[that.route_icon]; if (route_config) { $('span.vehicle_name', popup_div).css('background-color', route_config.route_color); $('span.vehicle_name', popup_div).css('color', route_config.route_text_color); } $('.status', popup_div).html(marker.get('status')); vehicle_ib.setContent($('#vehicle_popup_container').html()); vehicle_ib.open(map, this); }; this.mouseOutMarker = function() { vehicle_ib.set('vehicle_id', null); vehicle_ib.close(); }; google.maps.event.addListener(marker, 'mouseover', this.mouseOverMarker); google.maps.event.addListener(marker, 'mouseout', this.mouseOutMarker); if (vehicle_detect.match(this.name, this.id)) { simulation_panel.displayVehicle(this); simulation_panel.followVehicle(this); } } Vehicle.prototype.render = function() { // TODO - FIXME .apply var that = this; function animate() { var ts = timer.getTS(); var vehicle_position = null; var route_percent = 0; var d_AC = 0; var animation_timeout = 1000; for (var i=0; i<that.arrS.length; i++) { if (ts < that.arrS[i]) { var station_a = that.stations[i]; var station_b = that.stations[i+1]; var route_id = (that.source === 'gtfs') ? 
that.shape_id : that.edges[i+1]; var speed = that.marker.get('speed'); if (ts > that.depS[i]) { var routeLength = linesPool.lengthGet(route_id); // Vehicle is in motion between two stations if ((speed === 0) || (speed === null)) { var trackLength = routeLength; if (that.source === 'gtfs') { trackLength = routeLength * (that.shape_percent[i+1] - that.shape_percent[i]) / 100; } var speed = trackLength * 0.001 * 3600 / (that.arrS[i] - that.depS[i]); that.marker.set('speed', parseInt(speed, 10)); that.marker.set('status', 'Heading to ' + stationsPool.get(station_b) + '(' + timer.getHM(that.arrS[i]) + ')<br/>Speed: ' + that.marker.get('speed') + ' km/h'); } route_percent = (ts - that.depS[i])/(that.arrS[i] - that.depS[i]); if (that.source === 'gtfs') { route_percent = (that.shape_percent[i] + route_percent * (that.shape_percent[i+1] - that.shape_percent[i])) / 100; } d_AC = routeLength * route_percent; } else { // Vehicle is in a station if ((speed !== 0) || (speed === null)) { that.marker.set('status', 'Departing ' + stationsPool.get(station_a) + ' at ' + timer.getHM(that.depS[i])); that.marker.set('speed', 0); } if (that.source === 'gtfs') { route_percent = that.shape_percent[i] / 100; } } var vehicle_position_data = linesPool.positionGet(route_id, route_percent); if (vehicle_position_data === null) { break; } var vehicle_position = vehicle_position_data.position; if (that.marker.get('follow') === 'yes-init') { that.marker.set('follow', 'yes'); map.panTo(vehicle_position); if (map.getZoom() < config.getParam('zoom.vehicle_follow')) { map.setZoom(config.getParam('zoom.vehicle_follow')); } map.setMapTypeId(google.maps.MapTypeId.SATELLITE); map.bindTo('center', that.marker, 'position'); } that.updateIcon(vehicle_position_data, d_AC, i); if (map.getZoom() >= 12) { animation_timeout = timer.getRefreshValue(); } setTimeout(animate, animation_timeout); break; } } // end arrivals loop if (vehicle_position === null) { that.marker.setMap(null); delete simulation_vehicles[that.id]; } } animate(); }; Vehicle.prototype.updateIcon = function(data, d_AC, i) { var service_parts = imagesPool.getServicePartsConfig(this.service_type); var render_in_detail = data.is_detailed && (service_parts !== null); var vehicle_position = data.position; this.marker.setPosition(data.position); if (render_in_detail) { if (this.marker.getMap() !== null) { this.marker.setMap(null); } if (map_helpers.getExtendedBounds().contains(vehicle_position)) { var that = this; $.each(service_parts.offsets, function(k, offset){ if ((typeof that.detail_markers[k]) === 'undefined') { that.detail_markers[k] = new google.maps.Marker({ map: null }); var marker = that.detail_markers[k]; google.maps.event.addListener(marker, 'mouseover', that.mouseOverMarker); google.maps.event.addListener(marker, 'mouseout', that.mouseOutMarker); google.maps.event.addListener(marker, 'click', function(){ simulation_panel.displayVehicle(that); }); } var marker = that.detail_markers[k]; var route = that.edges[i+1]; var route_length = linesPool.lengthGet(route); var d_AC_new = d_AC + offset; if ((d_AC + offset) > route_length) { d_AC_new -= route_length; route = that.edges[i+2]; } var position_data = linesPool.projectDistanceAlongRoute(route, d_AC_new); if (position_data === null) { marker.setMap(null); return; } var heading = parseInt(position_data.heading, 10); if (heading < 0) { heading += 360; } heading = ('00' + heading).slice(-3); var zoom = map.getZoom(); var icon = imagesPool.getVehicleIcon(zoom, service_parts.vehicles[k], heading); if (((typeof 
marker.getIcon()) === 'undefined') || (marker.getIcon().url !== icon.url) || (marker.get('zoom') !== zoom)) { marker.setIcon(icon); marker.set('zoom', map.getZoom()); } marker.setPosition(position_data.position); if (marker.getMap() === null) { marker.setMap(map); } }); } else { $.each(this.detail_markers, function(k, marker){ marker.setMap(null); }); this.detail_markers = []; } } else { $.each(this.detail_markers, function(k, marker){ marker.setMap(null); }); this.detail_markers = []; if (map.getBounds().contains(vehicle_position)) { if (this.marker.getMap() === null) { this.marker.setMap(map); } } else { if (this.marker.getMap() !== null) { this.marker.setMap(null); } } } }; return { load: function() { var hm = timer.getHM(); var url = config.getParam('api_paths.trips'); url = url.replace(/\[hhmm\]/, hm.replace(':', '')); $.ajax({ url: url, dataType: 'json', success: function(vehicles) { $.each(vehicles, function(index, data) { var vehicle_id = ((typeof data.trip_id) === 'undefined') ? data.id : data.trip_id; if ((typeof simulation_vehicles[vehicle_id]) !== 'undefined') { return; } var v = new Vehicle(data); v.render(); simulation_vehicles[vehicle_id] = v; });<|fim▁hole|> } }; })(); listener_helpers.subscribe('map_init', function(){ function loadStations(url) { if (url === null) { return; } $.ajax({ url: url, dataType: 'json', success: function(geojson) { if (typeof(geojson.features) === 'undefined') { console.log('Malformed GeoJSON. URL: ' + url); return; } stationsPool.addFeatures(geojson.features); vehicle_helpers.load(); listener_helpers.subscribe('minute_changed', vehicle_helpers.load); }, error: function(jqXHR, textStatus, errorThrown) { console.log('Error from server ' + textStatus + ' for url: ' + url); } }); } // GTFS approach var url = config.getParam('geojson.gtfs_shapes'); if (url !== null) { $.ajax({ url: url, dataType: 'json', async: false, success: function(geojson) { if (typeof(geojson.features) === 'undefined') { console.log('Malformed GeoJSON. URL: ' + url); } else { linesPool.loadGeoJSONShapes(geojson.features); } }, error: function(jqXHR, textStatus, errorThrown) { console.log('Error from server ' + textStatus + ' for url: ' + url); } }); } loadStations(config.getParam('geojson.gtfs_stops')); // Custom topology approach var url = config.getParam('geojson.topology_edges'); if (url !== null) { $.ajax({ url: url, dataType: 'json', async: false, success: function(geojson) { if (typeof(geojson.features) === 'undefined') { console.log('Malformed GeoJSON. URL: ' + url); } else { linesPool.loadGeoJSONEdges(geojson.features); } }, error: function(jqXHR, textStatus, errorThrown) { console.log('Error from server ' + textStatus + ' for url: ' + url); } }); } loadStations(config.getParam('geojson.topology_stations')); }); function ui_init() { var view_mode = config.getParam('view_mode'); var panel_display = (ua_is_mobile === false) && (view_mode !== 'iframe'); if (panel_display) { $('#panel').removeClass('hidden'); } var time_multiply = config.getParam('time_multiply'); if (time_multiply !== null) { $('#time_multiply').val(time_multiply); } } return { init: function(){ config.init(); ui_init(); timer.init(); map_helpers.init(); simulation_panel.init(); }, getMap: function(){ return map; } }; })(); $(document).ready(simulation_manager.init);<|fim▁end|>
listener_helpers.notify('vehicles_load'); } });
<|file_name|>__manifest__.py<|end_file_name|><|fim▁begin|>{ "name": "website_sale_birthdate",<|fim▁hole|> "category": "eCommerce", "vesion": "13.0.1.0", "depends": ["website_sale", "partner_person"], "data": ["views.xml"], "installable": False, }<|fim▁end|>
"author": "IT-Projects LLC, Ivan Yelizariev", "license": "Other OSI approved licence", # MIT "support": "[email protected]", "website": "https://yelizariev.github.io",
<|file_name|>altered_template.py<|end_file_name|><|fim▁begin|>import sys # If our base template isn't on the PYTHONPATH already, we need to do this: sys.path.append('../path/to/base/templates') import basetemplate<|fim▁hole|> def add_resources(self): self.add_bucket() def add_bucket(self): """This will add a bucket using the base template, and then add a custom CORS configuration to it.""" super(AlteredTemplate, self).add_bucket() self.resources['StaticFiles']['Properties']['CorsConfiguration'] = { 'CorsRules': [ { 'AllowedHeaders': ['*'], 'AllowedMethods': ['GET'], 'AllowedOrigins': ['*'], } ] } cft = AlteredTemplate("S3 Bucket Project", options) cft.add_resources()<|fim▁end|>
class AlteredTemplate(basetemplate.BaseTemplate): """This project only needs an S3 bucket, but no EC2 server."""
<|file_name|>pivot_table.js<|end_file_name|><|fim▁begin|>import dt from 'datatables.net-bs'; import 'datatables.net-bs/css/dataTables.bootstrap.css'; import $ from 'jquery'; import { d3format, fixDataTableBodyHeight } from '../javascripts/modules/utils';<|fim▁hole|>import './pivot_table.css'; dt(window, $); module.exports = function (slice, payload) { const container = slice.container; const fd = slice.formData; const height = container.height(); let cols = payload.data.columns; if (Array.isArray(cols[0])) { cols = cols.map(col => col[0]); } // payload data is a string of html with a single table element container.html(payload.data.html); // jQuery hack to set verbose names in headers const replaceCell = function () { const s = $(this)[0].textContent; $(this)[0].textContent = slice.datasource.verbose_map[s] || s; }; slice.container.find('thead tr:first th').each(replaceCell); slice.container.find('thead tr th:first-child').each(replaceCell); // jQuery hack to format number slice.container.find('tbody tr').each(function () { $(this).find('td').each(function (i) { const metric = cols[i]; const format = slice.datasource.column_formats[metric] || fd.number_format || '.3s'; const tdText = $(this)[0].textContent; if (!isNaN(tdText) && tdText !== '') { $(this)[0].textContent = d3format(format, tdText); } }); }); if (fd.groupby.length === 1) { // When there is only 1 group by column, // we use the DataTable plugin to make the header fixed. // The plugin takes care of the scrolling so we don't need // overflow: 'auto' on the table. container.css('overflow', 'hidden'); const table = container.find('table').DataTable({ paging: false, searching: false, bInfo: false, scrollY: `${height}px`, scrollCollapse: true, scrollX: true, }); table.column('-1').order('desc').draw(); fixDataTableBodyHeight(container.find('.dataTables_wrapper'), height); } else { // When there is more than 1 group by column we just render the table, without using // the DataTable plugin, so we need to handle the scrolling ourselves. // In this case the header is not fixed. container.css('overflow', 'auto'); container.css('height', `${height + 10}px`); } };<|fim▁end|>
<|file_name|>util.py<|end_file_name|><|fim▁begin|>############################################################################## # # Copyright Zope Foundation and Contributors. # All Rights Reserved. # # This software is subject to the provisions of the Zope Public License, # Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution. # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED<|fim▁hole|># WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS # FOR A PARTICULAR PURPOSE. # ############################################################################## import time class Wait(object): class TimeOutWaitingFor(Exception): "A test condition timed out" timeout = 9 wait = .01 def __init__(self, timeout=None, wait=None, exception=None, getnow=(lambda: time.time), getsleep=(lambda: time.sleep)): if timeout is not None: self.timeout = timeout if wait is not None: self.wait = wait if exception is not None: self.TimeOutWaitingFor = exception self.getnow = getnow self.getsleep = getsleep def __call__(self, func=None, timeout=None, wait=None, message=None): if func is None: return lambda func: self(func, timeout, wait, message) if func(): return now = self.getnow() sleep = self.getsleep() if timeout is None: timeout = self.timeout if wait is None: wait = self.wait wait = float(wait) deadline = now() + timeout while 1: sleep(wait) if func(): return if now() > deadline: raise self.TimeOutWaitingFor( message or getattr(func, '__doc__') or getattr(func, '__name__') ) wait = Wait()<|fim▁end|>
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
<|file_name|>file_reporter.rs<|end_file_name|><|fim▁begin|>use super::{ErrorReporter, ReportKind, Message }; use std::fs::File; use std::io::{BufRead, BufReader, Write}; use std::cmp; use std::iter; use crate::common::{ constants::SPACES_PER_TAB, node_info::Span, }; /* Reports errors by printing/highlighting content from the given file */ pub struct FileErrorReporter { file_path: String, errors: i32, messages: Vec<Message>, } impl FileErrorReporter { pub fn new(file: &str) -> FileErrorReporter { FileErrorReporter { file_path: file.to_owned(), errors: 0, messages: vec![], } } fn read_lines(&self) -> Vec<String> { let mut lines = vec![]; // Todo: Replace with something slightly saner // Works for small files, but can be expensive memory wise in case the source file is huge let f = match File::open(&self.file_path) { Ok(file) => file, Err(e) => panic!("Failed to open file {}: {}", self.file_path, e), }; let reader = BufReader::new(f); for line in reader.lines() { match line { Ok(content) => lines.push(content.replace("\t", &" ".repeat(SPACES_PER_TAB as usize))), Err(e) => panic!("IO error: {}", e), } } lines } fn update_error_count(&mut self, error_type: &ReportKind) { match error_type { ReportKind::TokenError | ReportKind::TypeError | ReportKind::NameError | ReportKind::SyntaxError | ReportKind::DataFlowError => self.errors += 1, ReportKind::Note | ReportKind::Warning => (), } } } impl ErrorReporter for FileErrorReporter { fn report_error(&mut self, report_kind: ReportKind, span: Span, message : String) { self.update_error_count(&report_kind); self.messages.push( Message::HighlightMessage { span, report_kind, message <|fim▁hole|> fn has_errors(&self) -> bool { self.errors != 0 } fn has_reports(&self) -> bool { self.messages.len() > 0 } fn errors(&self) -> i32 { self.errors } fn reports(&self) -> i32 { self.messages.len() as i32 } fn print_errors(&self) { let lines = self.read_lines(); for msg in self.messages.iter() { match msg { Message::HighlightMessage{ span, report_kind, message, } => { write_highlight_message(span, *report_kind, message, lines.as_slice()) } } } } fn clear_reports(&mut self) { self.messages.clear(); } } fn write_highlight_message(span: &Span, report_kind: ReportKind, message: &String, lines: &[String]) { // may be called from multiple threads, at least in e2e tests. Prevent output from being garbled // when multiple threads attempt to print at the same time let stdout = std::io::stdout(); let stderr = std::io::stderr(); let mut _stdouthandle = stdout.lock(); let mut handle = stderr.lock(); // group notes with the warning/error, otherwise add a newline if report_kind != ReportKind::Note { writeln!(&mut handle).unwrap(); } // main error/warning/note print writeln!(&mut handle, "{}:{} {}: {}", span.line, span.column, report_kind, message).unwrap(); // print line if (span.line as usize) < lines.len() { let line = &lines[(span.line - 1) as usize]; write!(&mut handle, "{}", line).unwrap(); if !line.ends_with("\n") { writeln!(&mut handle).unwrap(); } // indentation for highlighting line write!(&mut handle, "{}", iter::repeat(" "). take(cmp::max(span.column - 1, 0) as usize). collect::<String>()).unwrap(); // highlighting let color = report_kind.get_color(); for _ in 0..span.length { write!(&mut handle, "{}", color.bold().paint("^").to_string()).unwrap(); } writeln!(&mut handle).unwrap(); } }<|fim▁end|>
}); }
<|file_name|>representative.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################# from openerp import models, fields, api, tools, _ class odisea_representative(models.Model): """Representative""" _name = 'odisea.representative' _description = 'Representative' @api.multi def _has_image(self): return dict((p.id, bool(p.image)) for p in self) name = fields.Char(string='Name', required=True) cuit = fields.Char(string='CUIT', size=13) title = fields.Many2one('res.partner.title', 'Title') company = fields.Char(string='Company') ref = fields.Char('Contact Reference', select=1) website = fields.Char('Website', help="Website of Partner or Company") comment = fields.Text('Notes') category_id = fields.Many2many('res.partner.category', id1='partner_id', id2='category_id', string='Tags') active = fields.Boolean('Active', default=True) street = fields.Char('Street') street2 = fields.Char('Street2') zip = fields.Char('Zip', size=24, change_default=True) city = fields.Char('City') <|fim▁hole|> fax = fields.Char('Fax') mobile = fields.Char('Mobile') birthdate = fields.Char('Birthdate') function = fields.Char('Job Position') is_company = fields.Boolean('Is a Company', help="Check if the contact is a company, otherwise it is a person") use_parent_address = fields.Boolean('Use Company Address', help="Select this if you want to set company's address information for this contact") # image: all image fields are base64 encoded and PIL-supported image = fields.Binary("Image", help="This field holds the image used as avatar for this contact, limited to 1024x1024px") image_medium = fields.Binary(compute="_get_image", string="Medium-sized image", store= False, help="Medium-sized image of this contact. It is automatically "\ "resized as a 128x128px image, with aspect ratio preserved. "\ "Use this field in form views or some kanban views.") image_small = fields.Binary(compute="_get_image", string="Small-sized image", store= False, help="Small-sized image of this contact. It is automatically "\ "resized as a 64x64px image, with aspect ratio preserved. 
"\ "Use this field anywhere a small image is required.") has_image = fields.Boolean(compute=_has_image) color = fields.Integer('Color Index') @api.multi def onchange_state(self, state_id): if state_id: state = self.env['res.country.state'].browse(state_id) return {'value': {'country_id': state.country_id.id}} return {} @api.multi def onchange_type(self, is_company): value = {'title': False} if is_company: value['use_parent_address'] = False domain = {'title': [('domain', '=', 'partner')]} else: domain = {'title': [('domain', '=', 'contact')]} return {'value': value, 'domain': domain} @api.one @api.depends("image") def _get_image(self): """ calculate the images sizes and set the images to the corresponding fields """ image = self.image # check if the context contains the magic `bin_size` key if self.env.context.get("bin_size"): # refetch the image with a clean context image = self.env[self._name].with_context({}).browse(self.id).image data = tools.image_get_resized_images(image, return_big=True, avoid_resize_big=False) self.image_big = data["image"] self.image_medium = data["image_medium"] self.image_small = data["image_small"] return True<|fim▁end|>
state_id = fields.Many2one("res.country.state", 'State', ondelete='restrict') country_id = fields.Many2one('res.country', 'Country', ondelete='restrict') email = fields.Char('Email') phone = fields.Char('Phone')
<|file_name|>error.go<|end_file_name|><|fim▁begin|>// Copyright 2017 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. /* Show nicely (but simple) formatted HTML error pages (or respond with JSON if the appropriate `Accept` header is set)) for the http package. */ package http import ( "encoding/json" "fmt" "html/template" "net/http" "time" "github.com/Lopastvertoleta/go-ethereum/log" ) //templateMap holds a mapping of an HTTP error code to a template var templateMap map[int]*template.Template //parameters needed for formatting the correct HTML page type ErrorParams struct { Msg string Code int Timestamp string template *template.Template Details template.HTML } //we init the error handling right on boot time, so lookup and http response is fast func init() { initErrHandling() } func initErrHandling() { //pages are saved as strings - get these strings genErrPage := GetGenericErrorPage() notFoundPage := GetNotFoundErrorPage() //map the codes to the available pages tnames := map[int]string{ 0: genErrPage, //default 400: genErrPage, 404: notFoundPage, 500: genErrPage, } templateMap = make(map[int]*template.Template) for code, tname := range tnames { //assign formatted HTML to the code templateMap[code] = template.Must(template.New(fmt.Sprintf("%d", code)).Parse(tname)) } } //ShowError is used to show an HTML error page to a client. //If there is an `Accept` header of `application/json`, JSON will be returned instead //The function just takes a string message which will be displayed in the error page. //The code is used to evaluate which template will be displayed //(and return the correct HTTP status code) func ShowError(w http.ResponseWriter, r *http.Request, msg string, code int) { if code == http.StatusInternalServerError { log.Error(msg) } respond(w, r, &ErrorParams{ Code: code, Msg: msg, Timestamp: time.Now().Format(time.RFC1123), template: getTemplate(code), }) } //evaluate if client accepts html or json response func respond(w http.ResponseWriter, r *http.Request, params *ErrorParams) { w.WriteHeader(params.Code) if r.Header.Get("Accept") == "application/json" { respondJson(w, params) } else { respondHtml(w, params) } } //return a HTML page func respondHtml(w http.ResponseWriter, params *ErrorParams) { err := params.template.Execute(w, params) if err != nil { log.Error(err.Error()) } } //return JSON func respondJson(w http.ResponseWriter, params *ErrorParams) { w.Header().Set("Content-Type", "application/json") json.NewEncoder(w).Encode(params) } //get the HTML template for a given code<|fim▁hole|> return templateMap[0] } }<|fim▁end|>
func getTemplate(code int) *template.Template { if val, tmpl := templateMap[code]; tmpl { return val } else {
<|file_name|>models.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import os.path from django.db import models from django.utils.translation import ugettext_lazy as _ from django.conf import settings as django_settings from django.db.models import signals from know.plugins.attachments import settings from know import managers from know.models.pluginbase import ReusablePlugin from know.models.article import BaseRevisionMixin class IllegalFileExtension(Exception): """File extension on upload is not allowed""" pass class Attachment(ReusablePlugin): objects = managers.ArticleFkManager() current_revision = models.OneToOneField( 'AttachmentRevision', verbose_name=_(u'current revision'), blank=True, null=True, related_name='current_set', help_text=_(u'The revision of this attachment currently in use (on all articles using the attachment)'), ) original_filename = models.CharField( max_length=256, verbose_name=_(u'original filename'), blank=True, null=True, ) def can_write(self, **kwargs): user = kwargs.get('user', None) if not settings.ANONYMOUS and (not user or user.is_anonymous()): return False return ReusablePlugin.can_write(self, **kwargs) def can_delete(self, user): return self.can_write(user=user) class Meta: verbose_name = _(u'attachment') verbose_name_plural = _(u'attachments') app_label = settings.APP_LABEL def __unicode__(self): return "%s: %s" % (self.article.current_revision.title, self.original_filename) def extension_allowed(filename): try: extension = filename.split(".")[-1] except IndexError: # No extension raise IllegalFileExtension("No file extension found in filename. That's not okay!") if not extension.lower() in map(lambda x: x.lower(), settings.FILE_EXTENSIONS): raise IllegalFileExtension("The following filename is illegal: %s. Extension has to be one of %s" % (filename, ", ".join(settings.FILE_EXTENSIONS))) return extension def upload_path(instance, filename): from os import path extension = extension_allowed(filename) # Has to match original extension filename if instance.id and instance.attachment and instance.attachment.original_filename: original_extension = instance.attachment.original_filename.split(".")[-1] if not extension.lower() == original_extension: raise IllegalFileExtension("File extension has to be '%s', not '%s'." % (original_extension, extension.lower())) elif instance.attachment: instance.attachment.original_filename = filename upload_path = settings.UPLOAD_PATH upload_path = upload_path.replace('%aid', str(instance.attachment.article.id)) if settings.UPLOAD_PATH_OBSCURIFY: import random import hashlib m = hashlib.md5(str(random.randint(0, 100000000000000))) upload_path = path.join(upload_path, m.hexdigest()) if settings.APPEND_EXTENSION: filename += '.upload' return path.join(upload_path, filename) class AttachmentRevision(BaseRevisionMixin, models.Model): attachment = models.ForeignKey('Attachment') file = models.FileField( upload_to=upload_path, max_length=255, verbose_name=_(u'file'), storage=settings.STORAGE_BACKEND, ) description = models.TextField( blank=True, ) class Meta: verbose_name = _(u'attachment revision') verbose_name_plural = _(u'attachment revisions') ordering = ('created',) get_latest_by = ('revision_number',) app_label = settings.APP_LABEL def get_filename(self): """Used to retrieve the filename of a revision. But attachment.original_filename should always be used in the frontend such that filenames stay consistent.""" # TODO: Perhaps we can let file names change when files are replaced? 
if not self.file:<|fim▁hole|> def get_size(self): """Used to retrieve the file size and not cause exceptions.""" try: return self.file.size except OSError: return None except ValueError: return None def save(self, *args, **kwargs): if (not self.id and not self.previous_revision and self.attachment and self.attachment.current_revision and self.attachment.current_revision != self): self.previous_revision = self.attachment.current_revision if not self.revision_number: try: previous_revision = self.attachment.attachmentrevision_set.latest() self.revision_number = previous_revision.revision_number + 1 # NB! The above should not raise the below exception, but somehow it does. except AttachmentRevision.DoesNotExist, Attachment.DoesNotExist: self.revision_number = 1 super(AttachmentRevision, self).save(*args, **kwargs) if not self.attachment.current_revision: # If I'm saved from Django admin, then article.current_revision is me! self.attachment.current_revision = self self.attachment.save() def __unicode__(self): return "%s: %s (r%d)" % (self.attachment.article.current_revision.title, self.attachment.original_filename, self.revision_number) def on_revision_delete(instance, *args, **kwargs): if not instance.file: return # Remove file path = instance.file.path.split("/")[:-1] instance.file.delete(save=False) # Clean up empty directories # Check for empty folders in the path. Delete the first two. if len(path[-1]) == 32: # Path was (most likely) obscurified so we should look 2 levels down max_depth = 2 else: max_depth = 1 for depth in range(0, max_depth): delete_path = "/".join(path[:-depth] if depth > 0 else path) try: if len(os.listdir(os.path.join(django_settings.MEDIA_ROOT, delete_path))) == 0: os.rmdir(delete_path) except OSError: # Raised by os.listdir if directory is missing pass signals.pre_delete.connect(on_revision_delete, AttachmentRevision)<|fim▁end|>
return None filename = self.file.name.split("/")[-1] return ".".join(filename.split(".")[:-1])
<|file_name|>menuButton.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react' const styles = require('./menuPart.less') import Icon from 'client/components/icon/icon' interface compProps { icon:string; text:string; onClick:()=>any; } const MenuButton:React.StatelessComponent<compProps> = ({icon, text, onClick}) => { return <div className={styles.menuButton} onClick={onClick}> <Icon name={icon} className={styles.icon}/> <span className={styles.text}>{text}</span> </div> }<|fim▁hole|>export default MenuButton<|fim▁end|>
<|file_name|>NewDebugger.java<|end_file_name|><|fim▁begin|>/** * yamsLog is a program for real time multi sensor logging and * supervision * Copyright (C) 2014 * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ import Database.Messages.ProjectMetaData; import Database.Sensors.Sensor; import Errors.BackendError; import FrontendConnection.Backend; import FrontendConnection.Listeners.ProjectCreationStatusListener; import protobuf.Protocol; import java.util.ArrayList; import java.util.List; import java.util.Random; /** * Created with IntelliJ IDEA. * User: Aitesh * Date: 2014-04-10 * Time: 09:34 * To change this template use File | Settings | File Templates. */ public class NewDebugger implements ProjectCreationStatusListener { private boolean projectListChanged; public NewDebugger(){ try{ Backend.createInstance(null); //args[0],Integer.parseInt(args[1]), Backend.getInstance().addProjectCreationStatusListener(this); Backend.getInstance().connectToServer("130.236.63.46",2001); } catch (BackendError b){ b.printStackTrace(); } projectListChanged = false; } private synchronized boolean readWriteBoolean(Boolean b){ if(b!=null) projectListChanged=b; return projectListChanged; } public void runPlayback() { try{ Thread.sleep(1000); //Backend.getInstance().sendSettingsRequestMessageALlSensors(0); readWriteBoolean(false); Random r = new Random(); String[] strings = new String[]{"Name","Code","File","Today","Monday","Tuesday","Wednesday","Thursday","Friday","Gotta","Get","Out","It","S","Friday"}; String project = "projectName" +r.nextInt()%10000; String playbackProject = "realCollection"; Thread.sleep(1000); System.out.println("Setting active project to : " + playbackProject); Backend.getInstance().setActiveProject(playbackProject); ProjectMetaData d = new ProjectMetaData(); d.setTest_leader("ffu"); d.setDate(1l); List<String> s = new ArrayList<String>(); s.add("memer1"); d.setMember_names(s); d.setTags(s); d.setDescription("desc"); List l = d.getMember_names(); l.add(r.nextInt()+". 
name"); d.setMember_names(l); Backend.getInstance().sendProjectMetaData(d); System.out.println("starting data collection"); Thread.sleep(100); String experimentName = "smallrun"; //Backend.getInstance().setActiveProject(playbackProject); // projektnamn: Thread.sleep(3000); // Backend.getInstance().getSensorConfigurationForPlayback().getSensorId(); List<Protocol.SensorConfiguration> playbackConfig; playbackConfig = Backend.getInstance().getSensorConfigurationForPlayback(); List<Integer> listOfIds = new ArrayList<Integer>(); /* for (Protocol.SensorConfiguration aPlaybackConfig : playbackConfig) { listOfIds.add(aPlaybackConfig.getSensorId()); }*/ System.out.println("LIST OF IDS SENT TO SERVER-------------------------------------------"); System.out.println(listOfIds); System.out.println("LIST OF IDS END -----------------------------------------------------"); System.out.println("LIST OF IDS CURRENTLY IN DATABASE -----------------------------------"); System.out.println(Backend.getInstance().getSensors()); System.out.println("LIST IN DATABASE END ------------------------------------------------"); Backend.getInstance().sendExperimentPlaybackRequest(experimentName, listOfIds); //Thread.sleep(3000); //Backend.getInstance().stopConnection(); } catch (BackendError b){ b.printStackTrace(); } catch (InterruptedException ignore){ } } public void run(){ try{ Thread.sleep(1000); Backend.getInstance().sendSettingsRequestMessageALlSensors(0); readWriteBoolean(false); Random r = new Random(); String[] strings = new String[]{"Name","Code","File","Today","Monday","Tuesday","Wednesday","Thursday","Friday","Gotta","Get","Out","It","S","Friday"}; String project = "projectName" +r.nextInt()%10000; String playbackProject = "realCollection"; for(int i = 0; i < 1000; i++){ // project+=strings[r.nextInt(strings.length)]; } //System.out.println(project); //if(r.nextInt()>0) return; Thread.sleep(1000); Backend.getInstance().createNewProjectRequest(project);//"projectName"+ System.currentTimeMillis()); <|fim▁hole|> System.out.println("finished waiting"); } // = Backend.getInstance().getProjectFilesFromServer().get()); System.out.println("Setting active project to : " + project); Backend.getInstance().setActiveProject(project); ProjectMetaData d = new ProjectMetaData(); //d.setEmail("[email protected]"); d.setTest_leader("ffu"); d.setDate(1l); List<String> s = new ArrayList<String>(); s.add("memer1"); d.setMember_names(s); d.setTags(s); d.setDescription("desc"); List l = d.getMember_names(); l.add(r.nextInt() + ". 
name"); d.setMember_names(l); Backend.getInstance().sendProjectMetaData(d); /* while(!readWriteBoolean(null) && readWriteBoolean(null)){ Backend.getInstance().sendProjectMetaData(d); List<String> f =d.getTags(); f.add(String.valueOf(System.currentTimeMillis())); d.setTags(f); } **/ System.out.println("starting data collection"); Thread.sleep(100); String experimentName = "experimentNam5"+System.currentTimeMillis(); //String experimentName = "smallrun"; // projektnamn: Backend.getInstance().startDataCollection(experimentName); Thread.sleep(3000); Backend.getInstance().stopDataCollection(); Thread.sleep(1); } catch (BackendError b){ b.printStackTrace(); } catch (InterruptedException ignore){ } System.out.println("Exiting"); } public void runTest(){ // try{ // Thread.sleep(1000); Backend.getInstance().sendSettingsRequestMessageALlSensors(0); // Thread.sleep(1000); Backend.getInstance().createNewProjectRequest("projectName"+ System.currentTimeMillis()); // // //Backend.getInstance().startDataCollection("test1234"); // // //Thread.sleep(5000); Backend.getInstance().stopDataCollection(); // Backend localInstance = Backend.getInstance(); // // Sensor sensor = localInstance.getSensors().get(0); // for(int i = 0; i<sensor.getAttributeList(0).size();i++){ // System.out.print(String.format("%f", sensor.getAttributeList(0).get(i).floatValue()).replace(',', '.') + ","); // // System.out.print(sensor.getId() + ","); // for (int j = 1; j < sensor.getAttributesName().length;j++){ // // System.out.print(sensor.getAttributeList(j).get(i).floatValue() + " ,"); // } // System.out.println(); // } // // } catch (BackendError b){ // b.printStackTrace(); // } catch (InterruptedException ignore){} } @Override public void projectCreationStatusChanged(Protocol.CreateNewProjectResponseMsg.ResponseType responseType) { readWriteBoolean(true); } }<|fim▁end|>
if(!readWriteBoolean(null)){ System.out.println("Waiting on projectListAgain"); while (!readWriteBoolean(null));
<|file_name|>fullFocalplane.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python """Perform preprocessing and generate raytrace exec scripts for one focal plane. For documentation using the python_control for ImSim/PhoSim version <= v.3.0.x, see README.v3.0.x.txt. For documentation using the python_control for ImSim/PhoSim version == v.3.2.x, see README.txt. The behavior of this script differs depending on the version of ImSim/PhoSim. For versions <= v3.0.x, it functions like the original fullFocalplane.py and calls AllChipsScriptGenerator.makeScripts() to generate a script and some tarballs that can in turn be executed to run the preprocessing step (which in turn calls AllChipsScriptGenerator) to generate shells scripts and tarballs for performing the raytrace stage. See README.v3.0.x.txt for more info. The behavior for ImSim/PhoSim version == 3.2.x is to run the preprocessing step directly through the class PhosimManager.PhosimPrepreprocessor (which in turn calls phosim.py in the phosin.git repository). After the preprocessing is complete, PhosimPreprocessor generates shell scripts for the raytrace phase. A few notes on options: --skip_atmoscreens: Use this to optionally skip the step to generate atmosphere screens during preprocessing and instead perform this operation at the start of the raytrace phase. This is useful in distributed environments where the cost of transferring the atmosphere screens to the compute node is higher than recalculating them. --logtostderr: (only v3.2.x and higher) By default, log output from python_controls is done via the python logging module, and directed to either log_dir in the imsim_config_file or /tmp/fullFocalplane.log if log_dir is not specified. This option overrides this behavior and prints logging information to stdout. Note: output from phosim.py and the phosim binaries are still printed to stdout. TODO(gardnerj): Add stdout log redirect TODO(gardnerj): Support sensor_ids argument for phosim.py. TODO(gardnerj): Support not running e2adc step. """ from __future__ import with_statement import ConfigParser from distutils import version import logging from optparse import OptionParser # Can't use argparse yet, since we must work in 2.5 import os import sys from AllChipsScriptGenerator import AllChipsScriptGenerator import PhosimManager import PhosimUtil import PhosimVerifier import ScriptWriter __author__ = 'Jeff Gardner ([email protected])' logger = logging.getLogger(__name__) def DoPreprocOldVersion(trimfile, policy, extra_commands, scheduler, sensor_id): """Do preprocessing for v3.1.0 and earlier. Args: trimfile: Full path to trim metadata file. policy: ConfigParser object from python_controls config file. extra_commands: Full path to extra commands or 'extraid' file. scheduler: Name of scheduler (currently, just 'csh' is supported). sensor_id: If not '', run just this single sensor ID. Returns: 0 (success) """ with PhosimUtil.WithTimer() as t: # Determine the pre-processing scheduler so that we know which class to use if scheduler == 'csh': scriptGenerator = AllChipsScriptGenerator(trimfile, policy, extra_commands) scriptGenerator.makeScripts(sensor_id) elif scheduler == 'pbs': scriptGenerator = AllChipsScriptGenerator_Pbs(trimfile, policy, extra_commands) scriptGenerator.makeScripts(sensor_id) elif scheduler == 'exacycle': print 'Exacycle funtionality not added yet.' return 1 else: print 'Scheduler "%s" unknown. Use -h or --help for help.' 
% scheduler t.LogWall('makeScripts') return 0 def DoPreproc(trimfile, imsim_config_file, extra_commands, scheduler, skip_atmoscreens=False, keep_scratch_dirs=False): """Do preprocessing for v3.2.0 and later. Args: trimfile: Full path to trim metadata file. imsim_config_file: Full path to the python_controls config file. extra_commands: Full path to extra commands or 'extraid' file. scheduler: Name of scheduler (currently, just 'csh' is supported). skip_atmoscreens: Generate atmosphere screens in raytrace stage instead of preprocessing stage. keep_scratch_dirs: Do not delete the working directories at the end of execution. Returns: 0 upon success, 1 upon failure. """ if scheduler == 'csh': preprocessor = PhosimManager.Preprocessor(imsim_config_file, trimfile, extra_commands) elif scheduler == 'pbs': # Construct PhosimPreprocessor with PBS-specific ScriptWriter preprocessor = PhosimManager.Preprocessor( imsim_config_file, trimfile, extra_commands, script_writer_class=ScriptWriter.PbsRaytraceScriptWriter) # Read in PBS-specific config policy = ConfigParser.RawConfigParser() policy.read(imsim_config_file) preprocessor.script_writer.ParsePbsConfig(policy) else: logger.critical('Unknown scheduler: %s. Use -h or --help for help', scheduler) return 1 preprocessor.InitExecEnvironment() with PhosimUtil.WithTimer() as t: if not preprocessor.DoPreprocessing(skip_atmoscreens=skip_atmoscreens): logger.critical('DoPreprocessing() failed.') return 1 t.LogWall('DoPreprocessing') exec_manifest_fn = 'execmanifest_raytrace_%s.txt' % preprocessor.focalplane.observationID files_to_stage = preprocessor.ArchiveRaytraceInputByExt(exec_archive_name=exec_manifest_fn) if not files_to_stage: logger.critical('Output archive step failed.') return 1 with PhosimUtil.WithTimer() as t: preprocessor.StageOutput(files_to_stage) t.LogWall('StageOutput') if not keep_scratch_dirs: preprocessor.Cleanup() verifier = PhosimVerifier.PreprocVerifier(imsim_config_file, trimfile, extra_commands) missing_files = verifier.VerifySharedOutput() if missing_files: logger.critical('Verification failed with the following files missing:') for fn in missing_files: logger.critical(' %s', fn) sys.stderr.write('Verification failed with the following files missing:\n') for fn in missing_files: sys.stderr.write(' %s\n', fn) else: logger.info('Verification completed successfully.') return 0 def ConfigureLogging(trimfile, policy, log_to_stdout, imsim_config_file, extra_commands=None): """Configures logger. If log_to_stdout, the logger will write to stdout. Otherwise, it will write to: 'log_dir' in the config file, if present /tmp/fullFocalplane.log if 'log_dir' is not present. Stdout from phosim.py and PhoSim binaries always goes to stdout. 
""" if log_to_stdout: log_fn = None else: if policy.has_option('general', 'log_dir'): # Log to file in log_dir obsid, filter_num = PhosimManager.ObservationIdFromTrimfile( trimfile, extra_commands=options.extra_commands) log_dir = os.path.join(policy.get('general', 'log_dir'), obsid) log_fn = os.path.join(log_dir, 'fullFocalplane_%s.log' % obsid) else: log_fn = '/tmp/fullFocalplane.log' PhosimUtil.ConfigureLogging(policy.getint('general', 'debug_level'), logfile_fullpath=log_fn) params_str = 'trimfile=%s\nconfig_file=%s\n' % (trimfile, imsim_config_file) if extra_commands: params_str += 'extra_commands=%s\n' % extra_commands PhosimUtil.WriteLogHeader(__file__, params_str=params_str) def main(trimfile, imsim_config_file, extra_commands, skip_atmoscreens, keep_scratch_dirs, sensor_ids, log_to_stdout=False): """ Run the fullFocalplanePbs.py script, populating it with the correct user and cluster job submission information from an LSST policy file. """ policy = ConfigParser.RawConfigParser() policy.read(imsim_config_file) if policy.has_option('general', 'phosim_version'): phosim_version = policy.get('general', 'phosim_version') else: phosim_version = '3.0.1' ConfigureLogging(trimfile, policy, log_to_stdout, imsim_config_file, extra_commands) # print 'Running fullFocalPlane on: ', trimfile logger.info('Running fullFocalPlane on: %s ', trimfile) # print 'Using Imsim/Phosim version', phosim_version logger.info('Using Imsim/Phosim version %s', phosim_version) # Must pass absolute paths to imsim/phosim workers if not os.path.isabs(trimfile): trimfile = os.path.abspath(trimfile) if not os.path.isabs(imsim_config_file): imsim_config_file = os.path.abspath(imsim_config_file) if not os.path.isabs(extra_commands): extra_commands = os.path.abspath(extra_commands) scheduler = policy.get('general','scheduler2') if version.LooseVersion(phosim_version) < version.LooseVersion('3.1.0'): if len(sensor_ids.split('|')) > 1: logger.critical('Multiple sensors not supported in version < 3.1.0.') return 1 sensor_id = '' if sensor_ids == 'all' else sensor_ids return DoPreprocOldVersion(trimfile, policy, extra_commandsm,scheduler, sensor_id) elif version.LooseVersion(phosim_version) > version.LooseVersion('3.2.0'): if sensor_ids != 'all': logger.critical('Single exposure mode is currently not supported for' ' phosim > 3.2.0') return 1 return DoPreproc(trimfile, imsim_config_file, extra_commands, scheduler, skip_atmoscreens=skip_atmoscreens, keep_scratch_dirs=keep_scratch_dirs) logger.critical('Unsupported phosim version %s', phosim_version) return 1 if __name__ == '__main__': usage = 'usage: %prog trimfile imsim_config_file [options]' parser = OptionParser(usage=usage) parser.add_option('-a', '--skip_atmoscreens', dest='skip_atmoscreens', action='store_true', default=False, help='Generate atmospheric screens in raytrace stage instead' ' of preprocessing stage.') parser.add_option('-c', '--command', dest='extra_commands', help='Extra commands filename.') parser.add_option('-k', '--keep_scratch', dest='keep_scratch_dirs', action='store_true', default=False, help='Do not cleanup working directories.' 
' (version 3.2.x and higher only).') parser.add_option('-l', '--logtostdout', dest='log_to_stdout', action='store_true', default=False, help='Write logging output to stdout instead of log file' ' (version 3.2.x and higher only).') parser.add_option('-s', '--sensor', dest='sensor_ids', default='all', help='Specify a list of sensor ids to use delimited by "|",' ' or use "all" for all.') (options, args) = parser.parse_args() if len(args) != 2: print 'Incorrect number of arguments. Use -h or --help for help.' print usage quit()<|fim▁hole|> sys.exit(main(trimfile, imsim_config_file, options.extra_commands, options.skip_atmoscreens, options.keep_scratch_dirs, options.sensor_ids, options.log_to_stdout))<|fim▁end|>
trimfile = args[0] imsim_config_file = args[1]
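A hypothetical invocation of the fullFocalplane.py driver above, mirroring the options documented in its module docstring; every file name below is a placeholder and the direct call assumes it is made inside the script, where `main` and `sys` are already in scope:

    # Command line (placeholder file names):
    #   python fullFocalplane.py obs_trimfile.dat imsim_config.cfg \
    #       --command extra_commands.txt --skip_atmoscreens --logtostdout
    # ...which amounts to the same call the __main__ block makes:
    sys.exit(main("obs_trimfile.dat", "imsim_config.cfg", "extra_commands.txt",
                  skip_atmoscreens=True, keep_scratch_dirs=False,
                  sensor_ids="all", log_to_stdout=True))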
<|file_name|>test_hashes.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # Copyright (C) 2017-2021 The btclib developers # # This file is part of btclib. It is subject to the license terms in the # LICENSE file found in the top-level directory of this distribution. # # No part of btclib including this file, may be copied, modified, propagated, # or distributed except according to the terms contained in the LICENSE file. "Tests for the `btclib.hashes` module." from btclib.hashes import hash160, hash256 from tests.test_to_key import ( net_unaware_compressed_pub_keys, net_unaware_uncompressed_pub_keys, plain_prv_keys, )<|fim▁hole|> test_vectors = ( plain_prv_keys + net_unaware_compressed_pub_keys + net_unaware_uncompressed_pub_keys ) for hexstring in test_vectors: hash160(hexstring) hash256(hexstring) # def test_fingerprint() -> None: # # seed = "bfc4cbaad0ff131aa97fa30a48d09ae7df914bcc083af1e07793cd0a7c61a03f65d622848209ad3366a419f4718a80ec9037df107d8d12c19b83202de00a40ad" # xprv = rootxprv_from_seed(seed) # pf = fingerprint(xprv) # xprv is automatically converted to xpub # child_key = derive(xprv, 0x80000000) # pf2 = BIP32KeyData.b58decode(child_key).parent_fingerprint # assert pf == pf2<|fim▁end|>
def test_hash160_hash256() -> None:
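For reference, the two helpers exercised above are conventionally defined in Bitcoin tooling as RIPEMD-160 over SHA-256 (hash160) and double SHA-256 (hash256); a standalone sketch under that assumption (not taken from btclib's own source, and hashlib.new("ripemd160") additionally needs an OpenSSL build that still provides RIPEMD-160):

    import hashlib

    def hash160(data: bytes) -> bytes:
        # RIPEMD-160 of the SHA-256 digest
        return hashlib.new("ripemd160", hashlib.sha256(data).digest()).digest()

    def hash256(data: bytes) -> bytes:
        # SHA-256 applied twice
        return hashlib.sha256(hashlib.sha256(data).digest()).digest()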
<|file_name|>0003_auto__add_field_map_bucket.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'Map.bucket' db.add_column(u'scout_map', 'bucket', self.gf('django.db.models.fields.related.ForeignKey')(default=1, related_name='map', to=orm['bucket.Bucket']), keep_default=False) def backwards(self, orm): # Deleting field 'Map.bucket' db.delete_column(u'scout_map', 'bucket_id') models = { u'accounts.profile': { 'Meta': {'object_name': 'Profile'}, 'favourite_snack': ('django.db.models.fields.CharField', [], {'max_length': '5'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'mugshot': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'blank': 'True'}), 'privacy': ('django.db.models.fields.CharField', [], {'default': "'registered'", 'max_length': '15'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': u"orm['auth.User']"}) }, u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, u'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'bucket.bucket': { 'Meta': {'object_name': 'Bucket'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 
'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'scout.datalayer': { 'Meta': {'object_name': 'DataLayer'}, u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'map': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'datalayers'", 'to': u"orm['scout.Map']"}) }, u'scout.map': { 'Meta': {'object_name': 'Map'}, 'bucket': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'map'", 'to': u"orm['bucket.Bucket']"}), 'center': ('django.contrib.gis.db.models.fields.PointField', [], {'geography': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'locate': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'modified_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'slug': ('autoslug.fields.AutoSlugField', [], {'unique': 'True', 'max_length': '50', 'populate_from': "'name'", 'unique_with': '()'}), 'tilelayer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'maps'", 'to': u"orm['scout.TileLayer']"}), 'zoom': ('django.db.models.fields.IntegerField', [], {'default': '7'}) }, u'scout.marker': { 'Meta': {'object_name': 'Marker'}, 'address': ('django.db.models.fields.TextField', [], {'default': "''"}), 'category': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'markers'", 'to': u"orm['scout.MarkerCategory']"}), 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['accounts.Profile']"}), 'created_on': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'datalayer': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'markers'", 'to': u"orm['scout.DataLayer']"}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'picture': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'position': ('django.contrib.gis.db.models.fields.PointField', [], {'geography': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}) }, u'scout.markercategory': { 'Meta': {'object_name': 'MarkerCategory'}, 'icon_color': ('django.db.models.fields.CharField', [], {'max_length': '30'}), 'icon_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'marker_color': ('django.db.models.fields.CharField', [], {'max_length': '30'}),<|fim▁hole|> }, u'scout.tilelayer': { 'Meta': {'object_name': 'TileLayer'}, 'attribution': ('django.db.models.fields.CharField', [], {'max_length': '300'}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'max_zoom': ('django.db.models.fields.IntegerField', [], {'default': '18'}), 'min_zoom': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'name': 
('django.db.models.fields.CharField', [], {'max_length': '200'}), 'url_template': ('django.db.models.fields.CharField', [], {'max_length': '200'}) } } complete_apps = ['scout']<|fim▁end|>
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
<|file_name|>SpecializedFunctionKey.java<|end_file_name|><|fim▁begin|>/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.facebook.presto.metadata; import com.facebook.presto.spi.type.Type; import java.util.Map; import java.util.Objects; import static com.google.common.base.Preconditions.checkNotNull; public class SpecializedFunctionKey { private final ParametricFunction function; private final Map<String, Type> boundTypeParameters; private final int arity; public SpecializedFunctionKey(ParametricFunction function, Map<String, Type> boundTypeParameters, int arity) { this.function = checkNotNull(function, "function is null"); this.boundTypeParameters = checkNotNull(boundTypeParameters, "boundTypeParameters is null"); this.arity = arity; } public ParametricFunction getFunction() { return function; } public Map<String, Type> getBoundTypeParameters() { return boundTypeParameters; } public int getArity() { return arity; } @Override public boolean equals(Object o) { if (this == o) { return true; } if (o == null || getClass() != o.getClass()) { return false; } SpecializedFunctionKey that = (SpecializedFunctionKey) o; return Objects.equals(arity, that.arity) && Objects.equals(boundTypeParameters, that.boundTypeParameters) && Objects.equals(function.getSignature(), that.function.getSignature()); } @Override public int hashCode() { return Objects.hash(function.getSignature(), boundTypeParameters, arity);<|fim▁hole|><|fim▁end|>
} }
<|file_name|>unique-assign-generic.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. <|fim▁hole|>fn f<T>(t: T) -> T { let t1 = t; t1 } pub fn main() { let t = f(box 100); assert_eq!(t, box 100); }<|fim▁end|>
#![allow(unknown_features)] #![feature(box_syntax)]
<|file_name|>PlayListItemAudioPanel.cpp<|end_file_name|><|fim▁begin|>/*************************************************************** * This source files comes from the xLights project * https://www.xlights.org * https://github.com/smeighan/xLights * See the github commit history for a record of contributing * developers. * Copyright claimed based on commit dates recorded in Github * License: https://github.com/smeighan/xLights/blob/master/License.txt **************************************************************/ #include "PlayListItemAudioPanel.h" #include "PlayListItemAudio.h" #include "PlayListDialog.h" #include "PlayListSimpleDialog.h" //(*InternalHeaders(PlayListItemAudioPanel) #include <wx/intl.h> #include <wx/string.h> //*) //(*IdInit(PlayListItemAudioPanel) const long PlayListItemAudioPanel::ID_STATICTEXT2 = wxNewId(); const long PlayListItemAudioPanel::ID_FILEPICKERCTRL2 = wxNewId(); const long PlayListItemAudioPanel::ID_CHECKBOX2 = wxNewId(); const long PlayListItemAudioPanel::ID_SLIDER1 = wxNewId(); const long PlayListItemAudioPanel::ID_CHECKBOX1 = wxNewId(); const long PlayListItemAudioPanel::ID_STATICTEXT4 = wxNewId(); const long PlayListItemAudioPanel::ID_SPINCTRL1 = wxNewId(); const long PlayListItemAudioPanel::ID_STATICTEXT3 = wxNewId(); const long PlayListItemAudioPanel::ID_TEXTCTRL1 = wxNewId(); //*) BEGIN_EVENT_TABLE(PlayListItemAudioPanel,wxPanel) //(*EventTable(PlayListItemAudioPanel) //*) END_EVENT_TABLE() class AudioFilePickerCtrl : public wxFilePickerCtrl { public: AudioFilePickerCtrl(wxWindow *parent, wxWindowID id, const wxString& path = wxEmptyString, const wxString& message = wxFileSelectorPromptStr, const wxString& wildcard = wxFileSelectorDefaultWildcardStr, const wxPoint& pos = wxDefaultPosition, const wxSize& size = wxDefaultSize, long style = wxFLP_DEFAULT_STYLE, const wxValidator& validator = wxDefaultValidator, const wxString& name = wxFilePickerCtrlNameStr) : wxFilePickerCtrl(parent, id, path, message, AUDIOFILES, pos, size, style, validator, name) {} virtual ~AudioFilePickerCtrl() {} }; PlayListItemAudioPanel::PlayListItemAudioPanel(wxWindow* parent, PlayListItemAudio* audio, wxWindowID id,const wxPoint& pos,const wxSize& size) { _audio = audio; //(*Initialize(PlayListItemAudioPanel) wxFlexGridSizer* FlexGridSizer1; Create(parent, id, wxDefaultPosition, wxDefaultSize, wxTAB_TRAVERSAL, _T("id")); FlexGridSizer1 = new wxFlexGridSizer(0, 2, 0, 0); FlexGridSizer1->AddGrowableCol(1); StaticText2 = new wxStaticText(this, ID_STATICTEXT2, _("Audio File:"), wxDefaultPosition, wxDefaultSize, 0, _T("ID_STATICTEXT2")); FlexGridSizer1->Add(StaticText2, 1, wxALL|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5); FilePickerCtrl_AudioFile = new AudioFilePickerCtrl(this, ID_FILEPICKERCTRL2, wxEmptyString, _("Audio File"), wxEmptyString, wxDefaultPosition, wxDefaultSize, wxFLP_FILE_MUST_EXIST|wxFLP_OPEN|wxFLP_USE_TEXTCTRL, wxDefaultValidator, _T("ID_FILEPICKERCTRL2")); FlexGridSizer1->Add(FilePickerCtrl_AudioFile, 1, wxALL|wxEXPAND, 5); FlexGridSizer1->Add(-1,-1,1, wxALL|wxALIGN_CENTER_HORIZONTAL|wxALIGN_CENTER_VERTICAL, 5); CheckBox_OverrideVolume = new wxCheckBox(this, ID_CHECKBOX2, _("Override Volume"), wxDefaultPosition, wxDefaultSize, 0, wxDefaultValidator, _T("ID_CHECKBOX2")); CheckBox_OverrideVolume->SetValue(false); FlexGridSizer1->Add(CheckBox_OverrideVolume, 1, wxALL|wxEXPAND, 5); FlexGridSizer1->Add(-1,-1,1, wxALL|wxALIGN_CENTER_HORIZONTAL|wxALIGN_CENTER_VERTICAL, 5); Slider1 = new wxSlider(this, ID_SLIDER1, 100, 0, 100, wxDefaultPosition, wxDefaultSize, 0, 
wxDefaultValidator, _T("ID_SLIDER1")); FlexGridSizer1->Add(Slider1, 1, wxALL|wxEXPAND, 5); FlexGridSizer1->Add(-1,-1,1, wxALL|wxALIGN_CENTER_HORIZONTAL|wxALIGN_CENTER_VERTICAL, 5); CheckBox_FastStartAudio = new wxCheckBox(this, ID_CHECKBOX1, _("Fast Start Audio"), wxDefaultPosition, wxDefaultSize, 0, wxDefaultValidator, _T("ID_CHECKBOX1")); CheckBox_FastStartAudio->SetValue(false); FlexGridSizer1->Add(CheckBox_FastStartAudio, 1, wxALL|wxEXPAND, 5); StaticText4 = new wxStaticText(this, ID_STATICTEXT4, _("Priority:"), wxDefaultPosition, wxDefaultSize, 0, _T("ID_STATICTEXT4")); FlexGridSizer1->Add(StaticText4, 1, wxALL|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5); SpinCtrl_Priority = new wxSpinCtrl(this, ID_SPINCTRL1, _T("5"), wxDefaultPosition, wxDefaultSize, 0, 1, 10, 5, _T("ID_SPINCTRL1")); SpinCtrl_Priority->SetValue(_T("5")); FlexGridSizer1->Add(SpinCtrl_Priority, 1, wxALL|wxEXPAND, 5); StaticText3 = new wxStaticText(this, ID_STATICTEXT3, _("Delay:"), wxDefaultPosition, wxDefaultSize, 0, _T("ID_STATICTEXT3")); FlexGridSizer1->Add(StaticText3, 1, wxALL|wxALIGN_LEFT|wxALIGN_CENTER_VERTICAL, 5); TextCtrl_Delay = new wxTextCtrl(this, ID_TEXTCTRL1, _("0.000"), wxDefaultPosition, wxDefaultSize, wxTE_RIGHT, wxDefaultValidator, _T("ID_TEXTCTRL1")); FlexGridSizer1->Add(TextCtrl_Delay, 1, wxALL|wxEXPAND, 5); SetSizer(FlexGridSizer1); FlexGridSizer1->Fit(this); FlexGridSizer1->SetSizeHints(this); Connect(ID_FILEPICKERCTRL2,wxEVT_COMMAND_FILEPICKER_CHANGED,(wxObjectEventFunction)&PlayListItemAudioPanel::OnFilePickerCtrl2FileChanged); Connect(ID_CHECKBOX2,wxEVT_COMMAND_CHECKBOX_CLICKED,(wxObjectEventFunction)&PlayListItemAudioPanel::OnCheckBox_OverrideVolumeClick); //*) FilePickerCtrl_AudioFile->SetFileName(wxFileName(audio->GetAudioFile())); TextCtrl_Delay->SetValue(wxString::Format(wxT("%.3f"), (float)audio->GetDelay() / 1000.0)); CheckBox_FastStartAudio->SetValue(audio->GetFastStartAudio()); if (audio->GetVolume() != -1) { CheckBox_OverrideVolume->SetValue(true); Slider1->SetValue(audio->GetVolume()); } else { CheckBox_OverrideVolume->SetValue(false); } ValidateWindow(); } PlayListItemAudioPanel::~PlayListItemAudioPanel() { //(*Destroy(PlayListItemAudioPanel) //*) _audio->SetDelay(wxAtof(TextCtrl_Delay->GetValue()) * 1000); _audio->SetFastStartAudio(CheckBox_FastStartAudio->GetValue()); if (CheckBox_OverrideVolume->GetValue()) { _audio->SetVolume(Slider1->GetValue()); } else { _audio->SetVolume(-1); } _audio->SetAudioFile(FilePickerCtrl_AudioFile->GetFileName().GetFullPath().ToStdString()); } void PlayListItemAudioPanel::OnTextCtrl_DelayText(wxCommandEvent& event) { } void PlayListItemAudioPanel::OnFilePickerCtrl2FileChanged(wxFileDirPickerEvent& event) { _audio->SetAudioFile(FilePickerCtrl_AudioFile->GetFileName().GetFullPath().ToStdString());<|fim▁hole|> wxPostEvent(GetParent()->GetParent()->GetParent()->GetParent(), e); } void PlayListItemAudioPanel::ValidateWindow() { if (CheckBox_OverrideVolume->GetValue()) { Slider1->Enable(); } else { Slider1->Enable(false); } } void PlayListItemAudioPanel::OnCheckBox_OverrideVolumeClick(wxCommandEvent& event) { ValidateWindow(); }<|fim▁end|>
wxCommandEvent e(EVT_UPDATEITEMNAME);
<|file_name|>z3str.cpp<|end_file_name|><|fim▁begin|>/*
 * Copyright 2012 Aarhus University
 *
 * Licensed under the GNU General Public License, Version 3 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.gnu.org/licenses/gpl-3.0.html
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include <assert.h>
#include <cstdlib>
#include <iostream>
#include <ostream><|fim▁hole|>#include <cstdlib>
#include <math.h>

#include <QDebug>
#include <QDateTime>

#include "util/loggingutil.h"
#include "statistics/statsstorage.h"

#include "z3str.h"

namespace artemis
{

Z3STRConstraintWriter::Z3STRConstraintWriter(ConcolicBenchmarkFeatures disabledFeatures)
    : SMTConstraintWriter(disabledFeatures)
{
}

std::string Z3STRConstraintWriter::ifLabel()
{
    return "if";
}

void Z3STRConstraintWriter::visit(Symbolic::SymbolicString* symbolicstring, void* args)
{
    // If we are coercing from an input (string) to an integer, then this is a special case.
    // Instead of returning a symbolic string (which would raise an error) we just silently ignore the coercion and record
    // the variable as an integer instead of a string.
    if(args != NULL) {
        CoercionPromise* promise = (CoercionPromise*)args;

        if (promise->coerceTo == Symbolic::INT) {
            promise->isCoerced = true;

            recordAndEmitType(symbolicstring->getSource(), Symbolic::INT);
            mExpressionBuffer = encodeIdentifier(symbolicstring->getSource().getIdentifier());
            mExpressionType = Symbolic::INT;

            return;
        }
    }

    // Checks this symbolic value is of type STRING and raises an error otherwise.
    recordAndEmitType(symbolicstring->getSource(), Symbolic::STRING);

    mExpressionBuffer = encodeIdentifier(symbolicstring->getSource().getIdentifier());
    mExpressionType = Symbolic::STRING;
}

void Z3STRConstraintWriter::visit(Symbolic::ConstantString* constantstring, void* args)
{
    std::ostringstream strs;
    strs << "\"" << *constantstring->getValue() << "\"";
    mExpressionBuffer = strs.str();
    mExpressionType = Symbolic::STRING;
}

void Z3STRConstraintWriter::visit(Symbolic::StringBinaryOperation* stringbinaryoperation, void* args)
{
    static const char* op[] = {
        "(Concat ", "(= ", "(= (= ", "_", "_", "_", "_", "(= ", "(= (= "
    };

    static const char* opclose[] = {
        ")", ")", ") false)", "_", "_", "_", "_", ")", ") false)"
    };

    switch (stringbinaryoperation->getOp()) {
    case Symbolic::CONCAT:
    case Symbolic::STRING_EQ:
    case Symbolic::STRING_NEQ:
    case Symbolic::STRING_SEQ:
    case Symbolic::STRING_SNEQ:
        break; // these are supported
    default:
        error("Unsupported operation on strings");
        return;
    }

    stringbinaryoperation->getLhs()->accept(this);
    std::string lhs = mExpressionBuffer;
    if(!checkType(Symbolic::STRING)){
        error("String operation with incorrectly typed LHS");
        return;
    }

    stringbinaryoperation->getRhs()->accept(this);
    std::string rhs = mExpressionBuffer;
    if(!checkType(Symbolic::STRING)){
        error("String operation with incorrectly typed RHS");
        return;
    }

    std::ostringstream strs;
    strs << op[stringbinaryoperation->getOp()] << lhs << " " << rhs << opclose[stringbinaryoperation->getOp()];
    mExpressionBuffer = strs.str();
    mExpressionType = opGetType(stringbinaryoperation->getOp());
}

void Z3STRConstraintWriter::visit(Symbolic::StringCoercion* stringcoercion, void* args)
{
    CoercionPromise promise(Symbolic::STRING);
    stringcoercion->getExpression()->accept(this);

    if (!promise.isCoerced) {
        coercetype(mExpressionType, Symbolic::STRING, mExpressionBuffer); // Sets mExpressionBuffer and Type.
    }
}

void Z3STRConstraintWriter::visit(Symbolic::StringRegexReplace* regex, void* args)
{
    // special case input filtering (filters matching X and replacing with "")
    if (regex->getReplace()->compare("") == 0) {

        // right now, only support a very limited number of whitespace filters
        bool replaceSpaces = regex->getRegexpattern()->compare("/ /g") == 0 ||
            regex->getRegexpattern()->compare("/ /") == 0;
        bool replaceNewlines = regex->getRegexpattern()->compare("/\\n/g") == 0 ||
            regex->getRegexpattern()->compare("/\\r/") == 0 ||
            regex->getRegexpattern()->compare("/\\r\\n/") == 0;

        if (replaceSpaces || replaceNewlines || true) { // TODO: Hack, always filter away these for now
            regex->getSource()->accept(this, args); // send args through, allow local coercions

            // You could use the following block to prevent certain characters to be used,
            // but this would be problematic wrt. possible coercions, so we just ignore these filtering regexes.
            //if(!checkType(Symbolic::STRING)){
            //    error("String regex operation on non-string");
            //    return;
            //}
            //
            //if(replaceSpaces){
            //    mOutput << "(assert (= (Contains " << mExpressionBuffer << " \" \") false))\n";
            //    mConstriantLog << "(assert (= (Contains " << mExpressionBuffer << " \" \") false))\n";
            //}

            // In fact the solver currently cannot return results which contain newlines,
            // so we can completely ignore the case of replaceNewlines.
            // to be explicit, we just let the parent buffer flow down
            mExpressionBuffer = mExpressionBuffer;
            mExpressionType = mExpressionType;

            Statistics::statistics()->accumulate("Concolic::Solver::RegexSuccessfullyTranslated", 1);
            return;
        }
    }

    Statistics::statistics()->accumulate("Concolic::Solver::RegexNotTranslated", 1);
    error("Regex constraints not supported");
}

void Z3STRConstraintWriter::visit(Symbolic::StringReplace* replace, void* args)
{
    replace->getSource()->accept(this);
    if(!checkType(Symbolic::STRING)){
        error("String replace operation on non-string");
        return;
    }

    std::ostringstream strs;
    strs << "(Replace " << mExpressionBuffer << " \"" << *replace->getPattern() << "\" \"" << *replace->getReplace() << "\")";
    mExpressionBuffer = strs.str();
    mExpressionType = Symbolic::STRING;
}

void Z3STRConstraintWriter::visit(Symbolic::StringLength* stringlength, void* args)
{
    stringlength->getString()->accept(this);
    if(!checkType(Symbolic::STRING)){
        error("String length operation on non-string");
        return;
    }

    std::ostringstream strs;
    strs << "(Length " << mExpressionBuffer << ")";
    mExpressionBuffer = strs.str();
    mExpressionType = Symbolic::INT;
}

}<|fim▁end|>
#include <sstream>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from JumpScale import j
from GitFactory import GitFactory

j.base.loader.makeAvailable(j, 'clients')<|fim▁hole|><|fim▁end|>
j.clients.git = GitFactory()
<|file_name|>processor.rs<|end_file_name|><|fim▁begin|>use std::borrow::Cow; use std::collections::HashMap; use std::io::Write; use serde_json::{to_string_pretty, to_value, Number, Value}; use crate::context::{ValueRender, ValueTruthy}; use crate::errors::{Error, Result}; use crate::parser::ast::*; use crate::renderer::call_stack::CallStack; use crate::renderer::for_loop::ForLoop; use crate::renderer::macros::MacroCollection; use crate::renderer::square_brackets::pull_out_square_bracket; use crate::renderer::stack_frame::{FrameContext, FrameType, Val}; use crate::template::Template; use crate::tera::Tera; use crate::utils::render_to_string; use crate::Context; /// Special string indicating request to dump context static MAGICAL_DUMP_VAR: &str = "__tera_context"; /// This will convert a Tera variable to a json pointer if it is possible by replacing /// the index with their evaluated stringified value fn evaluate_sub_variables<'a>(key: &str, call_stack: &CallStack<'a>) -> Result<String> { let sub_vars_to_calc = pull_out_square_bracket(key); let mut new_key = key.to_string(); for sub_var in &sub_vars_to_calc { // Translate from variable name to variable value match process_path(sub_var.as_ref(), call_stack) { Err(e) => { return Err(Error::msg(format!( "Variable {} can not be evaluated because: {}", key, e ))); } Ok(post_var) => { let post_var_as_str = match *post_var { Value::String(ref s) => s.to_string(), Value::Number(ref n) => n.to_string(), _ => { return Err(Error::msg(format!( "Only variables evaluating to String or Number can be used as \ index (`{}` of `{}`)", sub_var, key, ))); } }; // Rebuild the original key String replacing variable name with value let nk = new_key.clone(); let divider = "[".to_string() + sub_var + "]"; let mut the_parts = nk.splitn(2, divider.as_str()); new_key = the_parts.next().unwrap().to_string() + "." + post_var_as_str.as_ref() + the_parts.next().unwrap_or(""); } } } Ok(new_key .replace("/", "~1") // https://tools.ietf.org/html/rfc6901#section-3 .replace("['", ".\"") .replace("[\"", ".\"") .replace("[", ".") .replace("']", "\"") .replace("\"]", "\"") .replace("]", "")) } fn process_path<'a>(path: &str, call_stack: &CallStack<'a>) -> Result<Val<'a>> { if !path.contains('[') { match call_stack.lookup(path) { Some(v) => Ok(v), None => Err(Error::msg(format!( "Variable `{}` not found in context while rendering '{}'", path, call_stack.active_template().name ))), } } else { let full_path = evaluate_sub_variables(path, call_stack)?; match call_stack.lookup(full_path.as_ref()) { Some(v) => Ok(v), None => Err(Error::msg(format!( "Variable `{}` not found in context while rendering '{}': \ the evaluated version was `{}`. 
Maybe the index is out of bounds?", path, call_stack.active_template().name, full_path, ))), } } } /// Processes the ast and renders the output pub struct Processor<'a> { /// The template we're trying to render template: &'a Template, /// Root template of template to render - contains ast to use for rendering /// Can be the same as `template` if a template has no inheritance template_root: &'a Template, /// The Tera object with template details tera: &'a Tera, /// The call stack for processing call_stack: CallStack<'a>, /// The macros organised by template and namespaces macros: MacroCollection<'a>, /// If set, rendering should be escaped should_escape: bool, /// Used when super() is used in a block, to know where we are in our stack of /// definitions and for which block /// Vec<(block name, tpl_name, level)> blocks: Vec<(&'a str, &'a str, usize)>, } impl<'a> Processor<'a> { /// Create a new `Processor` that will do the rendering pub fn new( template: &'a Template, tera: &'a Tera, context: &'a Context, should_escape: bool, ) -> Self { // Gets the root template if we are rendering something with inheritance or just return // the template we're dealing with otherwise let template_root = template .parents .last() .map(|parent| tera.get_template(parent).unwrap()) .unwrap_or(template); let call_stack = CallStack::new(&context, template); Processor { template, template_root, tera, call_stack, macros: MacroCollection::from_original_template(&template, &tera), should_escape, blocks: Vec::new(), } } fn render_body(&mut self, body: &'a [Node], write: &mut impl Write) -> Result<()> { for n in body { self.render_node(n, write)?; if self.call_stack.should_break_body() { break; } } Ok(()) } fn render_for_loop(&mut self, for_loop: &'a Forloop, write: &mut impl Write) -> Result<()> { let container_name = match for_loop.container.val { ExprVal::Ident(ref ident) => ident, ExprVal::FunctionCall(FunctionCall { ref name, .. 
}) => name, ExprVal::Array(_) => "an array literal", _ => return Err(Error::msg(format!( "Forloop containers have to be an ident or a function call (tried to iterate on '{:?}')", for_loop.container.val, ))), }; let for_loop_name = &for_loop.value; let for_loop_body = &for_loop.body; let for_loop_empty_body = &for_loop.empty_body; let container_val = self.safe_eval_expression(&for_loop.container)?; let for_loop = match *container_val { Value::Array(_) => { if for_loop.key.is_some() { return Err(Error::msg(format!( "Tried to iterate using key value on variable `{}`, but it isn't an object/map", container_name, ))); } ForLoop::from_array(&for_loop.value, container_val) } Value::String(_) => { if for_loop.key.is_some() { return Err(Error::msg(format!( "Tried to iterate using key value on variable `{}`, but it isn't an object/map", container_name, ))); } ForLoop::from_string(&for_loop.value, container_val) } Value::Object(_) => { if for_loop.key.is_none() { return Err(Error::msg(format!( "Tried to iterate using key value on variable `{}`, but it is missing a key", container_name, ))); } match container_val { Cow::Borrowed(c) => { ForLoop::from_object(&for_loop.key.as_ref().unwrap(), &for_loop.value, c) } Cow::Owned(c) => ForLoop::from_object_owned( &for_loop.key.as_ref().unwrap(), &for_loop.value, c, ), } } _ => { return Err(Error::msg(format!( "Tried to iterate on a container (`{}`) that has a unsupported type", container_name, ))); } }; let len = for_loop.len(); match (len, for_loop_empty_body) { (0, Some(empty_body)) => self.render_body(&empty_body, write), (0, _) => Ok(()), (_, _) => { self.call_stack.push_for_loop_frame(for_loop_name, for_loop); for _ in 0..len { self.render_body(&for_loop_body, write)?; if self.call_stack.should_break_for_loop() { break; } self.call_stack.increment_for_loop()?; } self.call_stack.pop(); Ok(()) } } } fn render_if_node(&mut self, if_node: &'a If, write: &mut impl Write) -> Result<()> { for &(_, ref expr, ref body) in &if_node.conditions { if self.eval_as_bool(expr)? { return self.render_body(body, write); } } if let Some((_, ref body)) = if_node.otherwise { return self.render_body(body, write); } Ok(()) } /// The way inheritance work is that the top parent will be rendered by the renderer so for blocks /// we want to look from the bottom (`level = 0`, the template the user is actually rendering) /// to the top (the base template). fn render_block( &mut self, block: &'a Block, level: usize, write: &mut impl Write, ) -> Result<()> { let level_template = match level { 0 => self.call_stack.active_template(), _ => self .tera .get_template(&self.call_stack.active_template().parents[level - 1]) .unwrap(), }; let blocks_definitions = &level_template.blocks_definitions; // Can we find this one block in these definitions? If so render it if let Some(block_def) = blocks_definitions.get(&block.name) { let (_, Block { ref body, .. }) = block_def[0]; self.blocks.push((&block.name[..], &level_template.name[..], level)); return self.render_body(body, write); } // Do we have more parents to look through? 
if level < self.call_stack.active_template().parents.len() { return self.render_block(block, level + 1, write); } // Nope, just render the body we got self.render_body(&block.body, write) } fn get_default_value(&mut self, expr: &'a Expr) -> Result<Val<'a>> { if let Some(default_expr) = expr.filters[0].args.get("value") { self.eval_expression(default_expr) } else { Err(Error::msg("The `default` filter requires a `value` argument.")) } } fn eval_in_condition(&mut self, in_cond: &'a In) -> Result<bool> { let lhs = self.eval_expression(&in_cond.lhs)?; let rhs = self.eval_expression(&in_cond.rhs)?; let present = match *rhs { Value::Array(ref v) => v.contains(&lhs), Value::String(ref s) => match *lhs { Value::String(ref s2) => s.contains(s2), _ => { return Err(Error::msg(format!( "Tried to check if {:?} is in a string, but it isn't a string", lhs ))) } }, Value::Object(ref map) => match *lhs { Value::String(ref s2) => map.contains_key(s2), _ => { return Err(Error::msg(format!( "Tried to check if {:?} is in a object, but it isn't a string", lhs ))) } }, _ => { return Err(Error::msg( "The `in` operator only supports strings, arrays and objects.", )) } }; Ok(if in_cond.negated { !present } else { present }) } fn eval_expression(&mut self, expr: &'a Expr) -> Result<Val<'a>> { let mut needs_escape = false; let mut res = match expr.val { ExprVal::Array(ref arr) => { let mut values = vec![]; for v in arr { values.push(self.eval_expression(v)?.into_owned()); } Cow::Owned(Value::Array(values)) } ExprVal::In(ref in_cond) => Cow::Owned(Value::Bool(self.eval_in_condition(in_cond)?)), ExprVal::String(ref val) => { needs_escape = true; Cow::Owned(Value::String(val.to_string())) } ExprVal::StringConcat(ref str_concat) => { let mut res = String::new(); for s in &str_concat.values { match *s { ExprVal::String(ref v) => res.push_str(&v), ExprVal::Int(ref v) => res.push_str(&format!("{}", v)), ExprVal::Float(ref v) => res.push_str(&format!("{}", v)), ExprVal::Ident(ref i) => match *self.lookup_ident(i)? { Value::String(ref v) => res.push_str(&v), Value::Number(ref v) => res.push_str(&v.to_string()), _ => return Err(Error::msg(format!(<|fim▁hole|> ))), }, ExprVal::FunctionCall(ref fn_call) => match *self.eval_tera_fn_call(fn_call, &mut needs_escape)? { Value::String(ref v) => res.push_str(&v), Value::Number(ref v) => res.push_str(&v.to_string()), _ => return Err(Error::msg(format!( "Tried to concat a value that is not a string or a number from function call {}", fn_call.name ))), }, _ => unreachable!(), }; } Cow::Owned(Value::String(res)) } ExprVal::Int(val) => Cow::Owned(Value::Number(val.into())), ExprVal::Float(val) => Cow::Owned(Value::Number(Number::from_f64(val).unwrap())), ExprVal::Bool(val) => Cow::Owned(Value::Bool(val)), ExprVal::Ident(ref ident) => { needs_escape = ident != MAGICAL_DUMP_VAR; // Negated idents are special cased as `not undefined_ident` should not // error but instead be falsy values match self.lookup_ident(ident) { Ok(val) => { if val.is_null() && expr.has_default_filter() { self.get_default_value(expr)? } else { val } } Err(e) => { if expr.has_default_filter() { self.get_default_value(expr)? } else { if !expr.negated { return Err(e); } // A negative undefined ident is !false so truthy return Ok(Cow::Owned(Value::Bool(true))); } } } } ExprVal::FunctionCall(ref fn_call) => { self.eval_tera_fn_call(fn_call, &mut needs_escape)? 
} ExprVal::MacroCall(ref macro_call) => { let val = render_to_string( || format!("macro {}", macro_call.name), |w| self.eval_macro_call(macro_call, w), )?; Cow::Owned(Value::String(val)) } ExprVal::Test(ref test) => Cow::Owned(Value::Bool(self.eval_test(test)?)), ExprVal::Logic(_) => Cow::Owned(Value::Bool(self.eval_as_bool(expr)?)), ExprVal::Math(_) => match self.eval_as_number(&expr.val) { Ok(Some(n)) => Cow::Owned(Value::Number(n)), Ok(None) => Cow::Owned(Value::String("NaN".to_owned())), Err(e) => return Err(Error::msg(e)), }, }; for filter in &expr.filters { if filter.name == "safe" || filter.name == "default" { continue; } res = self.eval_filter(&res, filter, &mut needs_escape)?; } // Lastly, we need to check if the expression is negated, thus turning it into a bool if expr.negated { return Ok(Cow::Owned(Value::Bool(!res.is_truthy()))); } // Checks if it's a string and we need to escape it (if the last filter is `safe` we don't) if self.should_escape && needs_escape && res.is_string() && !expr.is_marked_safe() { res = Cow::Owned( to_value(self.tera.get_escape_fn()(res.as_str().unwrap())).map_err(Error::json)?, ); } Ok(res) } /// Render an expression and never escape its result fn safe_eval_expression(&mut self, expr: &'a Expr) -> Result<Val<'a>> { let should_escape = self.should_escape; self.should_escape = false; let res = self.eval_expression(expr); self.should_escape = should_escape; res } /// Evaluate a set tag and add the value to the right context fn eval_set(&mut self, set: &'a Set) -> Result<()> { let assigned_value = self.safe_eval_expression(&set.value)?; self.call_stack.add_assignment(&set.key[..], set.global, assigned_value); Ok(()) } fn eval_test(&mut self, test: &'a Test) -> Result<bool> { let tester_fn = self.tera.get_tester(&test.name)?; let err_wrap = |e| Error::call_test(&test.name, e); let mut tester_args = vec![]; for arg in &test.args { tester_args .push(self.safe_eval_expression(arg).map_err(err_wrap)?.clone().into_owned()); } let found = self.lookup_ident(&test.ident).map(|found| found.clone().into_owned()).ok(); let result = tester_fn.test(found.as_ref(), &tester_args).map_err(err_wrap)?; if test.negated { Ok(!result) } else { Ok(result) } } fn eval_tera_fn_call( &mut self, function_call: &'a FunctionCall, needs_escape: &mut bool, ) -> Result<Val<'a>> { let tera_fn = self.tera.get_function(&function_call.name)?; *needs_escape = !tera_fn.is_safe(); let err_wrap = |e| Error::call_function(&function_call.name, e); let mut args = HashMap::new(); for (arg_name, expr) in &function_call.args { args.insert( arg_name.to_string(), self.safe_eval_expression(expr).map_err(err_wrap)?.clone().into_owned(), ); } Ok(Cow::Owned(tera_fn.call(&args).map_err(err_wrap)?)) } fn eval_macro_call(&mut self, macro_call: &'a MacroCall, write: &mut impl Write) -> Result<()> { let active_template_name = if let Some(block) = self.blocks.last() { block.1 } else if self.template.name != self.template_root.name { &self.template_root.name } else { &self.call_stack.active_template().name }; let (macro_template_name, macro_definition) = self.macros.lookup_macro( active_template_name, &macro_call.namespace[..], &macro_call.name[..], )?; let mut frame_context = FrameContext::with_capacity(macro_definition.args.len()); // First the default arguments for (arg_name, default_value) in &macro_definition.args { let value = match macro_call.args.get(arg_name) { Some(val) => self.safe_eval_expression(val)?, None => match *default_value { Some(ref val) => self.safe_eval_expression(val)?, None => { return 
Err(Error::msg(format!( "Macro `{}` is missing the argument `{}`", macro_call.name, arg_name ))); } }, }; frame_context.insert(&arg_name, value); } self.call_stack.push_macro_frame( &macro_call.namespace, &macro_call.name, frame_context, self.tera.get_template(macro_template_name)?, ); self.render_body(&macro_definition.body, write)?; self.call_stack.pop(); Ok(()) } fn eval_filter( &mut self, value: &Val<'a>, fn_call: &'a FunctionCall, needs_escape: &mut bool, ) -> Result<Val<'a>> { let filter_fn = self.tera.get_filter(&fn_call.name)?; *needs_escape = !filter_fn.is_safe(); let err_wrap = |e| Error::call_filter(&fn_call.name, e); let mut args = HashMap::new(); for (arg_name, expr) in &fn_call.args { args.insert( arg_name.to_string(), self.safe_eval_expression(expr).map_err(err_wrap)?.clone().into_owned(), ); } Ok(Cow::Owned(filter_fn.filter(&value, &args).map_err(err_wrap)?)) } fn eval_as_bool(&mut self, bool_expr: &'a Expr) -> Result<bool> { let res = match bool_expr.val { ExprVal::Logic(LogicExpr { ref lhs, ref rhs, ref operator }) => { match *operator { LogicOperator::Or => self.eval_as_bool(lhs)? || self.eval_as_bool(rhs)?, LogicOperator::And => self.eval_as_bool(lhs)? && self.eval_as_bool(rhs)?, LogicOperator::Gt | LogicOperator::Gte | LogicOperator::Lt | LogicOperator::Lte => { let l = self.eval_expr_as_number(lhs)?; let r = self.eval_expr_as_number(rhs)?; let (ll, rr) = match (l, r) { (Some(nl), Some(nr)) => (nl, nr), _ => return Err(Error::msg("Comparison to NaN")), }; match *operator { LogicOperator::Gte => ll.as_f64().unwrap() >= rr.as_f64().unwrap(), LogicOperator::Gt => ll.as_f64().unwrap() > rr.as_f64().unwrap(), LogicOperator::Lte => ll.as_f64().unwrap() <= rr.as_f64().unwrap(), LogicOperator::Lt => ll.as_f64().unwrap() < rr.as_f64().unwrap(), _ => unreachable!(), } } LogicOperator::Eq | LogicOperator::NotEq => { let mut lhs_val = self.eval_expression(lhs)?; let mut rhs_val = self.eval_expression(rhs)?; // Monomorphize number vals. if lhs_val.is_number() || rhs_val.is_number() { // We're not implementing JS so can't compare things of different types if !lhs_val.is_number() || !rhs_val.is_number() { return Ok(false); } lhs_val = Cow::Owned(Value::Number( Number::from_f64(lhs_val.as_f64().unwrap()).unwrap(), )); rhs_val = Cow::Owned(Value::Number( Number::from_f64(rhs_val.as_f64().unwrap()).unwrap(), )); } match *operator { LogicOperator::Eq => *lhs_val == *rhs_val, LogicOperator::NotEq => *lhs_val != *rhs_val, _ => unreachable!(), } } } } ExprVal::Ident(_) => { let mut res = self .eval_expression(&bool_expr) .unwrap_or(Cow::Owned(Value::Bool(false))) .is_truthy(); if bool_expr.negated { res = !res; } res } ExprVal::Math(_) | ExprVal::Int(_) | ExprVal::Float(_) => { match self.eval_as_number(&bool_expr.val)? 
{ Some(n) => n.as_f64().unwrap() != 0.0, None => false, } } ExprVal::In(ref in_cond) => self.eval_in_condition(&in_cond)?, ExprVal::Test(ref test) => self.eval_test(test)?, ExprVal::Bool(val) => val, ExprVal::String(ref string) => !string.is_empty(), ExprVal::FunctionCall(ref fn_call) => { let v = self.eval_tera_fn_call(fn_call, &mut false)?; match v.as_bool() { Some(val) => val, None => { return Err(Error::msg(format!( "Function `{}` was used in a logic operation but is not returning a bool", fn_call.name, ))); } } } ExprVal::StringConcat(_) => { let res = self.eval_expression(bool_expr)?; !res.as_str().unwrap().is_empty() } ExprVal::MacroCall(ref macro_call) => { let mut buf = Vec::new(); self.eval_macro_call(&macro_call, &mut buf)?; !buf.is_empty() } _ => unreachable!("unimplemented logic operation for {:?}", bool_expr), }; if bool_expr.negated { return Ok(!res); } Ok(res) } /// In some cases, we will have filters in lhs/rhs of a math expression /// `eval_as_number` only works on ExprVal rather than Expr fn eval_expr_as_number(&mut self, expr: &'a Expr) -> Result<Option<Number>> { if !expr.filters.is_empty() { match *self.eval_expression(expr)? { Value::Number(ref s) => Ok(Some(s.clone())), _ => { Err(Error::msg("Tried to do math with an expression not resulting in a number")) } } } else { self.eval_as_number(&expr.val) } } /// Return the value of an expression as a number fn eval_as_number(&mut self, expr: &'a ExprVal) -> Result<Option<Number>> { let result = match *expr { ExprVal::Ident(ref ident) => { let v = &*self.lookup_ident(ident)?; if v.is_i64() { Some(Number::from(v.as_i64().unwrap())) } else if v.is_u64() { Some(Number::from(v.as_u64().unwrap())) } else if v.is_f64() { Some(Number::from_f64(v.as_f64().unwrap()).unwrap()) } else { return Err(Error::msg(format!( "Variable `{}` was used in a math operation but is not a number", ident ))); } } ExprVal::Int(val) => Some(Number::from(val)), ExprVal::Float(val) => Some(Number::from_f64(val).unwrap()), ExprVal::Math(MathExpr { ref lhs, ref rhs, ref operator }) => { let (l, r) = match (self.eval_expr_as_number(lhs)?, self.eval_expr_as_number(rhs)?) 
{ (Some(l), Some(r)) => (l, r), _ => return Ok(None), }; match *operator { MathOperator::Mul => { if l.is_i64() && r.is_i64() { let ll = l.as_i64().unwrap(); let rr = r.as_i64().unwrap(); let res = match ll.checked_mul(rr) { Some(s) => s, None => { return Err(Error::msg(format!( "{} x {} results in an out of bounds i64", ll, rr ))); } }; Some(Number::from(res)) } else if l.is_u64() && r.is_u64() { let ll = l.as_u64().unwrap(); let rr = r.as_u64().unwrap(); let res = match ll.checked_mul(rr) { Some(s) => s, None => { return Err(Error::msg(format!( "{} x {} results in an out of bounds u64", ll, rr ))); } }; Some(Number::from(res)) } else { let ll = l.as_f64().unwrap(); let rr = r.as_f64().unwrap(); Number::from_f64(ll * rr) } } MathOperator::Div => { let ll = l.as_f64().unwrap(); let rr = r.as_f64().unwrap(); let res = ll / rr; if res.is_nan() { None } else { Number::from_f64(res) } } MathOperator::Add => { if l.is_i64() && r.is_i64() { let ll = l.as_i64().unwrap(); let rr = r.as_i64().unwrap(); let res = match ll.checked_add(rr) { Some(s) => s, None => { return Err(Error::msg(format!( "{} + {} results in an out of bounds i64", ll, rr ))); } }; Some(Number::from(res)) } else if l.is_u64() && r.is_u64() { let ll = l.as_u64().unwrap(); let rr = r.as_u64().unwrap(); let res = match ll.checked_add(rr) { Some(s) => s, None => { return Err(Error::msg(format!( "{} + {} results in an out of bounds u64", ll, rr ))); } }; Some(Number::from(res)) } else { let ll = l.as_f64().unwrap(); let rr = r.as_f64().unwrap(); Some(Number::from_f64(ll + rr).unwrap()) } } MathOperator::Sub => { if l.is_i64() && r.is_i64() { let ll = l.as_i64().unwrap(); let rr = r.as_i64().unwrap(); let res = match ll.checked_sub(rr) { Some(s) => s, None => { return Err(Error::msg(format!( "{} - {} results in an out of bounds i64", ll, rr ))); } }; Some(Number::from(res)) } else if l.is_u64() && r.is_u64() { let ll = l.as_u64().unwrap(); let rr = r.as_u64().unwrap(); let res = match ll.checked_sub(rr) { Some(s) => s, None => { return Err(Error::msg(format!( "{} - {} results in an out of bounds u64", ll, rr ))); } }; Some(Number::from(res)) } else { let ll = l.as_f64().unwrap(); let rr = r.as_f64().unwrap(); Some(Number::from_f64(ll - rr).unwrap()) } } MathOperator::Modulo => { if l.is_i64() && r.is_i64() { let ll = l.as_i64().unwrap(); let rr = r.as_i64().unwrap(); if rr == 0 { return Err(Error::msg(format!( "Tried to do a modulo by zero: {:?}/{:?}", lhs, rhs ))); } Some(Number::from(ll % rr)) } else if l.is_u64() && r.is_u64() { let ll = l.as_u64().unwrap(); let rr = r.as_u64().unwrap(); if rr == 0 { return Err(Error::msg(format!( "Tried to do a modulo by zero: {:?}/{:?}", lhs, rhs ))); } Some(Number::from(ll % rr)) } else { let ll = l.as_f64().unwrap(); let rr = r.as_f64().unwrap(); Number::from_f64(ll % rr) } } } } ExprVal::FunctionCall(ref fn_call) => { let v = self.eval_tera_fn_call(fn_call, &mut false)?; if v.is_i64() { Some(Number::from(v.as_i64().unwrap())) } else if v.is_u64() { Some(Number::from(v.as_u64().unwrap())) } else if v.is_f64() { Some(Number::from_f64(v.as_f64().unwrap()).unwrap()) } else { return Err(Error::msg(format!( "Function `{}` was used in a math operation but is not returning a number", fn_call.name ))); } } ExprVal::String(ref val) => { return Err(Error::msg(format!("Tried to do math with a string: `{}`", val))); } ExprVal::Bool(val) => { return Err(Error::msg(format!("Tried to do math with a boolean: `{}`", val))); } ExprVal::StringConcat(ref val) => { return Err(Error::msg(format!( "Tried to do math 
with a string concatenation: {}", val.to_template_string() ))); } ExprVal::Test(ref test) => { return Err(Error::msg(format!("Tried to do math with a test: {}", test.name))); } _ => unreachable!("unimplemented math expression for {:?}", expr), }; Ok(result) } /// Only called while rendering a block. /// This will look up the block we are currently rendering and its level and try to render /// the block at level + n, where would be the next template in the hierarchy the block is present fn do_super(&mut self, write: &mut impl Write) -> Result<()> { let &(block_name, _, level) = self.blocks.last().unwrap(); let mut next_level = level + 1; while next_level <= self.template.parents.len() { let blocks_definitions = &self .tera .get_template(&self.template.parents[next_level - 1]) .unwrap() .blocks_definitions; if let Some(block_def) = blocks_definitions.get(block_name) { let (ref tpl_name, Block { ref body, .. }) = block_def[0]; self.blocks.push((block_name, tpl_name, next_level)); self.render_body(body, write)?; self.blocks.pop(); // Can't go any higher for that block anymore? if next_level >= self.template.parents.len() { // then remove it from the stack, we're done with it self.blocks.pop(); } return Ok(()); } else { next_level += 1; } } Err(Error::msg("Tried to use super() in the top level block")) } /// Looks up identifier and returns its value fn lookup_ident(&self, key: &str) -> Result<Val<'a>> { // Magical variable that just dumps the context if key == MAGICAL_DUMP_VAR { // Unwraps are safe since we are dealing with things that are already Value return Ok(Cow::Owned( to_value( to_string_pretty(&self.call_stack.current_context_cloned().take()).unwrap(), ) .unwrap(), )); } process_path(key, &self.call_stack) } /// Process the given node, appending the string result to the buffer /// if it is possible fn render_node(&mut self, node: &'a Node, write: &mut impl Write) -> Result<()> { match *node { // Comments are ignored when rendering Node::Comment(_, _) => (), Node::Text(ref s) | Node::Raw(_, ref s, _) => write!(write, "{}", s)?, Node::VariableBlock(_, ref expr) => self.eval_expression(expr)?.render(write)?, Node::Set(_, ref set) => self.eval_set(set)?, Node::FilterSection(_, FilterSection { ref filter, ref body }, _) => { let body = render_to_string( || format!("filter {}", filter.name), |w| self.render_body(body, w), )?; // the safe filter doesn't actually exist if filter.name == "safe" { write!(write, "{}", body)?; } else { self.eval_filter(&Cow::Owned(Value::String(body)), filter, &mut false)? 
.render(write)?; } } // Macros have been imported at the beginning Node::ImportMacro(_, _, _) => (), Node::If(ref if_node, _) => self.render_if_node(if_node, write)?, Node::Forloop(_, ref forloop, _) => self.render_for_loop(forloop, write)?, Node::Break(_) => { self.call_stack.break_for_loop()?; } Node::Continue(_) => { self.call_stack.continue_for_loop()?; } Node::Block(_, ref block, _) => self.render_block(block, 0, write)?, Node::Super => self.do_super(write)?, Node::Include(_, ref tpl_names, ignore_missing) => { let mut found = false; for tpl_name in tpl_names { let template = self.tera.get_template(tpl_name); if template.is_err() { continue; } let template = template.unwrap(); self.macros.add_macros_from_template(&self.tera, template)?; self.call_stack.push_include_frame(tpl_name, template); self.render_body(&template.ast, write)?; self.call_stack.pop(); found = true; break; } if !found && !ignore_missing { return Err(Error::template_not_found( vec!["[", &tpl_names.join(", "), "]"].join(""), )); } } Node::Extends(_, ref name) => { return Err(Error::msg(format!( "Inheritance in included templates is currently not supported: extended `{}`", name ))); } // TODO: make that a compile time error Node::MacroDefinition(_, ref def, _) => { return Err(Error::invalid_macro_def(&def.name)); } }; Ok(()) } /// Helper fn that tries to find the current context: are we in a macro? in a parent template? /// in order to give the best possible error when getting an error when rendering a tpl fn get_error_location(&self) -> String { let mut error_location = format!("Failed to render '{}'", self.template.name); // in a macro? if self.call_stack.current_frame().kind == FrameType::Macro { let frame = self.call_stack.current_frame(); error_location += &format!( ": error while rendering macro `{}::{}`", frame.macro_namespace.expect("Macro namespace"), frame.name, ); } // which template are we in? if let Some(&(ref name, ref _template, ref level)) = self.blocks.last() { let block_def = self .template .blocks_definitions .get(&(*name).to_string()) .and_then(|b| b.get(*level)); if let Some(&(ref tpl_name, _)) = block_def { if tpl_name != &self.template.name { error_location += &format!(" (error happened in '{}').", tpl_name); } } else { error_location += " (error happened in a parent template)"; } } else if let Some(parent) = self.template.parents.last() { // Error happened in the base template, outside of blocks error_location += &format!(" (error happened in '{}').", parent); } error_location } /// Entry point for the rendering pub fn render(&mut self, write: &mut impl Write) -> Result<()> { for node in &self.template_root.ast { self.render_node(node, write) .map_err(|e| Error::chain(self.get_error_location(), e))?; } Ok(()) } }<|fim▁end|>
"Tried to concat a value that is not a string or a number from ident {}", i
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import from typing import Any from django.utils.translation import ugettext as _ from django.conf import settings from django.contrib.auth import authenticate, login, get_backends from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect, HttpResponseForbidden, HttpResponse from django.shortcuts import redirect from django.template import RequestContext, loader from django.utils.timezone import now from django.utils.cache import patch_cache_control from django.core.exceptions import ValidationError from django.core import validators from django.contrib.auth.views import login as django_login_page, \ logout_then_login as django_logout_then_login from django.forms.models import model_to_dict from django.core.mail import send_mail from django.middleware.csrf import get_token from zerver.models import Message, UserProfile, Stream, Subscription, Huddle, \ Recipient, Realm, UserMessage, DefaultStream, RealmEmoji, RealmAlias, \ RealmFilter, \ PreregistrationUser, get_client, MitUser, UserActivity, PushDeviceToken, \ get_stream, UserPresence, get_recipient, \ split_email_to_domain, resolve_email_to_domain, email_to_username, get_realm, \ completely_open, get_unique_open_realm, remote_user_to_email, email_allowed_for_realm from zerver.lib.actions import do_change_password, do_change_full_name, do_change_is_admin, \ do_activate_user, do_create_user, \ internal_send_message, update_user_presence, do_events_register, \ get_status_dict, do_change_enable_offline_email_notifications, \ do_change_enable_digest_emails, do_set_realm_name, do_set_realm_restricted_to_domain, \ do_set_realm_invite_required, do_set_realm_invite_by_admins_only, \ do_set_realm_create_stream_by_admins_only, get_default_subs, \ user_email_is_unique, do_invite_users, do_refer_friend, compute_mit_user_fullname, \ do_set_muted_topics, clear_followup_emails_queue, do_update_pointer, realm_user_count from zerver.lib.push_notifications import num_push_devices_for_user from zerver.forms import RegistrationForm, HomepageForm, ToSForm, \ CreateUserForm, is_inactive, OurAuthenticationForm from django.views.decorators.csrf import csrf_exempt from django_auth_ldap.backend import LDAPBackend, _LDAPUser from zerver.lib import bugdown from zerver.lib.validator import check_string, check_list, check_bool from zerver.decorator import require_post, authenticated_json_post_view, \ has_request_variables, authenticated_json_view, to_non_negative_int, \ JsonableError, get_user_profile_by_email, REQ, require_realm_admin, \ zulip_login_required from zerver.lib.avatar import avatar_url from zerver.lib.upload import upload_message_image_through_web_client, \ get_signed_upload_url, get_realm_for_filename from zerver.lib.response import json_success, json_error from zerver.lib.utils import statsd, generate_random_token from zproject.backends import password_auth_enabled, dev_auth_enabled from confirmation.models import Confirmation import requests import subprocess import calendar import datetime import ujson import simplejson import re from six.moves import urllib import base64 import time import logging import jwt import hashlib import hmac from zproject.jinja2 import render_to_response from zerver.lib.rest import rest_dispatch as _rest_dispatch rest_dispatch = csrf_exempt((lambda request, *args, **kwargs: _rest_dispatch(request, globals(), *args, **kwargs))) def name_changes_disabled(realm): return settings.NAME_CHANGES_DISABLED or 
realm.name_changes_disabled @require_post def accounts_register(request): key = request.POST['key'] confirmation = Confirmation.objects.get(confirmation_key=key) prereg_user = confirmation.content_object email = prereg_user.email mit_beta_user = isinstance(confirmation.content_object, MitUser) try: existing_user_profile = get_user_profile_by_email(email) except UserProfile.DoesNotExist: existing_user_profile = None validators.validate_email(email) unique_open_realm = get_unique_open_realm() if unique_open_realm: realm = unique_open_realm domain = realm.domain elif not mit_beta_user and prereg_user.referred_by: # If someone invited you, you are joining their realm regardless # of your e-mail address. # # MitUsers can't be referred and don't have a referred_by field. realm = prereg_user.referred_by.realm domain = realm.domain if not email_allowed_for_realm(email, realm): return render_to_response("zerver/closed_realm.html", {"closed_domain_name": realm.name}) elif not mit_beta_user and prereg_user.realm: # You have a realm set, even though nobody referred you. This # happens if you sign up through a special URL for an open # realm. domain = prereg_user.realm.domain realm = get_realm(domain) else: domain = resolve_email_to_domain(email) realm = get_realm(domain) if realm and realm.deactivated: # The user is trying to register for a deactivated realm. Advise them to # contact support. return render_to_response("zerver/deactivated.html", {"deactivated_domain_name": realm.name, "zulip_administrator": settings.ZULIP_ADMINISTRATOR}) try: if existing_user_profile is not None and existing_user_profile.is_mirror_dummy: # Mirror dummy users to be activated must be inactive is_inactive(email) else: # Other users should not already exist at all. user_email_is_unique(email) except ValidationError: return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.parse.quote_plus(email)) name_validated = False full_name = None if request.POST.get('from_confirmation'): try: del request.session['authenticated_full_name'] except KeyError: pass if domain == "mit.edu": hesiod_name = compute_mit_user_fullname(email) form = RegistrationForm( initial={'full_name': hesiod_name if "@" not in hesiod_name else ""}) name_validated = True elif settings.POPULATE_PROFILE_VIA_LDAP: for backend in get_backends(): if isinstance(backend, LDAPBackend): ldap_attrs = _LDAPUser(backend, backend.django_to_ldap_username(email)).attrs try: request.session['authenticated_full_name'] = ldap_attrs[settings.AUTH_LDAP_USER_ATTR_MAP['full_name']][0] name_validated = True # We don't use initial= here, because if the form is # complete (that is, no additional fields need to be # filled out by the user) we want the form to validate, # so they can be directly registered without having to # go through this interstitial. form = RegistrationForm( {'full_name': request.session['authenticated_full_name']}) # FIXME: This will result in the user getting # validation errors if they have to enter a password. # Not relevant for ONLY_SSO, though. break except TypeError: # Let the user fill out a name and/or try another backend form = RegistrationForm() elif 'full_name' in request.POST: form = RegistrationForm( initial={'full_name': request.POST.get('full_name')} ) else: form = RegistrationForm() else: postdata = request.POST.copy() if name_changes_disabled(realm): # If we populate profile information via LDAP and we have a # verified name from you on file, use that. Otherwise, fall # back to the full name in the request. 
try: postdata.update({'full_name': request.session['authenticated_full_name']}) name_validated = True except KeyError: pass form = RegistrationForm(postdata) if not password_auth_enabled(realm): form['password'].field.required = False if form.is_valid(): if password_auth_enabled(realm): password = form.cleaned_data['password'] else: # SSO users don't need no passwords password = None full_name = form.cleaned_data['full_name'] short_name = email_to_username(email) first_in_realm = len(UserProfile.objects.filter(realm=realm, is_bot=False)) == 0 # FIXME: sanitize email addresses and fullname if existing_user_profile is not None and existing_user_profile.is_mirror_dummy: try: user_profile = existing_user_profile do_activate_user(user_profile) do_change_password(user_profile, password) do_change_full_name(user_profile, full_name) except UserProfile.DoesNotExist: user_profile = do_create_user(email, password, realm, full_name, short_name, prereg_user=prereg_user, newsletter_data={"IP": request.META['REMOTE_ADDR']}) else: user_profile = do_create_user(email, password, realm, full_name, short_name, prereg_user=prereg_user, newsletter_data={"IP": request.META['REMOTE_ADDR']}) # This logs you in using the ZulipDummyBackend, since honestly nothing # more fancy than this is required. login(request, authenticate(username=user_profile.email, use_dummy_backend=True)) if first_in_realm: do_change_is_admin(user_profile, True) return HttpResponseRedirect(reverse('zerver.views.initial_invite_page')) else: return HttpResponseRedirect(reverse('zerver.views.home')) return render_to_response('zerver/register.html', {'form': form, 'company_name': domain, 'email': email, 'key': key, 'full_name': request.session.get('authenticated_full_name', None), 'lock_name': name_validated and name_changes_disabled(realm), # password_auth_enabled is normally set via our context processor, # but for the registration form, there is no logged in user yet, so # we have to set it here. 
'password_auth_enabled': password_auth_enabled(realm), }, request=request) @zulip_login_required def accounts_accept_terms(request): email = request.user.email domain = resolve_email_to_domain(email) if request.method == "POST": form = ToSForm(request.POST) if form.is_valid(): full_name = form.cleaned_data['full_name'] send_mail('Terms acceptance for ' + full_name, loader.render_to_string('zerver/tos_accept_body.txt', {'name': full_name, 'email': email, 'ip': request.META['REMOTE_ADDR'], 'browser': request.META.get('HTTP_USER_AGENT', "Unspecified")}), settings.EMAIL_HOST_USER, ["[email protected]"]) do_change_full_name(request.user, full_name) return redirect(home) else: form = ToSForm() return render_to_response('zerver/accounts_accept_terms.html', { 'form': form, 'company_name': domain, 'email': email }, request=request) from zerver.lib.ccache import make_ccache @authenticated_json_view @has_request_variables def webathena_kerberos_login(request, user_profile, cred=REQ(default=None)): if cred is None: return json_error(_("Could not find Kerberos credential")) if not user_profile.realm.domain == "mit.edu": return json_error(_("Webathena login only for mit.edu realm")) try: parsed_cred = ujson.loads(cred) user = parsed_cred["cname"]["nameString"][0] if user == "golem": # Hack for an mit.edu user whose Kerberos username doesn't<|fim▁hole|> # match what he zephyrs as user = "ctl" assert(user == user_profile.email.split("@")[0]) ccache = make_ccache(parsed_cred) except Exception: return json_error(_("Invalid Kerberos cache")) # TODO: Send these data via (say) rabbitmq try: subprocess.check_call(["ssh", "[email protected]", "--", "/home/zulip/zulip/bots/process_ccache", user, user_profile.api_key, base64.b64encode(ccache)]) except Exception: logging.exception("Error updating the user's ccache") return json_error(_("We were unable to setup mirroring for you")) return json_success() def api_endpoint_docs(request): raw_calls = open('templates/zerver/api_content.json', 'r').read() calls = ujson.loads(raw_calls) langs = set() for call in calls: call["endpoint"] = "%s/v1/%s" % (settings.EXTERNAL_API_URI, call["endpoint"]) call["example_request"]["curl"] = call["example_request"]["curl"].replace("https://api.zulip.com", settings.EXTERNAL_API_URI) response = call['example_response'] if '\n' not in response: # For 1-line responses, pretty-print them extended_response = response.replace(", ", ",\n ") else: extended_response = response call['rendered_response'] = bugdown.convert("~~~ .py\n" + extended_response + "\n~~~\n", "default") for example_type in ('request', 'response'): for lang in call.get('example_' + example_type, []): langs.add(lang) return render_to_response( 'zerver/api_endpoints.html', { 'content': calls, 'langs': langs, }, request=request) @authenticated_json_post_view @has_request_variables def json_invite_users(request, user_profile, invitee_emails=REQ()): if not invitee_emails: return json_error(_("You must specify at least one email address.")) invitee_emails = set(re.split(r'[, \n]', invitee_emails)) stream_names = request.POST.getlist('stream') if not stream_names: return json_error(_("You must specify at least one stream for invitees to join.")) # We unconditionally sub you to the notifications stream if it # exists and is public. 
notifications_stream = user_profile.realm.notifications_stream if notifications_stream and not notifications_stream.invite_only: stream_names.append(notifications_stream.name) streams = [] for stream_name in stream_names: stream = get_stream(stream_name, user_profile.realm) if stream is None: return json_error(_("Stream does not exist: %s. No invites were sent.") % (stream_name,)) streams.append(stream) ret_error, error_data = do_invite_users(user_profile, invitee_emails, streams) if ret_error is not None: return json_error(data=error_data, msg=ret_error) else: return json_success() def create_homepage_form(request, user_info=None): if user_info: return HomepageForm(user_info, domain=request.session.get("domain")) # An empty fields dict is not treated the same way as not # providing it. return HomepageForm(domain=request.session.get("domain")) def maybe_send_to_registration(request, email, full_name=''): form = create_homepage_form(request, user_info={'email': email}) request.verified_email = None if form.is_valid(): # Construct a PreregistrationUser object and send the user over to # the confirmation view. prereg_user = None if settings.ONLY_SSO: try: prereg_user = PreregistrationUser.objects.filter(email__iexact=email).latest("invited_at") except PreregistrationUser.DoesNotExist: prereg_user = create_preregistration_user(email, request) else: prereg_user = create_preregistration_user(email, request) return redirect("".join(( settings.EXTERNAL_URI_SCHEME, request.get_host(), "/", # Split this so we only get the part after the / Confirmation.objects.get_link_for_object(prereg_user).split("/", 3)[3], '?full_name=', # urllib does not handle Unicode, so coerece to encoded byte string # Explanation: http://stackoverflow.com/a/5605354/90777 urllib.parse.quote_plus(full_name.encode('utf8'))))) else: return render_to_response('zerver/accounts_home.html', {'form': form}, request=request) def login_or_register_remote_user(request, remote_username, user_profile, full_name=''): if user_profile is None or user_profile.is_mirror_dummy: # Since execution has reached here, the client specified a remote user # but no associated user account exists. Send them over to the # PreregistrationUser flow. 
return maybe_send_to_registration(request, remote_user_to_email(remote_username), full_name) else: login(request, user_profile) return HttpResponseRedirect("%s%s" % (settings.EXTERNAL_URI_SCHEME, request.get_host())) def remote_user_sso(request): try: remote_user = request.META["REMOTE_USER"] except KeyError: raise JsonableError(_("No REMOTE_USER set.")) user_profile = authenticate(remote_user=remote_user) return login_or_register_remote_user(request, remote_user, user_profile) @csrf_exempt def remote_user_jwt(request): try: json_web_token = request.POST["json_web_token"] payload, signing_input, header, signature = jwt.load(json_web_token) except KeyError: raise JsonableError(_("No JSON web token passed in request")) except jwt.DecodeError: raise JsonableError(_("Bad JSON web token")) remote_user = payload.get("user", None) if remote_user is None: raise JsonableError(_("No user specified in JSON web token claims")) domain = payload.get('realm', None) if domain is None: raise JsonableError(_("No domain specified in JSON web token claims")) email = "%s@%s" % (remote_user, domain) try: jwt.verify_signature(payload, signing_input, header, signature, settings.JWT_AUTH_KEYS[domain]) # We do all the authentication we need here (otherwise we'd have to # duplicate work), but we need to call authenticate with some backend so # that the request.backend attribute gets set. user_profile = authenticate(username=email, use_dummy_backend=True) except (jwt.DecodeError, jwt.ExpiredSignature): raise JsonableError(_("Bad JSON web token signature")) except KeyError: raise JsonableError(_("Realm not authorized for JWT login")) except UserProfile.DoesNotExist: user_profile = None return login_or_register_remote_user(request, email, user_profile, remote_user) def google_oauth2_csrf(request, value): return hmac.new(get_token(request).encode('utf-8'), value, hashlib.sha256).hexdigest() def start_google_oauth2(request): uri = 'https://accounts.google.com/o/oauth2/auth?' 
cur_time = str(int(time.time())) csrf_state = '{}:{}'.format( cur_time, google_oauth2_csrf(request, cur_time), ) prams = { 'response_type': 'code', 'client_id': settings.GOOGLE_OAUTH2_CLIENT_ID, 'redirect_uri': ''.join(( settings.EXTERNAL_URI_SCHEME, request.get_host(), reverse('zerver.views.finish_google_oauth2'), )), 'scope': 'profile email', 'state': csrf_state, } return redirect(uri + urllib.parse.urlencode(prams)) # Workaround to support the Python-requests 1.0 transition of .json # from a property to a function requests_json_is_function = callable(requests.Response.json) def extract_json_response(resp): if requests_json_is_function: return resp.json() else: return resp.json def finish_google_oauth2(request): error = request.GET.get('error') if error == 'access_denied': return redirect('/') elif error is not None: logging.warning('Error from google oauth2 login %r', request.GET) return HttpResponse(status=400) value, hmac_value = request.GET.get('state').split(':') if hmac_value != google_oauth2_csrf(request, value): logging.warning('Google oauth2 CSRF error') return HttpResponse(status=400) resp = requests.post( 'https://www.googleapis.com/oauth2/v3/token', data={ 'code': request.GET.get('code'), 'client_id': settings.GOOGLE_OAUTH2_CLIENT_ID, 'client_secret': settings.GOOGLE_OAUTH2_CLIENT_SECRET, 'redirect_uri': ''.join(( settings.EXTERNAL_URI_SCHEME, request.get_host(), reverse('zerver.views.finish_google_oauth2'), )), 'grant_type': 'authorization_code', }, ) if resp.status_code == 400: logging.warning('User error converting Google oauth2 login to token: %r' % (resp.text,)) return HttpResponse(status=400) elif resp.status_code != 200: raise Exception('Could not convert google oauth2 code to access_token\r%r' % (resp.text,)) access_token = extract_json_response(resp)['access_token'] resp = requests.get( 'https://www.googleapis.com/plus/v1/people/me', params={'access_token': access_token} ) if resp.status_code == 400: logging.warning('Google login failed making info API call: %r' % (resp.text,)) return HttpResponse(status=400) elif resp.status_code != 200: raise Exception('Google login failed making API call\r%r' % (resp.text,)) body = extract_json_response(resp) try: full_name = body['name']['formatted'] except KeyError: # Only google+ users have a formated name. I am ignoring i18n here. full_name = u'{} {}'.format( body['name']['givenName'], body['name']['familyName'] ) for email in body['emails']: if email['type'] == 'account': break else: raise Exception('Google oauth2 account email not found %r' % (body,)) email_address = email['value'] user_profile = authenticate(username=email_address, use_dummy_backend=True) return login_or_register_remote_user(request, email_address, user_profile, full_name) def login_page(request, **kwargs): extra_context = kwargs.pop('extra_context', {}) if dev_auth_enabled(): # Development environments usually have only a few users, but # it still makes sense to limit how many users we render to # support performance testing with DevAuthBackend. 
MAX_DEV_BACKEND_USERS = 100 users_query = UserProfile.objects.select_related().filter(is_bot=False, is_active=True) users = users_query.order_by('email')[0:MAX_DEV_BACKEND_USERS] extra_context['direct_admins'] = [u.email for u in users if u.is_realm_admin] extra_context['direct_users'] = [u.email for u in users if not u.is_realm_admin] template_response = django_login_page( request, authentication_form=OurAuthenticationForm, extra_context=extra_context, **kwargs) try: template_response.context_data['email'] = request.GET['email'] except KeyError: pass return template_response def dev_direct_login(request, **kwargs): # This function allows logging in without a password and should only be called in development environments. # It may be called if the DevAuthBackend is included in settings.AUTHENTICATION_BACKENDS if (not dev_auth_enabled()) or settings.PRODUCTION: # This check is probably not required, since authenticate would fail without an enabled DevAuthBackend. raise Exception('Direct login not supported.') email = request.POST['direct_email'] user_profile = authenticate(username=email) if user_profile is None: raise Exception("User cannot login") login(request, user_profile) return HttpResponseRedirect("%s%s" % (settings.EXTERNAL_URI_SCHEME, request.get_host())) @authenticated_json_post_view @has_request_variables def json_bulk_invite_users(request, user_profile, invitee_emails=REQ(validator=check_list(check_string))): invitee_emails = set(invitee_emails) streams = get_default_subs(user_profile) ret_error, error_data = do_invite_users(user_profile, invitee_emails, streams) if ret_error is not None: return json_error(data=error_data, msg=ret_error) else: # Report bulk invites to internal Zulip. invited = PreregistrationUser.objects.filter(referred_by=user_profile) internal_message = "%s <`%s`> invited %d people to Zulip." % ( user_profile.full_name, user_profile.email, invited.count()) internal_send_message(settings.NEW_USER_BOT, "stream", "signups", user_profile.realm.domain, internal_message) return json_success() @zulip_login_required def initial_invite_page(request): user = request.user # Only show the bulk-invite page for the first user in a realm domain_count = len(UserProfile.objects.filter(realm=user.realm)) if domain_count > 1: return redirect('zerver.views.home') params = {'company_name': user.realm.domain} if (user.realm.restricted_to_domain): params['invite_suffix'] = user.realm.domain return render_to_response('zerver/initial_invite_page.html', params, request=request) @require_post def logout_then_login(request, **kwargs): return django_logout_then_login(request, kwargs) def create_preregistration_user(email, request): domain = request.session.get("domain") if completely_open(domain): # Clear the "domain" from the session object; it's no longer needed request.session["domain"] = None # The user is trying to sign up for a completely open realm, # so create them a PreregistrationUser for that realm return PreregistrationUser.objects.create(email=email, realm=get_realm(domain)) # MIT users who are not explicitly signing up for an open realm # require special handling (They may already have an (inactive) # account, for example) if split_email_to_domain(email) == "mit.edu": return MitUser.objects.get_or_create(email=email)[0] return PreregistrationUser.objects.create(email=email) def accounts_home_with_domain(request, domain): if completely_open(domain): # You can sign up for a completely open realm through a # special registration path that contains the domain in the # URL. 
We store this information in the session rather than # elsewhere because we don't have control over URL or form # data for folks registering through OpenID. request.session["domain"] = domain return accounts_home(request) else: return HttpResponseRedirect(reverse('zerver.views.accounts_home')) def send_registration_completion_email(email, request): """ Send an email with a confirmation link to the provided e-mail so the user can complete their registration. """ prereg_user = create_preregistration_user(email, request) context = {'support_email': settings.ZULIP_ADMINISTRATOR, 'voyager': settings.VOYAGER} Confirmation.objects.send_confirmation(prereg_user, email, additional_context=context) def accounts_home(request): if request.method == 'POST': form = create_homepage_form(request, user_info=request.POST) if form.is_valid(): email = form.cleaned_data['email'] send_registration_completion_email(email, request) return HttpResponseRedirect(reverse('send_confirm', kwargs={'email': email})) try: email = request.POST['email'] # Note: We don't check for uniqueness is_inactive(email) except ValidationError: return HttpResponseRedirect(reverse('django.contrib.auth.views.login') + '?email=' + urllib.parse.quote_plus(email)) else: form = create_homepage_form(request) return render_to_response('zerver/accounts_home.html', {'form': form, 'current_url': request.get_full_path}, request=request) def approximate_unread_count(user_profile): not_in_home_view_recipients = [sub.recipient.id for sub in \ Subscription.objects.filter( user_profile=user_profile, in_home_view=False)] muted_topics = ujson.loads(user_profile.muted_topics) # If muted_topics is empty, it looks like []. If it is non-empty, it look # like [[u'devel', u'test']]. We should switch to a consistent envelope, but # until we do we still have both in the database. if muted_topics: muted_topics = muted_topics[0] return UserMessage.objects.filter( user_profile=user_profile, message_id__gt=user_profile.pointer).exclude( message__recipient__type=Recipient.STREAM, message__recipient__id__in=not_in_home_view_recipients).exclude( message__subject__in=muted_topics).exclude( flags=UserMessage.flags.read).count() def sent_time_in_epoch_seconds(user_message): # user_message is a UserMessage object. if not user_message: return None # We have USE_TZ = True, so our datetime objects are timezone-aware. # Return the epoch seconds in UTC. return calendar.timegm(user_message.message.pub_date.utctimetuple()) @zulip_login_required def home(request): # We need to modify the session object every two weeks or it will expire. # This line makes reloading the page a sufficient action to keep the # session alive. 
request.session.modified = True user_profile = request.user request._email = request.user.email request.client = get_client("website") narrow = [] # type: List[List[str]] narrow_stream = None narrow_topic = request.GET.get("topic") if request.GET.get("stream"): try: narrow_stream = get_stream(request.GET.get("stream"), user_profile.realm) assert(narrow_stream is not None) assert(narrow_stream.is_public()) narrow = [["stream", narrow_stream.name]] except Exception: logging.exception("Narrow parsing") if narrow_topic is not None: narrow.append(["topic", narrow_topic]) register_ret = do_events_register(user_profile, request.client, apply_markdown=True, narrow=narrow) user_has_messages = (register_ret['max_message_id'] != -1) # Reset our don't-spam-users-with-email counter since the # user has since logged in if not user_profile.last_reminder is None: user_profile.last_reminder = None user_profile.save(update_fields=["last_reminder"]) # Brand new users get the tutorial needs_tutorial = settings.TUTORIAL_ENABLED and \ user_profile.tutorial_status != UserProfile.TUTORIAL_FINISHED first_in_realm = realm_user_count(user_profile.realm) == 1 # If you are the only person in the realm and you didn't invite # anyone, we'll continue to encourage you to do so on the frontend. prompt_for_invites = first_in_realm and \ not PreregistrationUser.objects.filter(referred_by=user_profile).count() if user_profile.pointer == -1 and user_has_messages: # Put the new user's pointer at the bottom # # This improves performance, because we limit backfilling of messages # before the pointer. It's also likely that someone joining an # organization is interested in recent messages more than the very # first messages on the system. register_ret['pointer'] = register_ret['max_message_id'] user_profile.last_pointer_updater = request.session.session_key if user_profile.pointer == -1: latest_read = None else: try: latest_read = UserMessage.objects.get(user_profile=user_profile, message__id=user_profile.pointer) except UserMessage.DoesNotExist: # Don't completely fail if your saved pointer ID is invalid logging.warning("%s has invalid pointer %s" % (user_profile.email, user_profile.pointer)) latest_read = None desktop_notifications_enabled = user_profile.enable_desktop_notifications if narrow_stream is not None: desktop_notifications_enabled = False if user_profile.realm.notifications_stream: notifications_stream = user_profile.realm.notifications_stream.name else: notifications_stream = "" # Pass parameters to the client-side JavaScript code. # These end up in a global JavaScript Object named 'page_params'. 
page_params = dict( voyager = settings.VOYAGER, debug_mode = settings.DEBUG, test_suite = settings.TEST_SUITE, poll_timeout = settings.POLL_TIMEOUT, login_page = settings.HOME_NOT_LOGGED_IN, maxfilesize = settings.MAX_FILE_UPLOAD_SIZE, password_auth_enabled = password_auth_enabled(user_profile.realm), have_initial_messages = user_has_messages, subbed_info = register_ret['subscriptions'], unsubbed_info = register_ret['unsubscribed'], email_dict = register_ret['email_dict'], people_list = register_ret['realm_users'], bot_list = register_ret['realm_bots'], initial_pointer = register_ret['pointer'], initial_presences = register_ret['presences'], initial_servertime = time.time(), # Used for calculating relative presence age fullname = user_profile.full_name, email = user_profile.email, domain = user_profile.realm.domain, realm_name = register_ret['realm_name'], realm_invite_required = register_ret['realm_invite_required'], realm_invite_by_admins_only = register_ret['realm_invite_by_admins_only'], realm_create_stream_by_admins_only = register_ret['realm_create_stream_by_admins_only'], realm_restricted_to_domain = register_ret['realm_restricted_to_domain'], enter_sends = user_profile.enter_sends, left_side_userlist = register_ret['left_side_userlist'], referrals = register_ret['referrals'], realm_emoji = register_ret['realm_emoji'], needs_tutorial = needs_tutorial, first_in_realm = first_in_realm, prompt_for_invites = prompt_for_invites, notifications_stream = notifications_stream, # Stream message notification settings: stream_desktop_notifications_enabled = user_profile.enable_stream_desktop_notifications, stream_sounds_enabled = user_profile.enable_stream_sounds, # Private message and @-mention notification settings: desktop_notifications_enabled = desktop_notifications_enabled, sounds_enabled = user_profile.enable_sounds, enable_offline_email_notifications = user_profile.enable_offline_email_notifications, enable_offline_push_notifications = user_profile.enable_offline_push_notifications, twenty_four_hour_time = register_ret['twenty_four_hour_time'], enable_digest_emails = user_profile.enable_digest_emails, event_queue_id = register_ret['queue_id'], last_event_id = register_ret['last_event_id'], max_message_id = register_ret['max_message_id'], unread_count = approximate_unread_count(user_profile), furthest_read_time = sent_time_in_epoch_seconds(latest_read), staging = settings.ZULIP_COM_STAGING or settings.DEVELOPMENT, alert_words = register_ret['alert_words'], muted_topics = register_ret['muted_topics'], realm_filters = register_ret['realm_filters'], is_admin = user_profile.is_realm_admin, can_create_streams = user_profile.can_create_streams(), name_changes_disabled = name_changes_disabled(user_profile.realm), has_mobile_devices = num_push_devices_for_user(user_profile) > 0, autoscroll_forever = user_profile.autoscroll_forever, default_desktop_notifications = user_profile.default_desktop_notifications, avatar_url = avatar_url(user_profile), mandatory_topics = user_profile.realm.mandatory_topics, show_digest_email = user_profile.realm.show_digest_email, ) if narrow_stream is not None: # In narrow_stream context, initial pointer is just latest message recipient = get_recipient(Recipient.STREAM, narrow_stream.id) try: initial_pointer = Message.objects.filter(recipient=recipient).order_by('id').reverse()[0].id except IndexError: initial_pointer = -1 page_params["narrow_stream"] = narrow_stream.name if narrow_topic is not None: page_params["narrow_topic"] = narrow_topic page_params["narrow"] = 
[dict(operator=term[0], operand=term[1]) for term in narrow] page_params["max_message_id"] = initial_pointer page_params["initial_pointer"] = initial_pointer page_params["have_initial_messages"] = (initial_pointer != -1) statsd.incr('views.home') show_invites = True # Some realms only allow admins to invite users if user_profile.realm.invite_by_admins_only and not user_profile.is_realm_admin: show_invites = False product_name = "Zulip" page_params['product_name'] = product_name request._log_data['extra'] = "[%s]" % (register_ret["queue_id"],) response = render_to_response('zerver/index.html', {'user_profile': user_profile, 'page_params' : simplejson.encoder.JSONEncoderForHTML().encode(page_params), 'nofontface': is_buggy_ua(request.META.get("HTTP_USER_AGENT", "Unspecified")), 'avatar_url': avatar_url(user_profile), 'show_debug': settings.DEBUG and ('show_debug' in request.GET), 'pipeline': settings.PIPELINE, 'show_invites': show_invites, 'is_admin': user_profile.is_realm_admin, 'show_webathena': user_profile.realm.domain == "mit.edu", 'enable_feedback': settings.ENABLE_FEEDBACK, 'embedded': narrow_stream is not None, 'product_name': product_name }, request=request) patch_cache_control(response, no_cache=True, no_store=True, must_revalidate=True) return response @zulip_login_required def desktop_home(request): return HttpResponseRedirect(reverse('zerver.views.home')) def is_buggy_ua(agent): """Discrimiate CSS served to clients based on User Agent Due to QTBUG-3467, @font-face is not supported in QtWebKit. This may get fixed in the future, but for right now we can just serve the more conservative CSS to all our desktop apps. """ return ("Humbug Desktop/" in agent or "Zulip Desktop/" in agent or "ZulipDesktop/" in agent) and \ "Mac" not in agent def get_pointer_backend(request, user_profile): return json_success({'pointer': user_profile.pointer}) @has_request_variables def update_pointer_backend(request, user_profile, pointer=REQ(converter=to_non_negative_int)): if pointer <= user_profile.pointer: return json_success() try: UserMessage.objects.get( user_profile=user_profile, message__id=pointer ) except UserMessage.DoesNotExist: raise JsonableError(_("Invalid message ID")) request._log_data["extra"] = "[%s]" % (pointer,) update_flags = (request.client.name.lower() in ['android', "zulipandroid"]) do_update_pointer(user_profile, pointer, update_flags=update_flags) return json_success() def generate_client_id(): return generate_random_token(32) # The order of creation of the various dictionaries are important. # We filter on {userprofile,stream,subscription_recipient}_ids. 
@require_realm_admin def export(request, user_profile): if (Message.objects.filter(sender__realm=user_profile.realm).count() > 1000000 or UserMessage.objects.filter(user_profile__realm=user_profile.realm).count() > 3000000): return json_error(_("Realm has too much data for non-batched export.")) response = {} response['zerver_realm'] = [model_to_dict(x) for x in Realm.objects.select_related().filter(id=user_profile.realm.id)] response['zerver_userprofile'] = [model_to_dict(x, exclude=["password", "api_key"]) for x in UserProfile.objects.select_related().filter(realm=user_profile.realm)] userprofile_ids = set(userprofile["id"] for userprofile in response['zerver_userprofile']) response['zerver_stream'] = [model_to_dict(x, exclude=["email_token"]) for x in Stream.objects.select_related().filter(realm=user_profile.realm, invite_only=False)] stream_ids = set(x["id"] for x in response['zerver_stream']) response['zerver_usermessage'] = [model_to_dict(x) for x in UserMessage.objects.select_related() if x.user_profile_id in userprofile_ids] user_recipients = [model_to_dict(x) for x in Recipient.objects.select_related().filter(type=1) if x.type_id in userprofile_ids] stream_recipients = [model_to_dict(x) for x in Recipient.objects.select_related().filter(type=2) if x.type_id in stream_ids] stream_recipient_ids = set(x["id"] for x in stream_recipients) # only check for subscriptions to streams response['zerver_subscription'] = [model_to_dict(x) for x in Subscription.objects.select_related() if x.user_profile_id in userprofile_ids and x.recipient_id in stream_recipient_ids] subscription_recipient_ids = set(x["recipient"] for x in response['zerver_subscription']) huddle_recipients = [model_to_dict(r) for r in Recipient.objects.select_related().filter(type=3) if r.type_id in subscription_recipient_ids] huddle_ids = set(x["type_id"] for x in huddle_recipients) response["zerver_recipient"] = user_recipients + stream_recipients + huddle_recipients response['zerver_huddle'] = [model_to_dict(h) for h in Huddle.objects.select_related() if h.id in huddle_ids] recipient_ids = set(x["id"] for x in response['zerver_recipient']) response["zerver_message"] = [model_to_dict(m) for m in Message.objects.select_related() if m.recipient_id in recipient_ids and m.sender_id in userprofile_ids] for (table, model) in [("defaultstream", DefaultStream), ("realmemoji", RealmEmoji), ("realmalias", RealmAlias), ("realmfilter", RealmFilter)]: response["zerver_"+table] = [model_to_dict(x) for x in model.objects.select_related().filter(realm_id=user_profile.realm.id)] # type: ignore return json_success(response) def get_profile_backend(request, user_profile): result = dict(pointer = user_profile.pointer, client_id = generate_client_id(), max_message_id = -1) messages = Message.objects.filter(usermessage__user_profile=user_profile).order_by('-id')[:1] if messages: result['max_message_id'] = messages[0].id return json_success(result) @require_realm_admin @has_request_variables def update_realm(request, user_profile, name=REQ(validator=check_string, default=None), restricted_to_domain=REQ(validator=check_bool, default=None), invite_required=REQ(validator=check_bool, default=None), invite_by_admins_only=REQ(validator=check_bool, default=None), create_stream_by_admins_only=REQ(validator=check_bool, default=None)): realm = user_profile.realm data = {} if name is not None and realm.name != name: do_set_realm_name(realm, name) data['name'] = 'updated' if restricted_to_domain is not None and realm.restricted_to_domain != 
restricted_to_domain: do_set_realm_restricted_to_domain(realm, restricted_to_domain) data['restricted_to_domain'] = restricted_to_domain if invite_required is not None and realm.invite_required != invite_required: do_set_realm_invite_required(realm, invite_required) data['invite_required'] = invite_required if invite_by_admins_only is not None and realm.invite_by_admins_only != invite_by_admins_only: do_set_realm_invite_by_admins_only(realm, invite_by_admins_only) data['invite_by_admins_only'] = invite_by_admins_only if create_stream_by_admins_only is not None and realm.create_stream_by_admins_only != create_stream_by_admins_only: do_set_realm_create_stream_by_admins_only(realm, create_stream_by_admins_only) data['create_stream_by_admins_only'] = create_stream_by_admins_only return json_success(data) @authenticated_json_post_view @has_request_variables def json_upload_file(request, user_profile): if len(request.FILES) == 0: return json_error(_("You must specify a file to upload")) if len(request.FILES) != 1: return json_error(_("You may only upload one file at a time")) user_file = list(request.FILES.values())[0] if ((settings.MAX_FILE_UPLOAD_SIZE * 1024 * 1024) < user_file._get_size()): return json_error(_("File Upload is larger than allowed limit")) uri = upload_message_image_through_web_client(request, user_file, user_profile) return json_success({'uri': uri}) @zulip_login_required @has_request_variables def get_uploaded_file(request, realm_id, filename, redir=REQ(validator=check_bool, default=True)): if settings.LOCAL_UPLOADS_DIR is not None: return HttpResponseForbidden() # Should have been served by nginx user_profile = request.user url_path = "%s/%s" % (realm_id, filename) if realm_id == "unk": realm_id = get_realm_for_filename(url_path) if realm_id is None: # File does not exist return json_error(_("That file does not exist."), status=404) # Internal users can access all uploads so we can receive attachments in cross-realm messages if user_profile.realm.id == int(realm_id) or user_profile.realm.domain == 'zulip.com': uri = get_signed_upload_url(url_path) if redir: return redirect(uri) else: return json_success({'uri': uri}) else: return HttpResponseForbidden() @csrf_exempt @require_post @has_request_variables def api_fetch_api_key(request, username=REQ(), password=REQ()): # type: (Any, Any, Any) -> Any return_data = {} # type: Dict[str, bool] if username == "google-oauth2-token": user_profile = authenticate(google_oauth2_token=password, return_data=return_data) else: user_profile = authenticate(username=username, password=password, return_data=return_data) if return_data.get("inactive_user") == True: return json_error(_("Your account has been disabled."), data={"reason": "user disable"}, status=403) if return_data.get("inactive_realm") == True: return json_error(_("Your realm has been deactivated."), data={"reason": "realm deactivated"}, status=403) if return_data.get("password_auth_disabled") == True: return json_error(_("Password auth is disabled in your team."), data={"reason": "password auth disabled"}, status=403) if user_profile is None: if return_data.get("valid_attestation") == True: # We can leak that the user is unregistered iff they present a valid authentication string for the user. 
return json_error(_("This user is not registered; do so from a browser."), data={"reason": "unregistered"}, status=403) return json_error(_("Your username or password is incorrect."), data={"reason": "incorrect_creds"}, status=403) return json_success({"api_key": user_profile.api_key, "email": user_profile.email}) @authenticated_json_post_view @has_request_variables def json_fetch_api_key(request, user_profile, password=REQ(default='')): if password_auth_enabled(user_profile.realm) and not user_profile.check_password(password): return json_error(_("Your username or password is incorrect.")) return json_success({"api_key": user_profile.api_key}) @csrf_exempt def api_fetch_google_client_id(request): if not settings.GOOGLE_CLIENT_ID: return json_error(_("GOOGLE_CLIENT_ID is not configured"), status=400) return json_success({"google_client_id": settings.GOOGLE_CLIENT_ID}) def get_status_list(requesting_user_profile): return {'presences': get_status_dict(requesting_user_profile), 'server_timestamp': time.time()} @has_request_variables def update_active_status_backend(request, user_profile, status=REQ(), new_user_input=REQ(validator=check_bool, default=False)): status_val = UserPresence.status_from_string(status) if status_val is None: raise JsonableError(_("Invalid presence status: %s") % (status,)) else: update_user_presence(user_profile, request.client, now(), status_val, new_user_input) ret = get_status_list(user_profile) if user_profile.realm.domain == "mit.edu": try: activity = UserActivity.objects.get(user_profile = user_profile, query="get_events_backend", client__name="zephyr_mirror") ret['zephyr_mirror_active'] = \ (activity.last_visit.replace(tzinfo=None) > datetime.datetime.utcnow() - datetime.timedelta(minutes=5)) except UserActivity.DoesNotExist: ret['zephyr_mirror_active'] = False return json_success(ret) @authenticated_json_post_view def json_get_active_statuses(request, user_profile): return json_success(get_status_list(user_profile)) # Does not need to be authenticated because it's called from rest_dispatch @has_request_variables def api_events_register(request, user_profile, apply_markdown=REQ(default=False, validator=check_bool), all_public_streams=REQ(default=None, validator=check_bool)): return events_register_backend(request, user_profile, apply_markdown=apply_markdown, all_public_streams=all_public_streams) def _default_all_public_streams(user_profile, all_public_streams): if all_public_streams is not None: return all_public_streams else: return user_profile.default_all_public_streams def _default_narrow(user_profile, narrow): default_stream = user_profile.default_events_register_stream if not narrow and user_profile.default_events_register_stream is not None: narrow = [('stream', default_stream.name)] return narrow @has_request_variables def events_register_backend(request, user_profile, apply_markdown=True, all_public_streams=None, event_types=REQ(validator=check_list(check_string), default=None), narrow=REQ(validator=check_list(check_list(check_string, length=2)), default=[]), queue_lifespan_secs=REQ(converter=int, default=0)): all_public_streams = _default_all_public_streams(user_profile, all_public_streams) narrow = _default_narrow(user_profile, narrow) ret = do_events_register(user_profile, request.client, apply_markdown, event_types, queue_lifespan_secs, all_public_streams, narrow=narrow) return json_success(ret) @authenticated_json_post_view @has_request_variables def json_refer_friend(request, user_profile, email=REQ()): if not email: return json_error(_("No 
email address specified")) if user_profile.invites_granted - user_profile.invites_used <= 0: return json_error(_("Insufficient invites")) do_refer_friend(user_profile, email); return json_success() @authenticated_json_post_view @has_request_variables def json_set_muted_topics(request, user_profile, muted_topics=REQ(validator=check_list(check_list(check_string, length=2)), default=[])): do_set_muted_topics(user_profile, muted_topics) return json_success() def add_push_device_token(request, user_profile, token, kind, ios_app_id=None): if token == '' or len(token) > 4096: return json_error(_('Empty or invalid length token')) # If another user was previously logged in on the same device and didn't # properly log out, the token will still be registered to the wrong account PushDeviceToken.objects.filter(token=token).delete() # Overwrite with the latest value token, created = PushDeviceToken.objects.get_or_create(user=user_profile, token=token, kind=kind, ios_app_id=ios_app_id) if not created: token.last_updated = now() token.save(update_fields=['last_updated']) return json_success() @has_request_variables def add_apns_device_token(request, user_profile, token=REQ(), appid=REQ(default=settings.ZULIP_IOS_APP_ID)): return add_push_device_token(request, user_profile, token, PushDeviceToken.APNS, ios_app_id=appid) @has_request_variables def add_android_reg_id(request, user_profile, token=REQ()): return add_push_device_token(request, user_profile, token, PushDeviceToken.GCM) def remove_push_device_token(request, user_profile, token, kind): if token == '' or len(token) > 4096: return json_error(_('Empty or invalid length token')) try: token = PushDeviceToken.objects.get(token=token, kind=kind) token.delete() except PushDeviceToken.DoesNotExist: return json_error(_("Token does not exist")) return json_success() @has_request_variables def remove_apns_device_token(request, user_profile, token=REQ()): return remove_push_device_token(request, user_profile, token, PushDeviceToken.APNS) @has_request_variables def remove_android_reg_id(request, user_profile, token=REQ()): return remove_push_device_token(request, user_profile, token, PushDeviceToken.GCM) def generate_204(request): return HttpResponse(content=None, status=204) def process_unsubscribe(token, type, unsubscribe_function): try: confirmation = Confirmation.objects.get(confirmation_key=token) except Confirmation.DoesNotExist: return render_to_response('zerver/unsubscribe_link_error.html') user_profile = confirmation.content_object unsubscribe_function(user_profile) return render_to_response('zerver/unsubscribe_success.html', {"subscription_type": type, "external_host": settings.EXTERNAL_HOST}) # Email unsubscribe functions. All have the function signature # processor(user_profile). def do_missedmessage_unsubscribe(user_profile): do_change_enable_offline_email_notifications(user_profile, False) def do_welcome_unsubscribe(user_profile): clear_followup_emails_queue(user_profile.email) def do_digest_unsubscribe(user_profile): do_change_enable_digest_emails(user_profile, False) # The keys are part of the URL for the unsubscribe link and must be valid # without encoding. # The values are a tuple of (display name, unsubscribe function), where the # display name is what we call this class of email in user-visible text. email_unsubscribers = { "missed_messages": ("missed messages", do_missedmessage_unsubscribe), "welcome": ("welcome", do_welcome_unsubscribe), "digest": ("digest", do_digest_unsubscribe) } # Login NOT required. 
These are for one-click unsubscribes. def email_unsubscribe(request, type, token): if type in email_unsubscribers: display_name, unsubscribe_function = email_unsubscribers[type] return process_unsubscribe(token, display_name, unsubscribe_function) return render_to_response('zerver/unsubscribe_link_error.html', {}, request=request)<|fim▁end|>
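The view code above ends with a small dispatch table: email_unsubscribers maps the key that appears in a one-click unsubscribe URL to a (display name, handler) pair, and email_unsubscribe looks the key up, runs the handler, and renders a success or error page. A minimal self-contained sketch of that pattern, with hypothetical handler names and a plain dict standing in for the user profile, could look like this:

# Minimal sketch of the dispatch-table pattern used for one-click
# unsubscribe links. Handler names are hypothetical; each handler
# receives a "user profile" (a plain dict here) and flips one setting.
def unsubscribe_missed_messages(user_profile):
    user_profile["enable_offline_email_notifications"] = False

def unsubscribe_digest(user_profile):
    user_profile["enable_digest_emails"] = False

# Keys appear in the unsubscribe URL; values are (display name, handler).
unsubscribers = {
    "missed_messages": ("missed messages", unsubscribe_missed_messages),
    "digest": ("digest", unsubscribe_digest),
}

def email_unsubscribe_sketch(unsubscribe_type, user_profile):
    if unsubscribe_type not in unsubscribers:
        return "unsubscribe_link_error"
    display_name, handler = unsubscribers[unsubscribe_type]
    handler(user_profile)
    return "unsubscribed from %s emails" % display_name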
<|file_name|>JobMetricsListener.java<|end_file_name|><|fim▁begin|>/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.ql.exec.spark.status.impl; import java.util.Iterator; import java.util.List; import java.util.Map; <|fim▁hole|>import org.apache.spark.executor.TaskMetrics; import org.apache.spark.scheduler.SparkListenerJobStart; import org.apache.spark.scheduler.SparkListenerTaskEnd; import com.google.common.collect.Lists; import com.google.common.collect.Maps; public class JobMetricsListener extends JavaSparkListener { private static final Logger LOG = LoggerFactory.getLogger(JobMetricsListener.class); private final Map<Integer, int[]> jobIdToStageId = Maps.newHashMap(); private final Map<Integer, Integer> stageIdToJobId = Maps.newHashMap(); private final Map<Integer, Map<String, List<TaskMetrics>>> allJobMetrics = Maps.newHashMap(); @Override public synchronized void onTaskEnd(SparkListenerTaskEnd taskEnd) { int stageId = taskEnd.stageId(); int stageAttemptId = taskEnd.stageAttemptId(); String stageIdentifier = stageId + "_" + stageAttemptId; Integer jobId = stageIdToJobId.get(stageId); if (jobId == null) { LOG.warn("Can not find job id for stage[" + stageId + "]."); } else { Map<String, List<TaskMetrics>> jobMetrics = allJobMetrics.get(jobId); if (jobMetrics == null) { jobMetrics = Maps.newHashMap(); allJobMetrics.put(jobId, jobMetrics); } List<TaskMetrics> stageMetrics = jobMetrics.get(stageIdentifier); if (stageMetrics == null) { stageMetrics = Lists.newLinkedList(); jobMetrics.put(stageIdentifier, stageMetrics); } stageMetrics.add(taskEnd.taskMetrics()); } } @Override public synchronized void onJobStart(SparkListenerJobStart jobStart) { int jobId = jobStart.jobId(); int size = jobStart.stageIds().size(); int[] intStageIds = new int[size]; for (int i = 0; i < size; i++) { Integer stageId = (Integer) jobStart.stageIds().apply(i); intStageIds[i] = stageId; stageIdToJobId.put(stageId, jobId); } jobIdToStageId.put(jobId, intStageIds); } public synchronized Map<String, List<TaskMetrics>> getJobMetric(int jobId) { return allJobMetrics.get(jobId); } public synchronized void cleanup(int jobId) { allJobMetrics.remove(jobId); jobIdToStageId.remove(jobId); Iterator<Map.Entry<Integer, Integer>> iterator = stageIdToJobId.entrySet().iterator(); while (iterator.hasNext()) { Map.Entry<Integer, Integer> entry = iterator.next(); if (entry.getValue() == jobId) { iterator.remove(); } } } }<|fim▁end|>
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.spark.JavaSparkListener;
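JobMetricsListener above does its work with two maps: stageIdToJobId to find which job a finishing task belongs to, and allJobMetrics to append that task's metrics under a "stageId_stageAttemptId" key, plus cleanup to drop both when a job is finished. Reduced to plain dictionaries (and leaving Spark out entirely), that bookkeeping might be sketched as follows; the class and method names here are illustrative only, not part of any Spark or Hive API:

from collections import defaultdict

class JobMetricsBookkeeping(object):
    """Illustrative per-job, per-stage metric accumulator (not a Spark API)."""

    def __init__(self):
        # stage id -> job id
        self.stage_to_job = {}
        # job id -> {"stageId_stageAttemptId": [task metrics, ...]}
        self.job_metrics = defaultdict(lambda: defaultdict(list))

    def on_job_start(self, job_id, stage_ids):
        for stage_id in stage_ids:
            self.stage_to_job[stage_id] = job_id

    def on_task_end(self, stage_id, stage_attempt_id, task_metrics):
        job_id = self.stage_to_job.get(stage_id)
        if job_id is None:
            return  # unknown stage; the real listener logs a warning instead
        key = "%s_%s" % (stage_id, stage_attempt_id)
        self.job_metrics[job_id][key].append(task_metrics)

    def get_job_metric(self, job_id):
        return self.job_metrics.get(job_id)

    def cleanup(self, job_id):
        self.job_metrics.pop(job_id, None)
        self.stage_to_job = {s: j for s, j in self.stage_to_job.items()
                             if j != job_id}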
<|file_name|>ss_script_handler.py<|end_file_name|><|fim▁begin|># Jacqueline Kory Westlund # May 2016 # # The MIT License (MIT) # # Copyright (c) 2016 Personal Robots Group # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. import sys # For getting generic exception info import datetime # For getting time deltas for timeouts import time # For sleep import json # For packing ros message properties import random # For picking robot responses and shuffling answer options import logging # Log messages import Queue # for queuing messages for the main game loop from SS_Errors import NoStoryFound # Custom exception when no stories found from ss_script_parser import ss_script_parser # Parses scripts from ss_personalization_manager import ss_personalization_manager from ss_ros import ss_ros # Our ROS connection class ss_script_handler(): """ Social stories script handler parses and deals with script lines. Uses the script parser to get the next line in a script. We keep loading script lines and parsing script lines separate on the offchance that we might want to replace how scripts are stored and accessed (e.g., in a database versus in text files). """ # Constants for script playback: # Time to pause after showing answer feedback and playing robot # feedback speech before moving on to the next question. ANSWER_FEEDBACK_PAUSE_TIME = 2 # Time to wait for robot to finish speaking or acting before # moving on to the next script line (in seconds). WAIT_TIME = 30 def __init__(self, ros_node, session, participant, script_path, story_script_path, session_script_path, database, queue, percent_correct_to_level): """ Save references to ROS connection and logger, get scripts and set up to read script lines """ # Set up logger. self._logger = logging.getLogger(__name__) self._logger.info("Setting up script handler...") # Save reference to our ros node so we can publish messages. self._ros_node = ros_node # Save script paths so we can load scripts later. self._script_path = script_path if (story_script_path is None): self._story_script_path = "" else: self._story_script_path = story_script_path if (session_script_path is None): self._session_script_path = "" else: self._session_script_path = session_script_path # We get a reference to the main game node's queue so we can # give it messages. self._game_node_queue = queue # Set up personalization manager so we can get personalized # stories for this participant. 
self._personalization_man = ss_personalization_manager(session, participant, database, percent_correct_to_level) # Set up script parser. self._script_parser = ss_script_parser() # These are other script parsers we may use later. self._story_parser = None self._repeat_parser = None # If we have a repeating script, we will need to save its filename so # we can re-load it when we repeat it. self._repeating_script_name = "" # Get session script from script parser and give to the script # parser. Story scripts we will get later from the # personalization manager. try: self._script_parser.load_script(self._script_path + self._session_script_path + self._script_parser.get_session_script(session)) except IOError: self._logger.exception("Script parser could not open session " + "script!") # Pass exception up so whoever wanted a script handler knows # they didn't get a script. raise # Initialize flags and counters: # Set up counter for how many stories have been told this session. self._stories_told = 0 # When we start, we are not currently telling a story or # repeating a script, or at the end of the game. self._doing_story = False self._repeating = False self._end_game = False # When we start, we are not asking a question, and so there is no # current question type or number. self._current_question_type = "" self._current_question_num = 0 # For counting repetitions of a repeating script. self._repetitions = 0 # The script will tell us the max number of repetitions. self._max_repetitions = 1 # The script will tell us the max number of stories. self._max_stories = 1 # The maximum number of incorrect user responses before the # game moves on (can also be set in the script). self._max_incorrect_responses = 2 # Set the maximum game time, in minutes. This can also be set # in the game script. self._max_game_time = datetime.timedelta(minutes=10) # Sometimes we may need to know what the last user response we # waited for was, and how long we waited. self._last_response_to_get = None self._last_response_timeout = None # Save start time so we can check whether we've run out of time. self._start_time = datetime.datetime.now() # Initialize total time paused. self._total_time_paused = datetime.timedelta(seconds=0) # Initialize pause start time in case someone calls the resume # game timer function before the pause game function. self._pause_start_time = None def iterate_once(self): """ Play the next commands from the script """ try: # We check whether we've reached the game time limit when # we load new stories or when we are about to start a # repeating script over again. # Get next line from story script. if self._doing_story and self._story_parser is not None: self._logger.debug("Getting next line from story script.") line = self._story_parser.next_line() # If not in a story, get next line from repeating script. elif self._repeating and self._repeat_parser is not None: self._logger.debug("Getting next line from repeating script.") line = self._repeat_parser.next_line() # If not repeating, get next line from main session script. else: self._logger.debug("Getting next line from main session script.") line = self._script_parser.next_line() # We didn't read a line! # If we get a stop iteration exception, we're at the end of the # file and will stop iterating over lines. except StopIteration as e: # If we were doing a story, now we're done, go back to # the previous script. 
if self._doing_story: self._logger.info("Finished story " + str(self._stories_told + 1) + " of " + str(self._max_stories) + "!") self._doing_story = False self._stories_told += 1 # If we were repeating a script, increment counter. elif self._repeating: self._repetitions += 1 self._logger.info("Finished repetition " + str(self._repetitions) + " of " + str(self._max_repetitions) + "!") # If we've done enough repetitions, or if we've run out # of game time, go back to the main session script (set # the repeating flag to false). if (self._repetitions >= self._max_repetitions) \ or self._end_game \ or ((datetime.datetime.now() - self._start_time) \ - self._total_time_paused >= self._max_game_time): self._logger.info("Done repeating!") self._repeating = False # Otherwise, we need to repeat again. Reload the repeating # script. else: # Create a script parser for the filename provided, # assume it is in the session_scripts directory. self._repeat_parser = ss_script_parser() try: self._repeat_parser.load_script(self._script_path + self._session_script_path + self._repeating_script_name) except IOError: self._logger.exception("Script parser could not open " + "session script to repeat! Skipping REPEAT line.") sself._repeating = False return # Otherwise we're at the end of the main script. else: self._logger.info("No more script lines to get!") # Pass on the stop iteration exception, with additional # information about the player's performance during the # game, formatted as a json object. emotion, tom, order = self._personalization_man. \ get_performance_this_session() performance = {} if emotion is not None: performance["child-emotion-question-accuracy"] = \ emotion if tom is not None: performance["child-tom-question-accuracy"] = \ tom if order is not None: performance["child-order-question-accuracy"] = \ order e.performance = json.dumps(performance) raise except ValueError: # We may get this exception if we try to get the next line # but the script file is closed. If that happens, something # probably went wrong with ending playback of a story script # or a repeating script. End repeating and end the current # story so we go back to the main session script. if self._doing_story: self._doing_story = False if self._repeating: self._repeating = False # Oh no got some unexpected error! Raise it again so we can # figure out what happened and deal with it during debugging. except Exception as e: self._logger.exception("Unexpected exception! Error: %s", e) raise # We got a line: parse it! else: # Make sure we got a line before we try parsing it. We # might not get a line if the file has closed or if # next_line has some other problem. if not line: self._logger.warning("[iterate_once] Tried to get next line, " + "but got None!") return # Got a line - print for debugging. self._logger.debug("LINE: " + repr(line)) # Parse line! # Split on tabs. elements = line.rstrip().split('\t') self._logger.debug("... " + str(len(elements)) + " elements: \n... " + str(elements)) if len(elements) < 1: self._logger.info("Line had no elements! Going to next line...") return # Do different stuff depending on what the first element is. ######################################################### # Some STORY lines have only one part to the command. elif len(elements) == 1: # For STORY lines, play back the next story for this # participant. if "STORY" in elements[0]: self._logger.debug("STORY") # If line indicates we need to start a story, do so. 
self._doing_story = True # Create a script parser for the filename provided, # assuming it is in the story scripts directory. self._story_parser = ss_script_parser() try: self._story_parser.load_script(self._script_path + self._story_script_path + self._personalization_man.get_next_story_script()) except IOError: self._logger.exception("Script parser could not open " + "story script! Skipping STORY line.") self._doing_story = False except AttributeError: self._logger.exception("Script parser could not open " + "story script because no script was loaded! " + "Skipping STORY line.") self._doing_story = False except NoStoryFound: self._logger.exception("Script parser could not get \ the next story script because no script was \ found by the personalization manager! \ Skipping STORY line.") self._doing_story = False # Line has 2+ elements, so check the other commands. ######################################################### # For STORY SETUP lines, pick the next story to play so # we can load its graphics and play back the story. elif "STORY" in elements[0] and "SETUP" in elements[1]: self._logger.debug("STORY SETUP") # Pick the next story to play. self._personalization_man.pick_next_story() ######################################################### # For ROBOT lines, send command to the robot. elif "ROBOT" in elements[0]: self._logger.debug("ROBOT") # Play a randomly selected story intro from the list. if "STORY_INTRO" in elements[1]: self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int( self.WAIT_TIME)), properties=self._story_intros[ random.randint(0,len(self._story_intros)-1)]) # Play a randomly selected story closing from the list. elif "STORY_CLOSING" in elements[1]: self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int( self.WAIT_TIME)), properties=self._story_closings[ random.randint(0,len(self._story_closings)-1)]) # Send a command to the robot, with properties. elif len(elements) > 2: self._ros_node.send_robot_command(elements[1], response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int( self.WAIT_TIME)), properties=elements[2]) # Send a command to the robot, without properties. else: self._ros_node.send_robot_command(elements[1], "") ######################################################### # For OPAL lines, send command to Opal game elif "OPAL" in elements[0]: self._logger.debug("OPAL") if "LOAD_ALL" in elements[1] and len(elements) >= 3: # Load all objects listed in file -- the file is # assumed to have properties for one object on each # line. to_load = self._read_list_from_file( self._script_path + self._session_script_path + elements[2]) for obj in to_load: self._ros_node.send_opal_command("LOAD_OBJECT", obj) # Get the next story and load graphics into game. elif "LOAD_STORY" in elements[1]: self._load_next_story() # Load answers for game. elif "LOAD_ANSWERS" in elements[1] and len(elements) >= 3: self._load_answers(elements[2]) # Send an opal command, with properties. elif len(elements) > 2: self._ros_node.send_opal_command(elements[1], elements[2]) # Send an opal command, without properties. else: self._ros_node.send_opal_command(elements[1]) ######################################################### # For PAUSE lines, sleep for the specified number of # seconds before continuing script playback. 
elif "PAUSE" in elements[0] and len(elements) >= 2: self._logger.debug("PAUSE") try: time.sleep(int(elements[1])) except ValueError: self._logger.exception("Not pausing! PAUSE command was " + "given an invalid argument (should be an int)!") ######################################################### # For ADD lines, get a list of robot commands that can be # used in response to particular triggers from the specified # file and save them for later use -- all ADD lines should # have 3 elements. elif "ADD" in elements[0] and len(elements) >= 3: self._logger.debug("ADD") # Read list of responses from the specified file into the # appropriate variable. try: if "INCORRECT_RESPONSES" in elements[1]: self._incorrect_responses = self._read_list_from_file( self._script_path + self._session_script_path + elements[2]) self._logger.debug("... Got " + str(len(self._incorrect_responses))) if "CORRECT_RESPONSES" in elements[1]: self._correct_responses = self._read_list_from_file( self._script_path + self._session_script_path + elements[2]) self._logger.debug("... Got " + str(len(self._correct_responses))) elif "START_RESPONSES" in elements[1]: self._start_responses = self._read_list_from_file( self._script_path + self._session_script_path + elements[2]) self._logger.debug("... Got " + str(len(self._start_responses))) elif "NO_RESPONSES" in elements[1]: self._no_responses = self._read_list_from_file( self._script_path + self._session_script_path + elements[2]) self._logger.debug("... Got " + str(len(self._no_responses))) elif "ANSWER_FEEDBACK" in elements[1]: self._answer_feedback = self._read_list_from_file( self._script_path + self._session_script_path + elements[2]) self._logger.debug("... Got " + str(len(self._answer_feedback))) elif "STORY_INTROS" in elements[1]: self._story_intros = self._read_list_from_file( self._script_path + self._session_script_path + elements[2]) self._logger.debug("... Got " + str(len(self._story_intros))) elif "STORY_CLOSINGS" in elements[1]: self._story_closings = self._read_list_from_file( self._script_path + self._session_script_path + elements[2]) self._logger.debug("... Got " + str(len(self._story_closings))) elif "TIMEOUT_CLOSINGS" in elements[1]: self._timeout_closings = self._read_list_from_file( self._script_path + self._session_script_path + elements[2]) self._logger.debug("Got " + str(len(self._timeout_closings))) elif "MAX_STORIES_REACHED" in elements[1]: self._max_stories_reached = self._read_list_from_file( self._script_path + self._session_script_path + elements[2]) self._logger.debug("... Got " + str(len(self._max_stories_reached))) except IOError: self._logger.exception("Failed to add responses!") else: self._logger.info("Added " + elements[1]) ######################################################### # For SET lines, set the specified constant. elif "SET" in elements[0] and len(elements) >= 3: self._logger.debug("SET") if "MAX_INCORRECT_RESPONSES" in elements[1]: self._max_incorrect_responses = int(elements[2]) self._logger.info("Set MAX_INCORRECT_RESPONSES to " + elements[2]) elif "MAX_GAME_TIME" in elements[1]: self._max_game_time = datetime.timedelta(minutes= int(elements[2])) self._logger.info("Set MAX_GAME_TIME to " + elements[2]) elif "MAX_STORIES" in elements[1]: self._max_stories = int(elements[2]) self._logger.info("Set MAX_STORIES to " + elements[2]) ######################################################### # For WAIT lines, wait for the specified user response, # or for a timeout. 
elif "WAIT" in elements[0] and len(elements) >= 3: self._logger.debug("WAIT") self.wait_for_response(elements[1], int(elements[2])) ######################################################### # For QUESTION lines, save the question type and question number # for later use. elif "QUESTION" in elements[0] and len(elements) >= 3: self._current_question_type = elements[1] self._current_question_num = int(elements[2]) self._logger.info("Current question: type " + elements[1] + ", num " + elements[2]) ######################################################### # For REPEAT lines, repeat lines in the specified script # file the specified number of times. elif "REPEAT" in elements[0] and len(elements) >= 3: self._logger.debug("REPEAT") self._repeating = True self._repetitions = 0 # Create a script parser for the filename provided, # assume it is in the session_scripts directory. self._repeat_parser = ss_script_parser() self._repeating_script_name = elements[2] try: self._repeat_parser.load_script(self._script_path + self._session_script_path + elements[2]) except IOError: self._logger.exception("Script parser could not open " + "session script to repeat! Skipping REPEAT line.") self._repeating = False return # Figure out how many times we should repeat the script. if "MAX_STORIES" in elements[1]: try: self._max_repetitions = self._max_stories except AttributeError: self._logger.exception("Tried to set MAX_REPETITIONS to" + " MAX_STORIES, but MAX_STORIES has not been " + "set . Setting to 1 repetition instead.") self._max_repetitions = 1 else: self._max_repetitions = int(elements[1]) self._logger.debug("Going to repeat " + elements[2] + " " + str(self._max_repetitions) + " time(s).") def _read_list_from_file(self, filename): """ Read a list of robot responses from a file, return a list of the lines from the file """ # Open script for reading. try: fh = open(filename, "r") return fh.readlines() except IOError as e: self._logger.exception("Cannot open file: " + filename) # Pass exception up so anyone trying to add a response list # from a script knows it didn't work. raise def wait_for_response(self, response_to_get, timeout): """ Wait for a user response or wait until the specified time has elapsed. If the response is incorrect, allow multiple attempts up to the maximum number of incorrect responses. """ for i in range(0, self._max_incorrect_responses): self._logger.info("Waiting for user response...") # Save the response we were trying to get in case we need # to try again. self._last_response_to_get = response_to_get self._last_response_timeout = timeout # Wait for the specified type of response, or until the # specified time has elapsed. response, answer = self._ros_node.wait_for_response(response_to_get, datetime.timedelta(seconds=int(timeout))) # After waiting for a response, need to play back an # appropriate robot response. # If we didn't receive a response, then it was probably # because we didn't send a valid response to wait for. # This is different from a TIMEOUT since we didn't time # out -- we just didn't get a response of any kind. if not response: self._logger.info("Done waiting -- did not get valid response!") return False # If we received no user response before timing out, send a # TIMEOUT message and pause the game. elif "TIMEOUT" in response: # Announce we timed out. self._ros_node.send_game_state("TIMEOUT") # Pause game and wait to be told whether we should try # waiting again for a response or whether we should # skip it and move on. 
Queue up the pause command so the # main game loop can take action. self._game_node_queue.put("PAUSE") # Announce the game is pausing. self._ros_node.send_game_state("PAUSE") # Indicate that we did not get a response. # We don't break and let the user try again because the # external game monitor deals with TIMEOUT events, and # will tell us whether to try waiting again or to just # skip waiting for this response. return False # If response was INCORRECT, randomly select a robot # response to an incorrect user action. elif "INCORRECT" in response: # Record incorrect response in the db. self._personalization_man.record_user_response( self._current_question_num, self._current_question_type, answer) try: self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int( self.WAIT_TIME)), properties=self._incorrect_responses[random.randint(0, len(self._incorrect_responses)-1)]) except AttributeError: self._logger.exception("Could not play an incorrect " + "response. Maybe none were loaded?") # Don't break so we allow the user a chance to respond # again. # If response was NO, randomly select a robot response to # the user selecting no. elif "NO" in response: try: self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int( self.WAIT_TIME)), properties=self._no_responses[random.randint(0, len(self._no_responses)-1)]) except AttributeError: self._logger.exception("Could not play a response to " + "user's NO. Maybe none were loaded?") # Don't break so we allow the user a chance to respond # again. # If response was CORRECT, randomly select a robot response # to a correct user action, highlight the correct answer, # and break out of response loop. elif "CORRECT" in response: # Record correct response in the db. self._personalization_man.record_user_response( self._current_question_num, self._current_question_type, answer) try: self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int( self.WAIT_TIME)), properties=self._correct_responses[random.randint(0, len(self._correct_responses)-1)]) self._ros_node.send_opal_command("SHOW_CORRECT") self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int( self.WAIT_TIME)), properties=self._answer_feedback[random.randint(0, len(self._answer_feedback)-1)]) # Pause after speaking before hiding correct again time.sleep(self.ANSWER_FEEDBACK_PAUSE_TIME) self._ros_node.send_opal_command("HIDE_CORRECT") except AttributeError: self._logger.exception("Could not play a correct " + "response or could not play robot's answer" + " feedback. Maybe none were loaded?") # Break from the for loop so we don't give the user # a chance to respond again. break # If response was START, randomly select a robot response to # the user selecting START, and break out of response loop. elif "START" in response: try: self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int( self.WAIT_TIME)), properties=self._start_responses[random.randint(0, len(self._start_responses)-1)]) except AttributeError: self._logger.exception("Could not play response to" + "user's START. Maybe none were loaded?") # Break from the for loop so we don't give the user # a chance to respond again. break # We exhausted our allowed number of user responses, so have # the robot do something instead of waiting more. 
else: # If user was never correct, play robot's correct answer # feedback and show which answer was correct in the game. if "CORRECT" in response_to_get: try: self._ros_node.send_opal_command("SHOW_CORRECT") self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int( self.WAIT_TIME)), properties=self._answer_feedback[random.randint(0, len(self._answer_feedback)-1)]) # Pause after speaking before hiding correct again. time.sleep(self.ANSWER_FEEDBACK_PAUSE_TIME) self._ros_node.send_opal_command("HIDE_CORRECT") except AttributeError: self._logger.exception("Could not play robot's answer" + " feedback! Maybe none were loaded?") # If user never selects START (which is used to ask the user # if they are ready to play), stop all stories and repeating # scripts, continue with main script so we go to the end. elif "START" in response_to_get: self._repeating = False self.story = False # We got a user response and responded to it! return True def skip_wait_for_response(self): """ Skip waiting for a response; treat the skipped response as a NO or INCORRECT response. """ # If the response to wait for was CORRECT or INCORRECT, # randomly select a robot response to an incorrect user # action. if "CORRECT" in self._last_response_to_get: try: self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int(self.WAIT_TIME)), properties=self._incorrect_responses[random.randint(0, \ len(self._incorrect_responses)-1)]) except AttributeError: self._logger.exception("Could not play an incorrect " + "response. Maybe none were loaded?") # If response to wait for was YES or NO, randomly select a # robot response for a NO user action. elif "NO" in self._last_response_to_get: try: self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int(self.WAIT_TIME)), properties=self._no_responses[random.randint(0, len(self._no_responses)-1)]) except AttributeError: self._logger.exception("Could not play a response to " + "user's NO. Maybe none were loaded?") def set_end_game(self): """ End the game gracefully -- stop any stories or repeating scripts, go back to main session script and finish. """ # For now, we just need to set a flag indicating we should end # the game. When we check whether we should load another story # or repeat a repeating script, this flag will be used to skip # back to the main session script, to the end of the game. self._end_game = True def set_start_level(self, level): """ When the game starts, a level to start at can be provided. Pass this to the personalization manager to deal with, since it deals with picking the levels of stories to play. """ self._personalization_man.set_start_level(level) def pause_game_timer(self): """ Track how much time we spend paused so when we check whether we have reached the max game time, we don't include time spent paused. """ self._pause_start_time = datetime.datetime.now() def resume_game_timer(self): """ Add how much time we spent paused to our total time spent paused. """ # Since this function could theoretically be called before we # get a call to pause_game_timer, we have to check that there # is a pause start time, and then later, reset it so we can't # add the same pause length multiple times to our total pause # time. if self._pause_start_time is not None: self._total_time_paused += datetime.datetime.now() \ - self._pause_start_time # Reset pause start time. 
self._pause_start_time = None def wait_for_last_response_again(self): """ Wait for the same response that we just waited for again, with the same parameters for the response and the timeout. """ return self.wait_for_response(self._last_response_to_get, self._last_response_timeout) def _load_answers(self, answer_list): """ Load the answer graphics for this story """ # We are given a list of words that indicate what the answer # options are. By convention, the first word is probably the # correct answer; the others are incorrect answers. However, # we won't set this now because this convention may not hold. # We expect the SET_CORRECT OpalCommand to be used to set # which answers are correct or incorrect. # split the list of answers on commas. answers = answer_list.strip().split(',') # Shuffle answers to display them in a random order. random.shuffle(answers) # Load in the graphic for each answer. for answer in answers: toload = {}<|fim▁hole|> toload["draggable"] = False toload["isAnswerSlot"] = True self._ros_node.send_opal_command("LOAD_OBJECT", json.dumps(toload)) def _load_next_story(self): """ Get the next story, set up the game scene with scene and answer slots, and load scene graphics. """ # If we've told the max number of stories, or if we've reached # the max game time, don't load another story even though we # were told to load one -- instead, play error message from # robot saying we have to be done now. if self._stories_told >= self._max_stories \ or ((datetime.datetime.now() - self._start_time) \ - self._total_time_paused >= self._max_game_time) or self._end_game: self._logger.info("We were told to load another story, but we've " + "already played the maximum number of stories or we ran" " out of time! Skipping and ending now.") self._doing_story = False try: self._ros_node.send_robot_command("DO", response="ROBOT_NOT_SPEAKING", timeout=datetime.timedelta(seconds=int(self.WAIT_TIME)), properties=self._max_stories_reached [random.randint(0, len(self._no_responses)-1)]) except AttributeError: self._logger.exception("Could not play a max stories reached " + "response. Maybe none were loaded?") # We were either told to play another story because a # repeating script loads a story and the max number of # repetitions is greater than the max number of stories, # so more stories were requested than can be played, or # because we ran out of time and were supposed to play more # stories than we have time for. Either way, stop the # repeating script if there is one. self._repeating = False return # Get the details for the next story. try: scenes, in_order, num_answers = \ self._personalization_man.get_next_story_details() except NoStoryFound: # If no story was found, we can't load the story! self._logger.exception("Cannot load story - no story to load was" + " found!") self._doing_story = False return # Set up the story scene in the game. setup = {} setup["numScenes"] = len(scenes) setup["scenesInOrder"] = in_order setup["numAnswers"] = num_answers self._ros_node.send_opal_command("SETUP_STORY_SCENE", json.dumps(setup)) # Load the scene graphics. 
for scene in scenes: toload = {} toload["name"] = "scenes/" + scene toload["tag"] = "PlayObject" toload["slot"] = scenes.index(scene) + 1 if not in_order: toload["correctSlot"] = scenes.index(scene) + 1 toload["draggable"] = False if in_order else True toload["isAnswerSlot"] = False self._ros_node.send_opal_command("LOAD_OBJECT", json.dumps(toload)) # Tell the personalization manager that we loaded the story so # it can keep track of which stories have been played. self._personalization_man.record_story_loaded()<|fim▁end|>
            # Remove whitespace from name before using it.
            toload["name"] = answer.strip()
            toload["tag"] = "PlayObject"
            toload["slot"] = answers.index(answer) + 1
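The completed _load_answers loop above shuffles the comma-separated answer options and gives each one a 1-based slot taken from its shuffled position before sending it to the tablet as a LOAD_OBJECT command. A stripped-down sketch of that slot assignment, with a plain list of (command, payload) tuples standing in for the ROS call, might look like:

import json
import random

def build_answer_commands(answer_list):
    """Shuffle comma-separated answer options and assign 1-based slots
    in shuffled order, mirroring the loop completed above."""
    answers = answer_list.strip().split(',')
    random.shuffle(answers)
    commands = []
    for answer in answers:
        toload = {
            "name": answer.strip(),
            "tag": "PlayObject",
            # Slot index comes from the option's position after shuffling.
            "slot": answers.index(answer) + 1,
            "draggable": False,
            "isAnswerSlot": True,
        }
        commands.append(("LOAD_OBJECT", json.dumps(toload)))
    return commands

# Example: three options end up in a random slot order.
print(build_answer_commands("happy, sad, angry"))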
<|file_name|>resources.py<|end_file_name|><|fim▁begin|>from .util import to_datetime, to_iso from .http import request from .exceptions import KloudlessException as KException from . import config import inspect import json import requests import six import warnings class BaseResource(dict): # {'key': (serializer, deserializer)} _serializers = { 'created': (to_iso, to_datetime), 'modified': (to_iso, to_datetime), 'expiration': (to_iso, to_datetime), 'expiry': (to_iso, to_datetime), 'token_expiry': (to_iso, to_datetime), 'refresh_token_expiry': (to_iso, to_datetime), } _path_segment = None _parent_resource_class = None # requests.Session's connection pool could cause failures due to the lack # of keep-alives causing the connection to drop unexpectedly. # Use `requests` to be safe, but alter if better performance is preferable. _api_session = requests def __init__(self, id=None, parent_resource=None, configuration=None): if not configuration: configuration = {} self._configuration = config.merge(configuration) self['id'] = id # Saved state, as returned by the Kloudless API. self._previous_data = {} # Keys that used to be present that no longer are post-save. # Useful for more helpful error messages. self._removed_keys = set() self._parent_resource = parent_resource if self._parent_resource_class is not None: if self._parent_resource is None: raise KException( "A %s object or ID must be specified as this " "%s object's parent." % (self._parent_resource_class, self.__class__.__name__)) def populate(self, data): """ data: Response from Kloudless with data on this object. """ removed = set(self.keys()) - set(data.keys()) self._removed_keys |= removed id = self['id'] self.clear() for k, v in data.items(): if k in self._serializers: data[k] = self._serializers[k][1](v) for k, v in six.iteritems(data): super(BaseResource, self).__setitem__( k, self.__class__.create_from_data( v, parent_resource=self._parent_resource, configuration=self._configuration)) if 'id' not in self: self['id'] = id # Update our state. self._previous_data = self.serialize(self) @classmethod def create_from_data(cls, data, parent_resource=None, configuration=None): if isinstance(data, list): return [cls.create_from_data( d, parent_resource=parent_resource, configuration=configuration) for d in data] elif isinstance(data, dict) and not isinstance(data, BaseResource): data = data.copy() klass = cls data_type = None if data.get('api') and data.get('type'): data_type = data['api'] + '_' + data['type'] if data_type in resources: klass = resources[data_type] elif data.get('type') in resources: klass = resources[data['type']] instance = klass(id=data.get('id'), parent_resource=parent_resource, configuration=configuration) instance.populate(data) return instance else: return data @classmethod def serialize(cls, resource_data): """ Converts values in the BaseResource object into primitive types. This helps convert the entire object to JSON. resource_data: Either the resource object, or a dict with the data to populate the resource. 
""" serialized = {} for k, v in six.iteritems(resource_data): if isinstance(v, BaseResource): serialized[k] = v.serialize(v) elif k in cls._serializers: serialized[k] = cls._serializers[k][0](v) else: serialized[k] = v return serialized @classmethod def list_path(cls, parent_resource): raise NotImplementedError("Subclasses must implement list_path.") def detail_path(self): if not self['id']: raise KException("The detail_path cannot be obtained since the ID " "is unknown.") return "%s/%s" % (self.list_path(self._parent_resource), self['id']) # Getter/Setter methods def __setattr__(self, k, v): if k[0] == '_' or k in self.__dict__: return super(BaseResource, self).__setattr__(k, v) else: self[k] = v def __getattr__(self, k): if k[0] == '_': raise AttributeError(k) try: return self[k] except KeyError as e: raise AttributeError(*e.args) def __setitem__(self, k, v): super(BaseResource, self).__setitem__(k, v) def __getitem__(self, k): try: return super(BaseResource, self).__getitem__(k) except KeyError: if k in self._removed_keys: raise KeyError( "%r. The key %s was previously present but no longer is. " "This is due to the object being updated with new " "information returned from the Kloudless API, probably " "due to the object being saved. Here are the current " "attributes of this object: %s" % (k, k, ', '.join(self.keys()))) else: raise def __delitem__(self, k): raise TypeError( "Items cannot be deleted. Please set them to None instead if you " "wish to clear them.") class AnnotatedList(list): """ Given a deserialized response of all(), the objects returned by the API will be made iterable, and the other attributes will become attributes of this AnnotatedList object. """ def __init__(self, all_data): if isinstance(all_data, list): return all_data objects = None for k, v in six.iteritems(all_data): if k in ['objects', 'permissions', 'properties'] and isinstance(v, list): objects = v else: setattr(self, k, v) if objects is None: raise KException("No lists were found!") list.__init__(self, objects) def allow_proxy(func): func.allow_proxy = True return func class ListMixin(object): @classmethod @allow_proxy def all(cls, parent_resource=None, configuration=None, headers=None, **params): response = request(cls._api_session.get, cls.list_path(parent_resource), configuration=configuration, headers=headers, params=params) data = cls.create_from_data( response.json(), parent_resource=parent_resource, configuration=configuration) return AnnotatedList(data) class RetrieveMixin(object): @classmethod @allow_proxy def retrieve(cls, id, parent_resource=None, configuration=None, headers=None, **params): instance = cls(id=id, parent_resource=parent_resource, configuration=configuration) response = request(cls._api_session.get, instance.detail_path(), configuration=configuration, headers=headers, params=params) instance.populate(response.json()) return instance def refresh(self, headers=None): """ Retrieves and sets new metadata for the resource. """ response = request(self._api_session.get, self.detail_path(), configuration=self._configuration, headers=headers) self.populate(response.json()) class ReadMixin(RetrieveMixin, ListMixin): pass class CreateMixin(object): @classmethod @allow_proxy def create(cls, data=None, params=None, method='post', parent_resource=None, configuration=None, headers=None): """ params: A dict containing query parameters. data: A dict containing data. 
""" method = getattr(cls._api_session, method) if not data: data = {} if type(data) in [list, tuple]: data = [cls.serialize(data_obj) for data_obj in data] else: data = cls.serialize(data) if not params: params = {} response = request(method, cls.list_path(parent_resource), configuration=configuration, headers=headers, data=data, params=params) return cls.create_from_data( response.json(), parent_resource=parent_resource, configuration=configuration) class UpdateMixin(object): def _data_to_save(self, new_data): """ Override this for any specific checks or additions to data. """ return new_data def save(self, headers=None, **params): data = self.serialize(self) new_data = {} for k, v in six.iteritems(data): if k not in self._previous_data or self._previous_data[k] != v: # Attribute is new or was updated new_data[k] = v new_data = self._data_to_save(new_data) if new_data: if self['id'] is None: if hasattr(self.__class__, 'create'): raise KException("No ID provided. Use create() to create " "new resources instead.") else: raise KException("No ID provided to identify the resource " "to update.") response = request(self._api_session.patch, self.detail_path(), configuration=self._configuration, headers=headers, data=new_data, params=params) self.populate(response.json()) # For some resources (eg: File/Folder), the parent resource could # be different. Check for that. # This assumes that if the metadata contains an 'account' key, # it maps to the correct Account ID. We update our parent # resource with the ID and it's metadata if it is different. res_type = resource_types[self.__class__] if (self._parent_resource and res_type in ['file', 'folder', 'link']): parent_res_type = resource_types[self._parent_resource_class] if (hasattr(self, parent_res_type) and self._parent_resource.id != self[parent_res_type]): self._parent_resource.id = self[parent_res_type] self._parent_resource.refresh() return True return False class DeleteMixin(object): def delete(self, headers=None, **params): request(self._api_session.delete, self.detail_path(), configuration=self._configuration, headers=headers, params=params) self.populate({}) class CopyMixin(object): def _copy(self, headers=None, **data): """ Copy the file/folder to another location. """ response = request(self._api_session.post, "%s/copy" % self.detail_path(), configuration=self._configuration, headers=headers, data=data) return self.__class__.create_from_data( response.json(), parent_resource=self._parent_resource, configuration=self._configuration) class WriteMixin(CreateMixin, UpdateMixin, DeleteMixin): pass class ResourceProxy(object): """ Create a proxy object. Whenever a function is called on it that is present on the underlying model, we attempt to call the underlying model. This is useful because resources can add in parameters like the parent_resource if it has not been specified yet. The Account resource does this. 
""" def __init__(self, klass, parent_resource=None, configuration=None): self.klass = klass self.parent_resource = parent_resource self.configuration = configuration def __getattr__(self, name): method = getattr(self.klass, name, None) def proxy_method(self, *args, **kwargs): self.update_kwargs(kwargs) return method(*args, **kwargs) if inspect.ismethod(method): if getattr(method, 'allow_proxy', False): return proxy_method.__get__(self) else: return method else: raise AttributeError(name) def __call__(self, *args, **kwargs): self.update_kwargs(kwargs) return self.klass(*args, **kwargs) def update_kwargs(self, kwargs): if 'parent_resource' not in kwargs: kwargs['parent_resource'] = self.parent_resource if 'configuration' not in kwargs: kwargs['configuration'] = self.configuration class Proxy: def _get_proxy(self, resource_name): if not getattr(self, '_proxies', None): setattr(self, '_proxies', {}) resource = resources[resource_name] if self._proxies.get(resource_name) is None: self._proxies[resource_name] = ResourceProxy( resource, parent_resource=self, configuration=self._configuration) return self._proxies[resource_name] class Account(BaseResource, ReadMixin, WriteMixin, Proxy): def __init__(self, *args, **kwargs): super(Account, self).__init__(*args, **kwargs) @classmethod def list_path(cls, parent_resource): return 'accounts' @classmethod def serialize_account(cls, resource_data): account_properties = ['active', 'account', 'service', 'token', 'token_secret', 'refresh_token', 'token_expiry', 'refresh_token_expiry'] serialized = {} for k, v in six.iteritems(resource_data): if isinstance(v, BaseResource): serialized[k] = v.serialize_account(v) elif k not in account_properties: continue elif k in cls._serializers: serialized[k] = cls._serializers[k][0](v) else: serialized[k] = v return serialized def save(self, headers=None, **params): # TODO: add in fields token, token_secret, refresh_token request(self._api_session.patch, self.detail_path(), configuration=self._configuration, headers=headers, data=self.serialize_account(self), params=params) def convert(self, headers=None, data=None, params=None): # Deprecated in favor of encode_raw_id params = {} if params is None else params data = {} if data is None else data convert_path = "%s/%s" % (self.detail_path(), 'storage/convert_id') response = request(self._api_session.post, convert_path, configuration=self._configuration, headers=headers, data=data, params=params) return response.json() def encode_raw_id(self, data=None, params=None, headers=None): path = "%s/encode_raw_id" % self.detail_path() return request( self._api_session.post, path, data=data or {}, params=params or {}, headers=headers, configuration=self._configuration).json() def raw(self, raw_uri='', raw_method='GET', data=None, params=None, headers=None): """ raw_uri: Upstream URI to make the pass-through API request to. raw_method: HTTP Method to make the pass-through request with. params: A dict containing query parameters. data: A dict containing data. 
""" data = data or {} params = params or {} headers = headers or {} headers['X-Kloudless-Raw-URI'] = raw_uri headers['X-Kloudless-Raw-Method'] = raw_method return request( self._api_session.post, "%s/raw" % self.detail_path(), data=data, headers=headers, params=params, configuration=self._configuration) @property def links(self): return self._get_proxy('link') @property def files(self): return self._get_proxy('file') @property def folders(self): return self._get_proxy('folder') @property def search(self): return self._get_proxy('search') @property def recent(self): return self._get_proxy('recent') @property def calendars(self): return self._get_proxy('calendars') @property def events(self): return self._get_proxy('events') @property def multipart(self): return self._get_proxy('multipart') @property def users(self): return self._get_proxy('user') @property def groups(self): return self._get_proxy('group') @property def crm_objects(self): return self._get_proxy('crm_object') @property def crm_accounts(self): return self._get_proxy('crm_account') @property def crm_contacts(self): return self._get_proxy('crm_contact') @property def crm_leads(self): return self._get_proxy('crm_lead') @property def crm_opportunities(self): return self._get_proxy('crm_opportunity') @property def crm_campaigns(self): return self._get_proxy('crm_campaign') @property def crm_tasks(self): return self._get_proxy('crm_task') @property def crm_batch(self): return self._get_proxy('crm_batch') @property def crm_recent(self): return self._get_proxy('crm_recent') @property def crm_search(self): return self._get_proxy('crm_search') @property def crm_events(self): return self._get_proxy('crm_events') class AccountBaseResource(BaseResource): _parent_resource_class = Account def __init__(self, *accounts, **kwargs): """ accounts should only be a list with 1 account in it. """ if accounts: kwargs['parent_resource'] = accounts[0] super(AccountBaseResource, self).__init__(**kwargs) @classmethod def list_path(cls, account): account_path = account.detail_path() return "%s/%s" % (account_path, cls._path_segment) class FileSystem(BaseResource, Proxy): _path_segment = None @property def permissions(self): return self._get_proxy('permission') class FileSystemBaseResource(BaseResource): _parent_resource_class = FileSystem def __init__(self, *files, **kwargs): if files: kwargs['parent_resource'] = files[0] super(FileSystemBaseResource, self).__init__(**kwargs) @classmethod def list_path(cls, file): file_path = file.detail_path() return "%s/%s" % (file_path, cls._path_segment) class File(AccountBaseResource, RetrieveMixin, DeleteMixin, UpdateMixin, CopyMixin, FileSystem): _path_segment = 'storage/files' @property def properties(self): return self._get_proxy('property') @classmethod @allow_proxy def create(cls, file_name='', parent_id='root', file_data='', params=None, headers=None, parent_resource=None, configuration=None): """ This handles file uploads. `file_data` can be either a string with file data in it or a file-like object. 
""" all_headers = {<|fim▁hole|> }), 'Content-Type': 'application/octet-stream', } all_headers.update(headers or {}) response = request(cls._api_session.post, cls.list_path(parent_resource), data=file_data, params=params, headers=all_headers, configuration=configuration) return cls.create_from_data( response.json(), parent_resource=parent_resource, configuration=configuration) def update(self, file_data='', params=None, headers=None): """ This overwites the file specified by 'file_id' with the contents of `file_data`. `file_data` can be either a string with file data in it or a file-like object. """ headers = headers or {} headers.setdefault('Content-Type', 'application/octet-stream') response = request(self._api_session.put, self.detail_path(), data=file_data, params=params, headers=headers, configuration=self._configuration) self.populate(response.json()) return True def contents(self, headers=None): """ This handles file downloads. It returns a requests.Response object with contents: from contextlib import closing with closing(account.files(id=file_id).contents()) as r: # Do things with response here data = r.content For more information, see the documentation for requests.Response's Body content workflow. """ response = request(self._api_session.get, "%s/contents" % self.detail_path(), configuration=self._configuration, headers=headers, stream=True) return response def copy_file(self, headers=None, **data): return self._copy(headers=headers, **data) @classmethod @allow_proxy def upload_url(cls, data=None, params=None, parent_resource=None, configuration=None, headers=None): upload_url_path = "%s/%s" % (cls.list_path(parent_resource), 'upload_url') response = request(cls._api_session.post, upload_url_path, configuration=configuration, data=data or {}, params=params or {}, headers=headers) return response.json() class Folder(AccountBaseResource, RetrieveMixin, DeleteMixin, UpdateMixin, CreateMixin, CopyMixin, FileSystem): _path_segment = 'storage/folders' def __init__(self, *args, **kwargs): kwargs.setdefault('id', 'root') super(Folder, self).__init__(*args, **kwargs) def contents(self, headers=None): response = request(self._api_session.get, "%s/contents" % self.detail_path(), configuration=self._configuration, headers=headers) data = self.create_from_data( response.json(), parent_resource=self._parent_resource, configuration=self._configuration) return AnnotatedList(data) def copy_folder(self, headers=None, **data): return self._copy(headers=headers, **data) class Link(AccountBaseResource, ReadMixin, WriteMixin): _path_segment = 'storage/links' class Search(AccountBaseResource, ListMixin): _path_segment = 'storage/search' class Recent(AccountBaseResource, ListMixin): _path_segment = 'storage/recent' class Calendar(AccountBaseResource, ReadMixin, WriteMixin, Proxy): _path_segment = 'cal/calendars' @property def events(self): return self._get_proxy('calendar_events') class CalendarEvents(Calendar): _path_segment = 'events' class Events(AccountBaseResource, ListMixin): _path_segment = 'events' @classmethod @allow_proxy def latest_cursor(cls, parent_resource=None, configuration=None, headers=None): response = request(cls._api_session.get, "%s/latest" % cls.list_path(parent_resource), configuration=configuration, headers=headers) data = response.json() if 'cursor' in data: return data['cursor'] else: return data class Multipart(AccountBaseResource, RetrieveMixin, CreateMixin, DeleteMixin): """ Multipart Uploads. Create the multipart upload first, prior to uploading chunks of data. 
Complete the upload once all chunks have been uploaded. """ _path_segment = 'storage/multipart' def upload_chunk(self, part_number=None, data='', parent_resource=None, configuration=None, headers=None, **params): """ This handles uploading chunks of the file, after a multipart upload has been initiated. `part_number` `data` can be either a string with file data in it or a file-like object. """ params.update({'part_number': part_number}) headers = headers or {} headers.setdefault('Content-Type', 'application/octet-stream') request(self._api_session.put, self.detail_path(), data=data, params=params, headers=headers, configuration=configuration) return True def complete(self, headers=None, **params): """ Completes the multipart upload and returns a File object. """ response = request(self._api_session.post, "%s/complete" % self.detail_path(), params=params, configuration=self._configuration, headers=headers) return File.create_from_data( response.json(), parent_resource=self._parent_resource, configuration=self._configuration) class Permission(FileSystemBaseResource, ListMixin, CreateMixin): _path_segment = 'permissions' @classmethod @allow_proxy def all(cls, parent_resource=None, configuration=None, headers=None, **params): response = request(cls._api_session.get, cls.list_path(parent_resource), configuration=configuration, headers=headers, params=params) response_json = response.json() permissions = response_json.get('permissions') for perm in permissions: perm['type'] = 'permission' response_json['permissions'] = permissions data = cls.create_from_data( response_json, parent_resource=parent_resource, configuration=configuration) return AnnotatedList(data) @classmethod @allow_proxy def create(cls, params=None, parent_resource=None, configuration=None, data=None, headers=None): return super(Permission, cls).create(params=params, parent_resource=parent_resource, configuration=configuration, method='put', data=data, headers=headers) @classmethod @allow_proxy def update(cls, params=None, parent_resource=None, configuration=None, data=None, headers=None): return super(Permission, cls).create(params=params, parent_resource=parent_resource, configuration=configuration, method='patch', data=data, headers=headers) class Property(FileSystemBaseResource, ListMixin, CreateMixin): _path_segment = 'properties' @classmethod @allow_proxy def update(cls, parent_resource=None, configuration=None, headers=None, data=None, **params): """ Updates custom properties associated with this file. 'data' should be a list of dicts containing key/value pairs. """ return super(Property, cls).create(params=params, parent_resource=parent_resource, configuration=configuration, method='patch', data=data, headers=headers) @classmethod @allow_proxy def delete_all(cls, parent_resource=None, configuration=None, headers=None): """ Deletes all custom properties associated with this file. 
""" request(cls._api_session.delete, cls.list_path(parent_resource), configuration=configuration, headers=headers) return True class User(AccountBaseResource, ReadMixin): _path_segment = 'team/users' def get_groups(self, headers=None, **params): response = request(self._api_session.get, "%s/%s" % (self.detail_path(), "memberships"), configuration=self._configuration, headers=headers, params=params) data = Group.create_from_data( response.json(), parent_resource=self._parent_resource, configuration=self._configuration) return AnnotatedList(data) class Group(AccountBaseResource, ReadMixin): _path_segment = 'team/groups' def get_users(self, headers=None, **params): response = request(self._api_session.get, "%s/%s" % (self.detail_path(), "members"), configuration=self._configuration, headers=headers, params=params) data = User.create_from_data( response.json(), parent_resource=self._parent_resource, configuration=self._configuration) return AnnotatedList(data) class CRMObject(AccountBaseResource, ListMixin, CreateMixin, RetrieveMixin, UpdateMixin, DeleteMixin): _path_segment = 'crm/objects' raw_type = None def __init__(self, *args, **kwargs): super(CRMObject, self).__init__(*args, **kwargs) @classmethod @allow_proxy def all(cls, parent_resource=None, configuration=None, headers=None, **params): if cls.raw_type is not None: params['raw_type'] = cls.raw_type return super(CRMObject, cls).all(parent_resource=parent_resource, configuration=configuration, headers=headers, **params) @classmethod @allow_proxy def create(cls, params=None, parent_resource=None, configuration=None, headers=None, method='post', data=None): params = {} if params is None else params if cls.raw_type is not None: params['raw_type'] = cls.raw_type return super(CRMObject, cls).create(params=params, parent_resource=parent_resource, configuration=configuration, headers=headers, method=method, data=data) @classmethod @allow_proxy def retrieve(cls, id, parent_resource=None, configuration=None, headers=None, **params): if cls.raw_type is not None: params['raw_type'] = cls.raw_type return super(CRMObject, cls).retrieve(id, parent_resource=parent_resource, configuration=configuration, headers=headers, **params) def save(self, **params): # TODO: change serializer if self.raw_type is not None: params['raw_type'] = self.raw_type super(CRMObject, self).save(**params) def delete(self, **params): if self.raw_type is not None: params['raw_type'] = self.raw_type super(CRMObject, self).delete(**params) class CRMAccount(CRMObject): _path_segment = 'crm/accounts' raw_type = 'Account' class CRMContact(CRMObject): _path_segment = 'crm/contacts' raw_type = 'Contact' class CRMLead(CRMObject): _path_segment = 'crm/leads' raw_type = 'Lead' class CRMOpportunity(CRMObject): _path_segment = 'crm/opportunities' raw_type = 'Opportunity' class CRMCampaign(CRMObject): _path_segment = 'crm/campaigns' raw_type = 'Campaign' class CRMTask(CRMObject): _path_segment = 'crm/tasks' raw_type = 'Task' class CRMBatchRequest(AccountBaseResource, CreateMixin): _path_segment = 'crm/batch' class CRMSearch(AccountBaseResource, ListMixin): _path_segment = 'crm/search' class Application(BaseResource, ReadMixin, WriteMixin, Proxy): def __init__(self, *args, **kwargs): super(Application, self).__init__(*args, **kwargs) @classmethod def list_path(cls, parent_resource): return 'applications' @property def apikeys(self): return self._get_proxy('apikey') @property def webhooks(self): return self._get_proxy('webhook') class ApplicationBaseResource(BaseResource): 
_parent_resource_class = Application def __init__(self, *applications, **kwargs): if applications: kwargs['parent_resource'] = applications[0] super(ApplicationBaseResource, self).__init__(**kwargs) @classmethod def list_path(cls, application): application_path = application.detail_path() return "%s/%s" % (application_path, cls._path_segment) class ApiKey(ApplicationBaseResource, ListMixin, CreateMixin, DeleteMixin): _path_segment = 'apikeys' def detail_path(self): if not self['key']: raise KException("The detail_path cannot be obtained since the key" " is unknown.") return "%s/%s" % (self.list_path(self._parent_resource), self['key']) class WebHook(ApplicationBaseResource, ListMixin, CreateMixin, RetrieveMixin, DeleteMixin): _path_segment = 'webhooks' def detail_path(self): if not self['id']: raise KException("The detail_path cannot be obtained since the id " "is unknown.") return "%s/%s" % (self.list_path(self._parent_resource), self['id']) resources = { 'account': Account, 'file': File, 'folder': Folder, 'link': Link, 'search': Search, 'recent': Recent, 'calendars': Calendar, 'calendar_events': CalendarEvents, 'events': Events, 'multipart': Multipart, 'permission': Permission, 'property': Property, 'user': User, 'group': Group, # CRM Endpoint 'crm_object': CRMObject, 'crm_account': CRMAccount, 'crm_contact': CRMContact, 'crm_lead': CRMLead, 'crm_opportunity': CRMOpportunity, 'crm_campaign': CRMCampaign, 'crm_task': CRMTask, 'crm_batch': CRMBatchRequest, 'crm_search': CRMSearch, # Application Endpoint 'application': Application, 'apikey': ApiKey, 'webhook': WebHook, } resource_types = {v: k for k, v in six.iteritems(resources)}<|fim▁end|>
'X-Kloudless-Metadata': json.dumps({ 'name': file_name, 'parent_id': parent_id,
<|file_name|>main.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ Date: 2/2/2017 Team: Satoshi Nakamoto @Authors: Alex Levering and Hector Muro<|fim▁hole|>* NLTK * Folium * Geocoder * psycopg2 TO DO BEFOREHAND: The following steps are non-automatable and have to be performed manually. * Have the NLTK vader lexicon locally (nltk.download("vader_lexicon")) * Have PostGIS installed on PostgreSQL * Set the file paths specified below to wherever your folder is * Upgrade folium to the latest version (0.2.1) """ # Naming options for tables, intermediates and outputs are available in the wrapper. if __name__ == "__main__": """ The tool is not supplied with Tweets out-of-the-box. Set 'gather_data' to True and leave it running for a while. If loop is false it will terminate in a minute or so and create a map from the results automatically This tool was tested and intended for OSGeo Live installs used in the GeoScripting course. """ import tweetAnalysisWrapper tweetAnalysisWrapper.performTweetResearch(folder_path = r"/home/user/git/SatoshiNakamotoGeoscripting/Final_assignment", defaultdb = "postgres", # Making a new database requires connecting to an existing database user = "user", # PostgreSQL username (user is default value on OSGeo Live) password = "user", # PostgreSQL password (user is default on OSGeo Live) ouputdb = "tweet_research", # Specify the output database that is to be created tweet_table_name = "tweets", # Output table where the Tweets are stored gather_data = True, # When True: Will gather data from the Twitter stream search_terms = ["Trump"], # Twitter terms to search for loop_gathering = False, # When True: Will not stop gathering when terminated - use for prolonged gathering APP_KEY = "", # Get these from developer.twitter.com when you make your application APP_SECRET = "", OAUTH_TOKEN = "", OAUTH_TOKEN_SECRET = "")<|fim▁end|>
Non-standard dependencies: * Twython
<|file_name|>response.js<|end_file_name|><|fim▁begin|>'use strict'; // Load modules const Http = require('http'); const Stream = require('stream'); // Declare internals const internals = {}; exports = module.exports = class Response extends Http.ServerResponse { constructor(req, onEnd) { super({ method: req.method, httpVersionMajor: 1, httpVersionMinor: 1 }); this._shot = { headers: null, trailers: {}, payloadChunks: [] }; this._headers = {}; // This forces node@8 to always render the headers this.assignSocket(internals.nullSocket()); this.once('finish', () => { const res = internals.payload(this); res.raw.req = req; process.nextTick(() => onEnd(res)); }); } writeHead() { const result = super.writeHead.apply(this, arguments); this._shot.headers = Object.assign({}, this._headers); // Should be .getHeaders() since node v7.7 // Add raw headers ['Date', 'Connection', 'Transfer-Encoding'].forEach((name) => { const regex = new RegExp('\\r\\n' + name + ': ([^\\r]*)\\r\\n'); const field = this._header.match(regex); if (field) { this._shot.headers[name.toLowerCase()] = field[1]; } }); return result; } write(data, encoding, callback) { super.write(data, encoding, callback); this._shot.payloadChunks.push(new Buffer(data, encoding)); return true; // Write always returns false when disconnected } end(data, encoding, callback) { if (data) { this.write(data, encoding); } super.end(callback); this.emit('finish'); } destroy() { } addTrailers(trailers) { for (const key in trailers) { this._shot.trailers[key.toLowerCase().trim()] = trailers[key].toString().trim(); } } }; internals.payload = function (response) { // Prepare response object const res = { raw: { res: response },<|fim▁hole|> trailers: {} }; // Prepare payload and trailers const rawBuffer = Buffer.concat(response._shot.payloadChunks); res.rawPayload = rawBuffer; res.payload = rawBuffer.toString(); res.trailers = response._shot.trailers; return res; }; // Throws away all written data to prevent response from buffering payload internals.nullSocket = function () { return new Stream.Writable({ write(chunk, encoding, callback) { setImmediate(callback); } }); };<|fim▁end|>
headers: response._shot.headers, statusCode: response.statusCode, statusMessage: response.statusMessage,
<|file_name|>publish-card.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, Input } from '@angular/core';<|fim▁hole|> @Component({ moduleId: module.id, selector: 'app-publish-card', templateUrl: 'publish-card.component.html', styleUrls: ['publish-card.component.css'], directives: [ROUTER_DIRECTIVES] }) export class PublishCardComponent implements OnInit { @Input() publishingItem: PublishingItem; constructor(private _publishingItemsService: PublishingItemsService) {} ngOnInit() { } public delete() { // NOTE: normally we would have to emit an event and notify the container element about // the deletion but since we are using websockets, the server will send a notification instead. this._publishingItemsService.delete(this.publishingItem); } }<|fim▁end|>
import { ROUTER_DIRECTIVES } from '@angular/router'; import { PublishingItemsService, PublishingItem } from '../shared/index';
<|file_name|>pip3.py<|end_file_name|><|fim▁begin|>import subprocess def release(): <|fim▁hole|><|fim▁end|>
subprocess.call(["python3", "setup.py", "sdist", "upload"])
<|file_name|>maternal_arv_post_mod_manager.py<|end_file_name|><|fim▁begin|>from django.db import models <|fim▁hole|> self, arv_code, report_datetime, visit_instance, appt_status, visit_definition_code, subject_identifier_as_pk): MaternalVisit = models.get_model('mb_maternal', 'MaternalVisit') MaternalArvPost = models.get_model('mb_maternal', 'MaternalArvPost') maternal_visit = MaternalVisit.objects.get_by_natural_key( report_datetime, visit_instance, appt_status, visit_definition_code, subject_identifier_as_pk) maternal_arv_post = MaternalArvPost.objects.get(maternal_visit=maternal_visit) return self.get(arv_code=arv_code, maternal_arv_post=maternal_arv_post)<|fim▁end|>
class MaternalArvPostModManager(models.Manager): def get_by_natural_key(
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2013 Rackspace # # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and<|fim▁hole|> __all__ = [ 'get_pool', 'run_function', 'join_pool' ] import os import sys try: import gevent gevent gevent_available = True except ImportError: gevent_available = False DEFAULT_BACKEND = 'noop' BACKEND = DEFAULT_BACKEND USE_GEVENT = os.getenv('RAXCLI_USE_GEVENT') if USE_GEVENT and gevent_available: BACKEND = 'gevent' module_name = 'raxcli.concurrency.backends.%s_backend' % (BACKEND) current_module = sys.modules[__name__] backend_module = __import__(module_name, fromlist=['raxcli.concurrency.backends']) for key in __all__: func = getattr(backend_module, key) setattr(current_module, key, func) backend_initialize = getattr(backend_module, 'initialize') backend_initialize()<|fim▁end|>
# limitations under the License.
<|file_name|>matches.component.ts<|end_file_name|><|fim▁begin|>import {Component} from '@angular/core'; <|fim▁hole|>@Component({ selector: 'matches', template: `Matches page content` }) export class MatchesComponent { constructor() {} }<|fim▁end|>
<|file_name|>private_export.d.ts<|end_file_name|><|fim▁begin|>import * as browser from './browser'; import * as browserDomAdapter from './browser/browser_adapter'; import * as location from './browser/location/browser_platform_location'; import * as testability from './browser/testability'; import * as ng_probe from './dom/debug/ng_probe'; import * as dom_adapter from './dom/dom_adapter'; import * as dom_renderer from './dom/dom_renderer'; import * as dom_events from './dom/events/dom_events'; import * as hammer_gesture from './dom/events/hammer_gestures'; import * as key_events from './dom/events/key_events'; import * as shared_styles_host from './dom/shared_styles_host'; export declare var __platform_browser_private__: { _BrowserPlatformLocation?: location.BrowserPlatformLocation; BrowserPlatformLocation: typeof location.BrowserPlatformLocation; _DomAdapter?: dom_adapter.DomAdapter; DomAdapter: typeof dom_adapter.DomAdapter; _BrowserDomAdapter?: browserDomAdapter.BrowserDomAdapter; BrowserDomAdapter: typeof browserDomAdapter.BrowserDomAdapter; _BrowserGetTestability?: testability.BrowserGetTestability; BrowserGetTestability: typeof testability.BrowserGetTestability;<|fim▁hole|> DomRootRenderer: typeof dom_renderer.DomRootRenderer; _DomRootRenderer_?: dom_renderer.DomRootRenderer; DomRootRenderer_: typeof dom_renderer.DomRootRenderer_; _DomSharedStylesHost?: shared_styles_host.DomSharedStylesHost; DomSharedStylesHost: typeof shared_styles_host.DomSharedStylesHost; _SharedStylesHost?: shared_styles_host.SharedStylesHost; SharedStylesHost: typeof shared_styles_host.SharedStylesHost; ELEMENT_PROBE_PROVIDERS: typeof ng_probe.ELEMENT_PROBE_PROVIDERS; _DomEventsPlugin?: dom_events.DomEventsPlugin; DomEventsPlugin: typeof dom_events.DomEventsPlugin; _KeyEventsPlugin?: key_events.KeyEventsPlugin; KeyEventsPlugin: typeof key_events.KeyEventsPlugin; _HammerGesturesPlugin?: hammer_gesture.HammerGesturesPlugin; HammerGesturesPlugin: typeof hammer_gesture.HammerGesturesPlugin; initDomAdapter: typeof browser.initDomAdapter; INTERNAL_BROWSER_PLATFORM_PROVIDERS: typeof browser.INTERNAL_BROWSER_PLATFORM_PROVIDERS; BROWSER_SANITIZATION_PROVIDERS: typeof browser.BROWSER_SANITIZATION_PROVIDERS; };<|fim▁end|>
getDOM: typeof dom_adapter.getDOM; setRootDomAdapter: typeof dom_adapter.setRootDomAdapter; _DomRootRenderer?: dom_renderer.DomRootRenderer;
<|file_name|>server.go<|end_file_name|><|fim▁begin|>// Copyright 2015, Google Inc. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. // Package grpcvtgateservice provides the gRPC glue for vtgate package grpcvtgateservice import ( "google.golang.org/grpc" mproto "github.com/youtube/vitess/go/mysql/proto" "github.com/youtube/vitess/go/vt/callerid" "github.com/youtube/vitess/go/vt/callinfo" "github.com/youtube/vitess/go/vt/key" "github.com/youtube/vitess/go/vt/servenv" tproto "github.com/youtube/vitess/go/vt/tabletserver/proto" "github.com/youtube/vitess/go/vt/topo" "github.com/youtube/vitess/go/vt/vtgate" "github.com/youtube/vitess/go/vt/vtgate/proto" "github.com/youtube/vitess/go/vt/vtgate/vtgateservice" "golang.org/x/net/context" pb "github.com/youtube/vitess/go/vt/proto/vtgate" pbs "github.com/youtube/vitess/go/vt/proto/vtgateservice" ) // VTGate is the public structure that is exported via gRPC type VTGate struct { server vtgateservice.VTGateService } // Execute is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) Execute(ctx context.Context, request *pb.ExecuteRequest) (response *pb.ExecuteResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResult) executeErr := vtg.server.Execute(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.TabletType, proto.ProtoToSession(request.Session), request.NotInTransaction, reply) response = &pb.ExecuteResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Result = mproto.QueryResultToProto3(reply.Result) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteShards is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) ExecuteShards(ctx context.Context, request *pb.ExecuteShardsRequest) (response *pb.ExecuteShardsResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResult) executeErr := vtg.server.ExecuteShards(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, request.Shards, request.TabletType, proto.ProtoToSession(request.Session), request.NotInTransaction, reply) response = &pb.ExecuteShardsResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Result = mproto.QueryResultToProto3(reply.Result) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteKeyspaceIds is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) ExecuteKeyspaceIds(ctx context.Context, request *pb.ExecuteKeyspaceIdsRequest) (response *pb.ExecuteKeyspaceIdsResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResult) executeErr := vtg.server.ExecuteKeyspaceIds(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, 
key.ProtoToKeyspaceIds(request.KeyspaceIds), request.TabletType, proto.ProtoToSession(request.Session), request.NotInTransaction, reply) response = &pb.ExecuteKeyspaceIdsResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Result = mproto.QueryResultToProto3(reply.Result) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteKeyRanges is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) ExecuteKeyRanges(ctx context.Context, request *pb.ExecuteKeyRangesRequest) (response *pb.ExecuteKeyRangesResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResult) executeErr := vtg.server.ExecuteKeyRanges(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, key.ProtoToKeyRanges(request.KeyRanges), request.TabletType, proto.ProtoToSession(request.Session), request.NotInTransaction, reply) response = &pb.ExecuteKeyRangesResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Result = mproto.QueryResultToProto3(reply.Result) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteEntityIds is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) ExecuteEntityIds(ctx context.Context, request *pb.ExecuteEntityIdsRequest) (response *pb.ExecuteEntityIdsResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResult) executeErr := vtg.server.ExecuteEntityIds(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, request.EntityColumnName, proto.ProtoToEntityIds(request.EntityKeyspaceIds), request.TabletType, proto.ProtoToSession(request.Session), request.NotInTransaction, reply) response = &pb.ExecuteEntityIdsResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Result = mproto.QueryResultToProto3(reply.Result) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // ExecuteBatchShards is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) ExecuteBatchShards(ctx context.Context, request *pb.ExecuteBatchShardsRequest) (response *pb.ExecuteBatchShardsResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResultList) executeErr := vtg.server.ExecuteBatchShards(ctx, proto.ProtoToBoundShardQueries(request.Queries), request.TabletType, request.AsTransaction, proto.ProtoToSession(request.Session), reply) response = &pb.ExecuteBatchShardsResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Results = tproto.QueryResultListToProto3(reply.List) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, 
executeErr } // ExecuteBatchKeyspaceIds is the RPC version of // vtgateservice.VTGateService method func (vtg *VTGate) ExecuteBatchKeyspaceIds(ctx context.Context, request *pb.ExecuteBatchKeyspaceIdsRequest) (response *pb.ExecuteBatchKeyspaceIdsResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.QueryResultList) executeErr := vtg.server.ExecuteBatchKeyspaceIds(ctx, proto.ProtoToBoundKeyspaceIdQueries(request.Queries), request.TabletType, request.AsTransaction, proto.ProtoToSession(request.Session), reply) response = &pb.ExecuteBatchKeyspaceIdsResponse{ Error: vtgate.VtGateErrorToVtRPCError(executeErr, reply.Error), } if executeErr == nil { response.Results = tproto.QueryResultListToProto3(reply.List) response.Session = proto.SessionToProto(reply.Session) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, executeErr } // StreamExecute is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) StreamExecute(request *pb.StreamExecuteRequest, stream pbs.Vitess_StreamExecuteServer) (err error) { defer vtg.server.HandlePanic(&err) ctx := callerid.NewContext(callinfo.GRPCCallInfo(stream.Context()), request.CallerId, callerid.NewImmediateCallerID("grpc client")) return vtg.server.StreamExecute(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.TabletType, func(value *proto.QueryResult) error { return stream.Send(&pb.StreamExecuteResponse{ Result: mproto.QueryResultToProto3(value.Result), }) }) } // StreamExecuteShards is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) StreamExecuteShards(request *pb.StreamExecuteShardsRequest, stream pbs.Vitess_StreamExecuteShardsServer) (err error) { defer vtg.server.HandlePanic(&err) ctx := callerid.NewContext(callinfo.GRPCCallInfo(stream.Context()), request.CallerId, callerid.NewImmediateCallerID("grpc client")) return vtg.server.StreamExecuteShards(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, request.Shards, request.TabletType, func(value *proto.QueryResult) error { return stream.Send(&pb.StreamExecuteShardsResponse{ Result: mproto.QueryResultToProto3(value.Result), }) }) } // StreamExecuteKeyspaceIds is the RPC version of // vtgateservice.VTGateService method func (vtg *VTGate) StreamExecuteKeyspaceIds(request *pb.StreamExecuteKeyspaceIdsRequest, stream pbs.Vitess_StreamExecuteKeyspaceIdsServer) (err error) { defer vtg.server.HandlePanic(&err) ctx := callerid.NewContext(callinfo.GRPCCallInfo(stream.Context()), request.CallerId, callerid.NewImmediateCallerID("grpc client")) return vtg.server.StreamExecuteKeyspaceIds(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, key.ProtoToKeyspaceIds(request.KeyspaceIds), request.TabletType, func(value *proto.QueryResult) error { return stream.Send(&pb.StreamExecuteKeyspaceIdsResponse{ Result: mproto.QueryResultToProto3(value.Result), }) }) } // StreamExecuteKeyRanges is the RPC version of // vtgateservice.VTGateService method func (vtg *VTGate) StreamExecuteKeyRanges(request *pb.StreamExecuteKeyRangesRequest, stream pbs.Vitess_StreamExecuteKeyRangesServer) (err error) { defer vtg.server.HandlePanic(&err) ctx := callerid.NewContext(callinfo.GRPCCallInfo(stream.Context()), request.CallerId, callerid.NewImmediateCallerID("grpc client")) 
return vtg.server.StreamExecuteKeyRanges(ctx, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.Keyspace, key.ProtoToKeyRanges(request.KeyRanges), request.TabletType, func(value *proto.QueryResult) error { return stream.Send(&pb.StreamExecuteKeyRangesResponse{ Result: mproto.QueryResultToProto3(value.Result), }) }) } <|fim▁hole|> request.CallerId, callerid.NewImmediateCallerID("grpc client")) outSession := new(proto.Session) beginErr := vtg.server.Begin(ctx, outSession) response = &pb.BeginResponse{ Error: vtgate.VtGateErrorToVtRPCError(beginErr, ""), } if beginErr == nil { response.Session = proto.SessionToProto(outSession) return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, beginErr } // Commit is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) Commit(ctx context.Context, request *pb.CommitRequest) (response *pb.CommitResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) commitErr := vtg.server.Commit(ctx, proto.ProtoToSession(request.Session)) response = &pb.CommitResponse{ Error: vtgate.VtGateErrorToVtRPCError(commitErr, ""), } if commitErr == nil { return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, commitErr } // Rollback is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) Rollback(ctx context.Context, request *pb.RollbackRequest) (response *pb.RollbackResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) rollbackErr := vtg.server.Rollback(ctx, proto.ProtoToSession(request.Session)) response = &pb.RollbackResponse{ Error: vtgate.VtGateErrorToVtRPCError(rollbackErr, ""), } if rollbackErr == nil { return response, nil } if *vtgate.RPCErrorOnlyInReply { return response, nil } return nil, rollbackErr } // SplitQuery is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) SplitQuery(ctx context.Context, request *pb.SplitQueryRequest) (response *pb.SplitQueryResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx), request.CallerId, callerid.NewImmediateCallerID("grpc client")) reply := new(proto.SplitQueryResult) if err := vtg.server.SplitQuery(ctx, request.Keyspace, string(request.Query.Sql), tproto.Proto3ToBindVariables(request.Query.BindVariables), request.SplitColumn, int(request.SplitCount), reply); err != nil { return nil, err } return proto.SplitQueryPartsToProto(reply.Splits), nil } // GetSrvKeyspace is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) GetSrvKeyspace(ctx context.Context, request *pb.GetSrvKeyspaceRequest) (response *pb.GetSrvKeyspaceResponse, err error) { defer vtg.server.HandlePanic(&err) sk, err := vtg.server.GetSrvKeyspace(ctx, request.Keyspace) if err != nil { return nil, err } return &pb.GetSrvKeyspaceResponse{ SrvKeyspace: topo.SrvKeyspaceToProto(sk), }, nil } func init() { vtgate.RegisterVTGates = append(vtgate.RegisterVTGates, func(vtGate vtgateservice.VTGateService) { if servenv.GRPCCheckServiceMap("vtgateservice") { pbs.RegisterVitessServer(servenv.GRPCServer, &VTGate{vtGate}) } }) } // RegisterForTest registers the gRPC implementation on the gRPC // server. Useful for unit tests only, for real use, the init() // function does the registration. 
func RegisterForTest(s *grpc.Server, service vtgateservice.VTGateService) { pbs.RegisterVitessServer(s, &VTGate{service}) }<|fim▁end|>
// Begin is the RPC version of vtgateservice.VTGateService method func (vtg *VTGate) Begin(ctx context.Context, request *pb.BeginRequest) (response *pb.BeginResponse, err error) { defer vtg.server.HandlePanic(&err) ctx = callerid.NewContext(callinfo.GRPCCallInfo(ctx),
<|file_name|>ConstraintDefinitionsTest.java<|end_file_name|><|fim▁begin|>/** * Jakarta Bean Validation TCK * * License: Apache License, Version 2.0 * See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>. */ package org.hibernate.beanvalidation.tck.tests.constraints.constraintdefinition; import static org.hibernate.beanvalidation.tck.util.ConstraintViolationAssert.assertNoViolations; import static org.hibernate.beanvalidation.tck.util.ConstraintViolationAssert.assertThat; import static org.hibernate.beanvalidation.tck.util.ConstraintViolationAssert.violationOf; import static org.testng.Assert.assertEquals; import java.util.Set; import jakarta.validation.ConstraintViolation; import jakarta.validation.Validator; import jakarta.validation.constraints.Size; import jakarta.validation.groups.Default; import jakarta.validation.metadata.ConstraintDescriptor; import org.hibernate.beanvalidation.tck.beanvalidation.Sections; import org.hibernate.beanvalidation.tck.tests.AbstractTCKTest; import org.hibernate.beanvalidation.tck.util.TestUtil; import org.jboss.arquillian.container.test.api.Deployment; import org.jboss.shrinkwrap.api.spec.WebArchive;<|fim▁hole|>import org.jboss.test.audit.annotations.SpecAssertion; import org.jboss.test.audit.annotations.SpecVersion; import org.testng.annotations.Test; /** * @author Hardy Ferentschik * @author Guillaume Smet */ @SpecVersion(spec = "beanvalidation", version = "3.0.0") public class ConstraintDefinitionsTest extends AbstractTCKTest { @Deployment public static WebArchive createTestArchive() { return webArchiveBuilder() .withTestClassPackage( ConstraintDefinitionsTest.class ) .build(); } @Test @SpecAssertion(section = Sections.CONSTRAINTSDEFINITIONIMPLEMENTATION_CONSTRAINTDEFINITION_PROPERTIES, id = "a") @SpecAssertion(section = Sections.CONSTRAINTSDEFINITIONIMPLEMENTATION_MULTIPLECONSTRAINTS, id = "a") public void testConstraintWithCustomAttributes() { Validator validator = TestUtil.getValidatorUnderTest(); Set<ConstraintDescriptor<?>> descriptors = validator.getConstraintsForClass( Person.class ) .getConstraintsForProperty( "lastName" ) .getConstraintDescriptors(); assertEquals( descriptors.size(), 2, "There should be two constraints on the lastName property." ); for ( ConstraintDescriptor<?> descriptor : descriptors ) { assertEquals( descriptor.getAnnotation().annotationType().getName(), AlwaysValid.class.getName(), "Wrong annotation type." ); } Set<ConstraintViolation<Person>> constraintViolations = validator.validate( new Person( "John", "Doe" ) ); assertThat( constraintViolations ).containsOnlyViolations( violationOf( AlwaysValid.class ) ); } @Test @SpecAssertion(section = Sections.CONSTRAINTSDEFINITIONIMPLEMENTATION_MULTIPLECONSTRAINTS, id = "a") @SpecAssertion(section = Sections.CONSTRAINTSDEFINITIONIMPLEMENTATION_MULTIPLECONSTRAINTS, id = "b") public void testRepeatableConstraint() { Validator validator = TestUtil.getValidatorUnderTest(); Set<ConstraintDescriptor<?>> descriptors = validator.getConstraintsForClass( Movie.class ) .getConstraintsForProperty( "title" ) .getConstraintDescriptors(); assertEquals( descriptors.size(), 2, "There should be two constraints on the title property." ); for ( ConstraintDescriptor<?> descriptor : descriptors ) { assertEquals( descriptor.getAnnotation().annotationType().getName(), Size.class.getName(), "Wrong annotation type." 
); } Set<ConstraintViolation<Movie>> constraintViolations = validator.validate( new Movie( "Title" ) ); assertNoViolations( constraintViolations ); constraintViolations = validator.validate( new Movie( "A" ) ); assertThat( constraintViolations ).containsOnlyViolations( violationOf( Size.class ) ); constraintViolations = validator.validate( new Movie( "A movie title far too long that does not respect the constraint" ) ); assertThat( constraintViolations ).containsOnlyViolations( violationOf( Size.class ) ); } @Test @SpecAssertion(section = Sections.CONSTRAINTSDEFINITIONIMPLEMENTATION_CONSTRAINTDEFINITION_PROPERTIES_GROUPS, id = "d") public void testDefaultGroupAssumedWhenNoGroupsSpecified() { Validator validator = TestUtil.getValidatorUnderTest(); ConstraintDescriptor<?> descriptor = validator.getConstraintsForClass( Person.class ) .getConstraintsForProperty( "firstName" ) .getConstraintDescriptors() .iterator() .next(); Set<Class<?>> groups = descriptor.getGroups(); assertEquals( groups.size(), 1, "The group set should only contain one entry." ); assertEquals( groups.iterator().next(), Default.class, "The Default group should be returned." ); } }<|fim▁end|>
<|file_name|>sendannouncementrequest.cpp<|end_file_name|><|fim▁begin|>/* Copyright 2013-2021 Paul Colby This file is part of QtAws. QtAws is free software: you can redistribute it and/or modify it under the terms of the GNU Lesser General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. QtAws is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details. You should have received a copy of the GNU Lesser General Public License along with the QtAws. If not, see <http://www.gnu.org/licenses/>. */ #include "sendannouncementrequest.h" #include "sendannouncementrequest_p.h" #include "sendannouncementresponse.h" #include "alexaforbusinessrequest_p.h" namespace QtAws { namespace AlexaForBusiness { /*! * \class QtAws::AlexaForBusiness::SendAnnouncementRequest * \brief The SendAnnouncementRequest class provides an interface for AlexaForBusiness SendAnnouncement requests. * * \inmodule QtAwsAlexaForBusiness * * Alexa for Business helps you use Alexa in your organization. Alexa for Business provides you with the tools to manage * Alexa devices, enroll your users, and assign skills, at scale. You can build your own context-aware voice skills using * the Alexa Skills Kit and the Alexa for Business API operations. You can also make these available as private skills for * your organization. Alexa for Business makes it efficient to voice-enable your products and services, thus providing * context-aware voice experiences for your customers. Device makers building with the Alexa Voice Service (AVS) can create * fully integrated solutions, register their products with Alexa for Business, and manage them as shared devices in their * organization. * * \sa AlexaForBusinessClient::sendAnnouncement */ /*! * Constructs a copy of \a other. */ SendAnnouncementRequest::SendAnnouncementRequest(const SendAnnouncementRequest &other) : AlexaForBusinessRequest(new SendAnnouncementRequestPrivate(*other.d_func(), this)) { } /*! * Constructs a SendAnnouncementRequest object. */ SendAnnouncementRequest::SendAnnouncementRequest() : AlexaForBusinessRequest(new SendAnnouncementRequestPrivate(AlexaForBusinessRequest::SendAnnouncementAction, this)) { } /*! * \reimp */ bool SendAnnouncementRequest::isValid() const { return false; } /*! * Returns a SendAnnouncementResponse object to process \a reply. * * \sa QtAws::Core::AwsAbstractClient::send */ QtAws::Core::AwsAbstractResponse * SendAnnouncementRequest::response(QNetworkReply * const reply) const { return new SendAnnouncementResponse(*this, reply); } /*! * \class QtAws::AlexaForBusiness::SendAnnouncementRequestPrivate * \brief The SendAnnouncementRequestPrivate class provides private implementation for SendAnnouncementRequest. * \internal * * \inmodule QtAwsAlexaForBusiness */ /*! * Constructs a SendAnnouncementRequestPrivate object for AlexaForBusiness \a action, * with public implementation \a q. */<|fim▁hole|>SendAnnouncementRequestPrivate::SendAnnouncementRequestPrivate( const AlexaForBusinessRequest::Action action, SendAnnouncementRequest * const q) : AlexaForBusinessRequestPrivate(action, q) { } /*! * Constructs a copy of \a other, with public implementation \a q. * * This copy-like constructor exists for the benefit of the SendAnnouncementRequest * class' copy constructor. 
*/ SendAnnouncementRequestPrivate::SendAnnouncementRequestPrivate( const SendAnnouncementRequestPrivate &other, SendAnnouncementRequest * const q) : AlexaForBusinessRequestPrivate(other, q) { } } // namespace AlexaForBusiness } // namespace QtAws<|fim▁end|>
<|file_name|>infer.test.ts<|end_file_name|><|fim▁begin|>/// <reference types="@sanity/types/parts" /> import Schema from '@sanity/schema' import {ObjectSchemaType, Rule, SanityDocument} from '@sanity/types' import createSchema from 'part:@sanity/base/schema-creator' import client from 'part:@sanity/base/client' import inferFromSchema from '../src/inferFromSchema' import validateDocument from '../src/validateDocument' jest.mock('part:@sanity/base/client', () => { const mockClient = { fetch: jest.fn(), withConfig: jest.fn(() => mockClient), } return mockClient }) describe('schema validation inference', () => { describe('object with `options.list` and `value` field', () => { const listOptions = [ {value: '#f00', title: 'Red'}, {value: '#0f0', title: 'Green'}, {value: '#00f', title: 'Blue'}, ] const schema = Schema.compile({ types: [ { name: 'colorList', type: 'object', fields: [ {name: 'value', type: 'string'}, {name: 'title', type: 'string'}, ], options: { list: listOptions, }, }, ], }) test('allowed value', async () => { const type = inferFromSchema(schema).get('colorList')<|fim▁hole|> await expectNoError(type.validation as Rule[], listOptions[0]) }) test('disallowed value', async () => { const type = inferFromSchema(schema).get('colorList') await expectError( type.validation as Rule[], {value: '#ccc', title: 'Gray'}, 'Value did not match any allowed value' ) }) }) describe('field validations', () => { const fieldValidationInferReproDoc = { name: 'fieldValidationInferReproDoc', type: 'document', title: 'FieldValidationRepro', // eslint-disable-next-line @typescript-eslint/no-shadow validation: (Rule: Rule) => Rule.fields({ stringField: (fieldRule) => fieldRule.required(), }), fields: [ { name: 'stringField', type: 'string', title: 'Field of someObjectType with validation', description: 'First field should be required', }, ], } const schema = Schema.compile({ types: [fieldValidationInferReproDoc], }) test('field validations defined on an object type does not affect the field type validation', () => { const documentType = inferFromSchema(schema).get( 'fieldValidationInferReproDoc' ) as ObjectSchemaType const fieldWithoutValidation = documentType.fields.find( (field) => field.name === 'stringField' ) // The first field should only have the validation rules that comes with its type expect( (fieldWithoutValidation?.type.validation as Rule[]).flatMap( // eslint-disable-next-line dot-notation (validation) => validation['_rules'] ) ).toEqual([{flag: 'type', constraint: 'String'}]) }) }) describe('slug validation', () => { const slugField = { type: 'document', name: 'documentWithSlug', title: 'Document with Slug', fields: [ { name: 'slugField', type: 'slug', }, ], } const schema = createSchema({ types: [slugField], }) const mockDocument: SanityDocument = { _id: 'mockDocument', _type: 'documentWithSlug', slugField: {current: 'example-value'}, _createdAt: '2021-08-26T18:47:55.497Z', _updatedAt: '2021-08-26T18:47:55.497Z', _rev: 'example-rev', } afterEach(() => { ;(client.fetch as jest.Mock).mockReset() }) test('slug is valid if uniqueness queries returns true', async () => { ;(client.fetch as jest.Mock).mockImplementation(() => Promise.resolve( // return true to mock a unique result (valid) true ) ) await expect(validateDocument(mockDocument, schema)).resolves.toEqual([]) expect(client.fetch).toHaveBeenCalledTimes(1) expect((client.fetch as jest.Mock).mock.calls[0]).toEqual([ '!defined(*[_type == $docType && !(_id in [$draft, $published]) && slugField.current == $slug][0]._id)', { docType: 
'documentWithSlug', draft: 'drafts.mockDocument', published: 'mockDocument', slug: 'example-value', }, { tag: 'validation.slug-is-unique', }, ]) }) test('slug is invalid if uniqueness queries returns false', async () => { ;(client.fetch as jest.Mock).mockReset() ;(client.fetch as jest.Mock).mockImplementation(() => Promise.resolve( // return false to mock a non-unique result (invalid) false ) ) await expect(validateDocument(mockDocument, schema)).resolves.toMatchObject([ { type: 'validation', path: ['slugField'], level: 'error', item: {message: 'Slug is already in use', paths: []}, }, ]) expect(client.fetch).toHaveBeenCalledTimes(1) expect((client.fetch as jest.Mock).mock.calls[0]).toEqual([ '!defined(*[_type == $docType && !(_id in [$draft, $published]) && slugField.current == $slug][0]._id)', { docType: 'documentWithSlug', draft: 'drafts.mockDocument', published: 'mockDocument', slug: 'example-value', }, { tag: 'validation.slug-is-unique', }, ]) }) }) describe('reference validation', () => { const documentWithReference = { type: 'document', name: 'documentWithReference', title: 'Document with Reference', fields: [ { name: 'referenceField', type: 'reference', to: [{type: 'documentWithReference'}], }, { name: 'referenceFieldWeak', type: 'reference', weak: true, to: [{type: 'documentWithReference'}], }, ], } const schema = createSchema({ types: [documentWithReference], }) const mockDocument: SanityDocument = { _id: 'mockDocument', _type: 'documentWithReference', _createdAt: '2021-08-26T18:47:55.497Z', _updatedAt: '2021-08-26T18:47:55.497Z', _rev: 'example-rev', } afterEach(() => { ;(client.fetch as jest.Mock).mockReset() }) test('referenced document must exist (unless weak)', async () => { const mockGetDocumentExists = jest.fn(() => Promise.resolve(false)) await expect( validateDocument( { ...mockDocument, referenceField: { _ref: 'example-id', }, }, schema, {getDocumentExists: mockGetDocumentExists} ) ).resolves.toMatchObject([ { item: {message: /.+/}, level: 'error', path: ['referenceField'], type: 'validation', }, ]) expect(mockGetDocumentExists.mock.calls).toMatchObject([[{id: 'example-id'}]]) }) test('reference is valid if schema type is marked as weak', async () => { await expect( validateDocument( { ...mockDocument, referenceFieldWeak: {_ref: 'example-id'}, }, schema ) ).resolves.toEqual([]) }) test('throws if `getDocumentExists` is not present', async () => { const result = await validateDocument( { ...mockDocument, referenceField: { _ref: 'example-id', }, }, schema, {getDocumentExists: undefined} ) expect(result).toHaveLength(1) expect( result[0].item.message.includes( '`getDocumentExists` was not provided in validation context' ) ).toBe(true) }) test('reference is valid if schema type is strong and document does exists', async () => { const mockGetDocumentExists = jest.fn(() => Promise.resolve(true)) await expect( validateDocument( { ...mockDocument, referenceField: { _ref: 'example-id', }, }, schema, {getDocumentExists: mockGetDocumentExists} ) ).resolves.toEqual([]) expect(mockGetDocumentExists.mock.calls).toMatchObject([[{id: 'example-id'}]]) }) }) }) async function expectNoError(validations: Rule[], value: unknown) { const errors = (await Promise.all(validations.map((rule) => rule.validate(value)))).flat() if (errors.length === 0) { // This shouldn't actually be needed, but counts against an assertion in jest-terms expect(errors).toHaveLength(0) return } const messages = errors.map((err) => err.item && err.item.message).join('\n\n- ') throw new Error(`Expected no errors, but 
found ${errors.length}:\n- ${messages}`) } async function expectError( validations: Rule[], value: unknown, message: string | undefined, level = 'error' ) { const errors = (await Promise.all(validations.map((rule) => rule.validate(value)))).flat() if (!errors.length) { throw new Error(`Expected error matching "${message}", but no errors were returned.`) } const matches = errors.filter((err) => err.item && err.item.message.includes(message)) if (matches.length === 0) { const messages = errors.map((err) => err.item && err.item.message).join('\n\n- ') throw new Error(`Expected error matching "${message}" not found. Errors found:\n- ${messages}`) } const levelMatch = matches.find((err) => err.level === level) if (!levelMatch) { throw new Error(`Expected error to have level "${level}", got ${matches[0].level}`) } // This shouldn't actually be needed, but counts against an assertion in jest-terms expect(levelMatch.item.message).toMatch(message) }<|fim▁end|>
<|file_name|>cmd_interface_02_server_clients.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# coding: utf-8
#
# StreamBuddy - a video and data streaming service.
# Copyright (c) 2015, Tobias Bleiker & Dumeni Manatschal
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# Source on github:
# https://github.com/tbleiker/StreamBug
#

import multiprocessing
import time

import zmq

from streambug import cmd_interface
from streambug import mplogger


# set up logging
mplogger.setup(debug=True)
log = mplogger.getLogger()


def server_thread(zeromq_context, address, port_pull, port_pub):
    server = cmd_interface.Server(zeromq_context, address, port_pull, port_pub)
    server.start()
    server.join()


def f1_thread(name, role, zeromq_context, address, port_pub, port_pull):
    client = cmd_interface.Client(name, role, zeromq_context, address, port_pub, port_pull)

    def test():
        return 'test successful'

    client.add_command('test', test, 'simple test')

    client.start()

    # send an update and join client
    time.sleep(0.5)
    log.info('### Test 1: Send an update.')
    client.send_update('This is an update message.')
    client.join()


def c1_thread(name, role, zeromq_context, address, port_pub, port_pull):
    client = cmd_interface.Client(name, role, zeromq_context, address, port_pub, port_pull)

    def update_func(msg):
        log.info('Got update message: {msg}'.format(msg=msg))

    client.set_update_func(update_func)

    client.start()

    time.sleep(2)
    log.info('### Test 2: Get server status.')
    client.get_server_status()

    time.sleep(0.5)
    log.info('### Test 3: Request help.')
    client.get_help('F1')

<|fim▁hole|>
<|file_name|>configuration-edit-ctrl.js<|end_file_name|><|fim▁begin|>'use strict'; var app = angular.module('Fablab'); app.controller('GlobalConfigurationEditController', function ($scope,$route, $location, ConfigurationService, NotificationService) { $scope.selected = {configuration: undefined}; $scope.loadConfiguration = function (id) { ConfigurationService.get(id, function (data) { $scope.configuration = data; }); }; $scope.save = function () { var configurationCurrent = angular.copy($scope.configuration); ConfigurationService.save(configurationCurrent, function (data) { $scope.configuration = data;<|fim▁hole|> NotificationService.notify("success", "configuration.notification.saved"); $route.reload(); $location.path("configurations"); }); }; } ); app.controller('ConfigurationEditController', function ($scope, $routeParams, $controller) { $controller('GlobalConfigurationEditController', {$scope: $scope}); $scope.newConfiguration = false; $scope.loadConfiguration($routeParams.id); } );<|fim▁end|>
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>var gulp = require("gulp"); var $ = require("gulp-load-plugins"); <|fim▁hole|> .pipe(gulp.dest("./.tmp")); });<|fim▁end|>
gulp.task("html", function () { gulp.src("./src/index.html")
<|file_name|>ServerRestMessage.java<|end_file_name|><|fim▁begin|>/* * Copyright 2016 the original author or authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.github.antennaesdk.common.messages; import com.google.gson.Gson; import com.google.gson.GsonBuilder; import java.util.HashMap; import java.util.Map; import java.util.UUID;<|fim▁hole|> * <code>ServerRestMessage</code> carries a REST api call to the mobile-broker. * Broker executes this call, and returns the result via <code>ClientMessage</code>. * * @see ClientMessage */ public class ServerRestMessage { // from where the message originates. // it can be from a user or from a server (bot) private ClientAddress from; // rest resource path such as "/api/books" // another example would be "/api/books?id=383763" // another example would be "/api/books/383763" private String path; // represents the "protocol//host:port" such as "https://toys.company.com:8443" // or port can be optional such as "https://toys.company.com" private String host; // represents REST method such as "GET", "POST", "PUT", "DELETE" etc // TODO: use an enum instead of string private String method; // actual message ( this the payload if its POST/PUT call ) // this is optional private String payLoad; // the headers for a REST message private Map<String, String> headers = new HashMap<>(); // The name/value pairs of multipart entities. Implies a multipart request. private Map<String, String> multipartEntities; // unique identified to track the request on the client side. private String requestId; // TODO: use TypeAdapterFactory instead of passing the type. 
private String classType = ServerRestMessage.class.getName(); // getters and setters public ServerRestMessage(){ requestId = UUID.randomUUID().toString(); } public ServerRestMessage( String requestId ){ this.requestId = requestId; } public ClientAddress getFrom() { return from; } public void setFrom(ClientAddress from) { this.from = from; } public String getPath() { return path; } public void setPath(String path) { this.path = path; } public String getHost() { return host; } public void setHost(String host) { this.host = host; } public String getMethod() { return method; } public void setMethod(String method) { this.method = method; } public String getPayLoad() { return payLoad; } public void setPayLoad(String payLoad) { this.payLoad = payLoad; } public Map<String, String> getHeaders() { return headers; } public void setHeaders(Map<String, String> headers) { this.headers = headers; } public void setMultipartEntities(Map<String, String> multipartEntities) { this.multipartEntities = multipartEntities; } public Map<String, String> getMultipartEntities() { return multipartEntities; } public String getRequestId() { return requestId; } public void setRequestId(String requestId) { this.requestId = requestId; } // utility methods public String toJson(){ Gson gson = new Gson(); String json = gson.toJson(this); return json; } public String toJsonPretty(){ Gson gson = new GsonBuilder().setPrettyPrinting().create(); String json = gson.toJson(this); return json; } public static ServerRestMessage fromJson(String json ){ Gson gson = new Gson(); ServerRestMessage result = gson.fromJson( json, ServerRestMessage.class); return result; } }<|fim▁end|>
/**
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import sys, time, os from django.conf import settings from django.core import mail from django.core.mail.backends import locmem from django.db import DEFAULT_DB_ALIAS from django.test import signals from django.template import Template from django.utils.translation import deactivate from django.utils.unittest import skipIf class ContextList(list): """A wrapper that provides direct key access to context items contained in a list of context objects. """ def __getitem__(self, key): if isinstance(key, basestring): for subcontext in self: if key in subcontext: return subcontext[key] raise KeyError(key) else: return super(ContextList, self).__getitem__(key) def __contains__(self, key):<|fim▁hole|> value = self[key] except KeyError: return False return True def instrumented_test_render(self, context): """ An instrumented Template render method, providing a signal that can be intercepted by the test system Client """ signals.template_rendered.send(sender=self, template=self, context=context) return self.nodelist.render(context) def setup_test_environment(): """Perform any global pre-test setup. This involves: - Installing the instrumented test renderer - Set the email backend to the locmem email backend. - Setting the active locale to match the LANGUAGE_CODE setting. """ Template.original_render = Template._render Template._render = instrumented_test_render mail.original_SMTPConnection = mail.SMTPConnection mail.SMTPConnection = locmem.EmailBackend mail.original_email_backend = settings.EMAIL_BACKEND settings.EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend' mail.outbox = [] deactivate() def teardown_test_environment(): """Perform any global post-test teardown. This involves: - Restoring the original test renderer - Restoring the email sending functions """ Template._render = Template.original_render del Template.original_render mail.SMTPConnection = mail.original_SMTPConnection del mail.original_SMTPConnection settings.EMAIL_BACKEND = mail.original_email_backend del mail.original_email_backend del mail.outbox def get_runner(settings): test_path = settings.TEST_RUNNER.split('.') # Allow for Python 2.5 relative paths if len(test_path) > 1: test_module_name = '.'.join(test_path[:-1]) else: test_module_name = '.' test_module = __import__(test_module_name, {}, {}, test_path[-1]) test_runner = getattr(test_module, test_path[-1]) return test_runner def skipIfDBEngine(engine, reason=None): """ Decorator to skip tests on a given database engine. Note that you can pass a single engine or an iterable here """ if not reason: reason = "not supported on this database" settings_engine = settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'] if isinstance(engine, basestring): return skipIf(settings_engine == engine, reason) return skipIf(settings_engine in engine, reason)<|fim▁end|>
try:
<|file_name|>IronArrow.cpp<|end_file_name|><|fim▁begin|>// © 2014 - 2017 Soverance Studios // http://www.soverance.com // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // http://www.apache.org/licenses/LICENSE-2.0 // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "Ethereal.h" #include "IronArrow.h" #define LOCTEXT_NAMESPACE "EtherealText" // Sets default values AIronArrow::AIronArrow(const FObjectInitializer& ObjectInitializer) : Super(ObjectInitializer) { // Get Assets, References Obtained Via Right Click in Editor static ConstructorHelpers::FObjectFinder<USkeletalMesh> SkeletalMeshObject(TEXT("SkeletalMesh'/Game/VFX/sphere_skeletal.sphere_skeletal'")); static ConstructorHelpers::FObjectFinder<UStaticMesh> StaticMeshObject(TEXT("StaticMesh'/Game/Weapons/Ranged/Ammo/IronArrow.IronArrow'")); static ConstructorHelpers::FObjectFinder<UStaticMesh> OffhandMeshObject(TEXT("StaticMesh'/Game/VFX/sphere.sphere'")); static ConstructorHelpers::FObjectFinder<UTexture2D> LargeIconObject(TEXT("Texture2D'/Game/Blueprints/Widgets/UI-Images/Icons_Gear/WeaponIcon_IronArrows.WeaponIcon_IronArrows'")); static ConstructorHelpers::FObjectFinder<UTexture2D> SmallIconObject(TEXT("Texture2D'/Game/Blueprints/Widgets/UI-Images/Icons_Gear/WeaponIcon_IronArrows_small.WeaponIcon_IronArrows_small'")); Name = EMasterGearList::GL_IronArrow; NameText = LOCTEXT("IronArrowName", "Iron Arrow"); Type = EMasterGearTypes::GT_Ammo; TypeText = LOCTEXT("IronArrowType", "Ammunition"); Description = "Arrows forged by Vulcan blacksmiths."; Price = 15000; MPCost = 0.0f; ATK = 10.0f; DEF = 0.0f; SPD = 0.0f; HP = 0.0f; MP = 0.0f; LargeIcon = LargeIconObject.Object; SmallIcon = SmallIconObject.Object; <|fim▁hole|> SM_WeaponStaticMesh = StaticMeshObject.Object; SM_WeaponOffhandMesh = StaticMeshObject.Object; // Set Mesh WeaponSkeletalMesh->SetSkeletalMesh(SK_WeaponSkeletalMesh); WeaponSkeletalMesh->SetHiddenInGame(true); WeaponStaticMesh->SetStaticMesh(SM_WeaponStaticMesh); //WeaponStaticMesh->SetWorldScale3D(FVector(30, 30, 30)); // scale correction //WeaponStaticMesh->SetRelativeLocation(FVector(20, -2, -2)); // location correction //WeaponStaticMesh->SetRelativeRotation(FRotator(-80, 180, 0)); // location correction WeaponOffhandMesh->SetStaticMesh(SM_WeaponOffhandMesh); WeaponOffhandMesh->SetHiddenInGame(true); } // Called when the game starts or when spawned void AIronArrow::BeginPlay() { Super::BeginPlay(); } #undef LOCTEXT_NAMESPACE<|fim▁end|>
SK_WeaponSkeletalMesh = SkeletalMeshObject.Object;
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The implementation of the DOM. //! //! The DOM is comprised of interfaces (defined by specifications using //! [WebIDL](https://heycam.github.io/webidl/)) that are implemented as Rust //! structs in submodules of this module. Its implementation is documented //! below. //! //! A DOM object and its reflector //! ============================== //! //! The implementation of an interface `Foo` in Servo's DOM involves two //! related but distinct objects: //! //! * the **DOM object**: an instance of the Rust struct `dom::foo::Foo` //! (marked with the `#[dom_struct]` attribute) on the Rust heap; //! * the **reflector**: a `JSObject` allocated by SpiderMonkey, that owns the //! DOM object. //! //! Memory management //! ================= //! //! Reflectors of DOM objects, and thus the DOM objects themselves, are managed //! by the SpiderMonkey Garbage Collector. Thus, keeping alive a DOM object //! is done through its reflector. //! //! For more information, see: //! //! * rooting pointers on the stack: //! the [`Root`](bindings/js/struct.Root.html) smart pointer; //! * tracing pointers in member fields: the [`JS`](bindings/js/struct.JS.html), //! [`MutNullableJS`](bindings/js/struct.MutNullableJS.html) and //! [`MutHeap`](bindings/js/struct.MutHeap.html) smart pointers and //! [the tracing implementation](bindings/trace/index.html); //! * rooting pointers from across thread boundaries or in channels: the //! [`Trusted`](bindings/refcounted/struct.Trusted.html) smart pointer; //! * extracting pointers to DOM objects from their reflectors: the //! [`Unrooted`](bindings/js/struct.Unrooted.html) smart pointer. //! //! Inheritance //! =========== //! //! Rust does not support struct inheritance, as would be used for the //! object-oriented DOM APIs. To work around this issue, Servo stores an //! instance of the superclass in the first field of its subclasses. (Note that //! it is stored by value, rather than in a smart pointer such as `JS<T>`.) //! //! This implies that a pointer to an object can safely be cast to a pointer //! to all its classes. //! //! This invariant is enforced by the lint in //! `plugins::lints::inheritance_integrity`. //! //! Interfaces which either derive from or are derived by other interfaces //! implement the `Castable` trait, which provides three methods `is::<T>()`, //! `downcast::<T>()` and `upcast::<T>()` to cast across the type hierarchy //! and check whether a given instance is of a given type. //! //! ```ignore //! use dom::bindings::inheritance::Castable; //! use dom::element::Element; //! use dom::htmlelement::HTMLElement; //! use dom::htmlinputelement::HTMLInputElement; //! //! if let Some(elem) = node.downcast::<Element> { //! if elem.is::<HTMLInputElement>() { //! return elem.upcast::<HTMLElement>(); //! } //! } //! ``` //! //! Furthermore, when discriminating a given instance against multiple //! interface types, code generation provides a convenient TypeId enum //! which can be used to write `match` expressions instead of multiple //! calls to `Castable::is::<T>`. The `type_id()` method of an instance is //! provided by the farthest interface it derives from, e.g. `EventTarget` //! for `HTMLMediaElement`. For convenience, that method is also provided //! 
on the `Node` interface to avoid unnecessary upcasts to `EventTarget`. //! //! ```ignore //! use dom::bindings::inheritance::{EventTargetTypeId, NodeTypeId}; //! //! match *node.type_id() { //! EventTargetTypeId::Node(NodeTypeId::CharacterData(_)) => ..., //! EventTargetTypeId::Node(NodeTypeId::Element(_)) => ..., //! ..., //! } //! ``` //! //! Construction //! ============ //! //! DOM objects of type `T` in Servo have two constructors: //! //! * a `T::new_inherited` static method that returns a plain `T`, and //! * a `T::new` static method that returns `Root<T>`. //! //! (The result of either method can be wrapped in `Result`, if that is //! appropriate for the type in question.) //! //! The latter calls the former, boxes the result, and creates a reflector //! corresponding to it by calling `dom::bindings::utils::reflect_dom_object` //! (which yields ownership of the object to the SpiderMonkey Garbage Collector). //! This is the API to use when creating a DOM object. //! //! The former should only be called by the latter, and by subclasses' //! `new_inherited` methods. //! //! DOM object constructors in JavaScript correspond to a `T::Constructor` //! static method. This method is always fallible. //! //! Destruction //! =========== //! //! When the SpiderMonkey Garbage Collector discovers that the reflector of a //! DOM object is garbage, it calls the reflector's finalization hook. This //! function deletes the reflector's DOM object, calling its destructor in the //! process. //! //! Mutability and aliasing //! ======================= //! //! Reflectors are JavaScript objects, and as such can be freely aliased. As //! Rust does not allow mutable aliasing, mutable borrows of DOM objects are //! not allowed. In particular, any mutable fields use `Cell` or `DOMRefCell` //! to manage their mutability. //! //! `Reflector` and `Reflectable` //! ============================= //! //! Every DOM object has a `Reflector` as its first (transitive) member field. //! This contains a `*mut JSObject` that points to its reflector. //! //! The `FooBinding::Wrap` function creates the reflector, stores a pointer to //! the DOM object in the reflector, and initializes the pointer to the reflector //! in the `Reflector` field. //! //! The `Reflectable` trait provides a `reflector()` method that returns the //! DOM object's `Reflector`. It is implemented automatically for DOM structs //! through the `#[dom_struct]` attribute. //! //! Implementing methods for a DOM object //! ===================================== //! //! * `dom::bindings::codegen::Bindings::FooBindings::FooMethods` for methods //! defined through IDL; //! * `&self` public methods for public helpers; //! * `&self` methods for private helpers. //! //! Accessing fields of a DOM object //! ================================ //! //! All fields of DOM objects are private; accessing them from outside their //! module is done through explicit getter or setter methods. //! //! Inheritance and casting //! ======================= //! //! For all DOM interfaces `Foo` in an inheritance chain, a //! `dom::bindings::inheritance::FooCast` provides methods to cast //! to other types in the inheritance chain. For example: //! //! ```ignore //! # use script::dom::bindings::inheritance::{NodeCast, HTMLElementCast}; //! # use script::dom::element::Element; //! # use script::dom::node::Node; //! # use script::dom::htmlelement::HTMLElement; //! fn f(element: &Element) { //! let base = element.upcast::<Node>(); //! let derived = element.downcast::<HTMLElement>(); //! 
} //! ``` //! //! Adding a new DOM interface //! ========================== //! //! Adding a new interface `Foo` requires at least the following: //! //! * adding the new IDL file at `components/script/dom/webidls/Foo.webidl`; //! * creating `components/script/dom/foo.rs`; //! * listing `foo.rs` in `components/script/dom/mod.rs`; //! * defining the DOM struct `Foo` with a `#[dom_struct]` attribute, a //! superclass or `Reflector` member, and other members as appropriate; //! * implementing the //! `dom::bindings::codegen::Bindings::FooBindings::FooMethods` trait for //! `&'a Foo`; //! * adding/updating the match arm in create_element in //! `components/script/dom/create.rs` (only applicable to new `HTMLElement`s) //! //! More information is available in the [bindings module](bindings/index.html). //! //! Accessing DOM objects from layout //! ================================= //! //! Layout code can access the DOM through the //! [`LayoutJS`](bindings/js/struct.LayoutJS.html) smart pointer. This does not //! keep the DOM object alive; we ensure that no DOM code (Garbage Collection //! in particular) runs while the layout thread is accessing the DOM. //! //! Methods accessible to layout are implemented on `LayoutJS<Foo>` using //! `LayoutFooHelpers` traits. #[macro_use] pub mod macros; pub mod types { include!(concat!(env!("OUT_DIR"), "/InterfaceTypes.rs")); } pub mod activation; pub mod attr; mod create; #[allow(unsafe_code)] #[deny(missing_docs, non_snake_case)] pub mod bindings; pub mod blob; pub mod browsingcontext; pub mod canvasgradient; pub mod canvaspattern; pub mod canvasrenderingcontext2d; pub mod characterdata; pub mod closeevent; pub mod comment; pub mod console; pub mod crypto; pub mod css; pub mod cssstyledeclaration; pub mod customevent; pub mod dedicatedworkerglobalscope; pub mod document; pub mod documentfragment; pub mod documenttype; pub mod domexception; pub mod domimplementation; pub mod domparser; pub mod dompoint; pub mod dompointreadonly; pub mod domquad; pub mod domrect; pub mod domrectlist; pub mod domrectreadonly; pub mod domstringmap; pub mod domtokenlist; pub mod element; pub mod errorevent; pub mod event; pub mod eventdispatcher; pub mod eventsource; pub mod eventtarget; pub mod file; pub mod filelist; pub mod filereader; pub mod formdata; pub mod htmlanchorelement; pub mod htmlappletelement; pub mod htmlareaelement; pub mod htmlaudioelement; pub mod htmlbaseelement; pub mod htmlbodyelement; pub mod htmlbrelement; pub mod htmlbuttonelement; pub mod htmlcanvaselement; pub mod htmlcollection; pub mod htmldataelement; pub mod htmldatalistelement; pub mod htmldetailselement; pub mod htmldialogelement; pub mod htmldirectoryelement; pub mod htmldivelement; pub mod htmldlistelement; pub mod htmlelement; pub mod htmlembedelement; pub mod htmlfieldsetelement; pub mod htmlfontelement; pub mod htmlformcontrolscollection; pub mod htmlformelement; pub mod htmlframeelement; pub mod htmlframesetelement; pub mod htmlheadelement; pub mod htmlheadingelement; pub mod htmlhrelement; pub mod htmlhtmlelement; pub mod htmliframeelement; pub mod htmlimageelement; pub mod htmlinputelement; pub mod htmllabelelement; pub mod htmllegendelement; pub mod htmllielement; pub mod htmllinkelement; pub mod htmlmapelement; pub mod htmlmediaelement; pub mod htmlmetaelement; pub mod htmlmeterelement; pub mod htmlmodelement; pub mod htmlobjectelement; pub mod htmlolistelement; pub mod htmloptgroupelement; pub mod htmloptionelement; pub mod htmloutputelement; pub mod htmlparagraphelement; pub mod 
htmlparamelement; pub mod htmlpreelement; pub mod htmlprogresselement; pub mod htmlquoteelement; pub mod htmlscriptelement; pub mod htmlselectelement; pub mod htmlsourceelement; pub mod htmlspanelement; pub mod htmlstyleelement; pub mod htmltablecaptionelement; pub mod htmltablecellelement; pub mod htmltablecolelement; pub mod htmltabledatacellelement; pub mod htmltableelement; pub mod htmltableheadercellelement; pub mod htmltablerowelement; pub mod htmltablesectionelement; pub mod htmltemplateelement; pub mod htmltextareaelement; pub mod htmltimeelement; pub mod htmltitleelement; pub mod htmltrackelement; pub mod htmlulistelement; pub mod htmlunknownelement; pub mod htmlvideoelement; pub mod imagedata; pub mod keyboardevent; pub mod location; pub mod messageevent; pub mod mouseevent; pub mod namednodemap; pub mod navigator; pub mod navigatorinfo; pub mod node; pub mod nodeiterator; pub mod nodelist; pub mod performance; pub mod performancetiming; pub mod processinginstruction; pub mod progressevent; pub mod range; pub mod screen; pub mod servohtmlparser; pub mod servoxmlparser; pub mod storage;<|fim▁hole|>pub mod textdecoder; pub mod textencoder; pub mod touch; pub mod touchevent; pub mod touchlist; pub mod treewalker; pub mod uievent; pub mod url; pub mod urlhelper; pub mod urlsearchparams; pub mod userscripts; pub mod validitystate; pub mod values; pub mod virtualmethods; pub mod webglactiveinfo; pub mod webglbuffer; pub mod webglcontextevent; pub mod webglframebuffer; pub mod webglobject; pub mod webglprogram; pub mod webglrenderbuffer; pub mod webglrenderingcontext; pub mod webglshader; pub mod webglshaderprecisionformat; pub mod webgltexture; pub mod webgluniformlocation; pub mod websocket; pub mod window; pub mod worker; pub mod workerglobalscope; pub mod workerlocation; pub mod workernavigator; pub mod xmldocument; pub mod xmlhttprequest; pub mod xmlhttprequesteventtarget; pub mod xmlhttprequestupload;<|fim▁end|>
pub mod storageevent; pub mod testbinding; pub mod testbindingproxy; pub mod text;
<|file_name|>s2i_dropcaps.go<|end_file_name|><|fim▁begin|>package builds import ( "fmt" kapi "k8s.io/kubernetes/pkg/api" g "github.com/onsi/ginkgo" o "github.com/onsi/gomega" buildapi "github.com/openshift/origin/pkg/build/api" exutil "github.com/openshift/origin/test/extended/util" ) var _ = g.Describe("[builds][Slow] Capabilities should be dropped for s2i builders", func() { defer g.GinkgoRecover() var ( s2ibuilderFixture = exutil.FixturePath("..", "extended", "testdata", "s2i-dropcaps", "rootable-ruby") rootAccessBuildFixture = exutil.FixturePath("..", "extended", "testdata", "s2i-dropcaps", "root-access-build.yaml") oc = exutil.NewCLI("build-s2i-dropcaps", exutil.KubeConfigPath()) ) g.JustBeforeEach(func() { g.By("waiting for builder service account") err := exutil.WaitForBuilderAccount(oc.KubeREST().ServiceAccounts(oc.Namespace())) o.Expect(err).NotTo(o.HaveOccurred()) }) g.Describe("s2i build with a rootable builder", func() { g.It("should not be able to switch to root with an assemble script", func() { g.By("calling oc new-build for rootable-builder") err := oc.Run("new-build").Args("--binary", "--name=rootable-ruby").Execute() o.Expect(err).NotTo(o.HaveOccurred()) g.By("starting the rootable-ruby build with --wait flag") err = oc.Run("start-build").Args("rootable-ruby", fmt.Sprintf("--from-dir=%s", s2ibuilderFixture), "--wait").Execute() // debug for failures on jenkins if err != nil { exutil.DumpBuildLogs("rootable-ruby", oc) } o.Expect(err).NotTo(o.HaveOccurred()) g.By("creating a build that tries to gain root access via su") err = oc.Run("create").Args("-f", rootAccessBuildFixture).Execute() o.Expect(err).NotTo(o.HaveOccurred()) g.By("start the root-access-build with the --wait flag") err = oc.Run("start-build").Args("root-access-build", "--wait").Execute() // debug for failures on jenkins if err == nil { exutil.DumpBuildLogs("root-access-build", oc) } o.Expect(err).To(o.HaveOccurred()) g.By("verifying the build status") builds, err := oc.REST().Builds(oc.Namespace()).List(kapi.ListOptions{}) o.Expect(err).NotTo(o.HaveOccurred()) o.Expect(builds.Items).ToNot(o.BeEmpty()) // Find the build var build *buildapi.Build for i := range builds.Items { if builds.Items[i].Name == "root-access-build-1" { build = &builds.Items[i]<|fim▁hole|> } } o.Expect(build).NotTo(o.BeNil()) o.Expect(build.Status.Phase).Should(o.BeEquivalentTo(buildapi.BuildPhaseFailed)) }) }) })<|fim▁end|>
break
<|file_name|>UrlEntity.js<|end_file_name|><|fim▁begin|>var mongoose = require('mongoose'); var urlSchema = require('../schemas/UrlSchema'); var Url = mongoose.model('Url', urlSchema);<|fim▁hole|> var link = new Url({ url, urlMinifie }); link.save(function(err) { if (err) return handleError(err); }); }; var getUrls = function() { return Url.find().exec(); }; module.exports = {Url, insertUrl, getUrls};<|fim▁end|>
var insertUrl = function(url, urlMinifie) {
<|file_name|>lxqt-config-appearance_pl_PL.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" encoding="utf-8"?> <!DOCTYPE TS> <TS version="2.1" language="pl_PL"> <context> <name>FontsConfig</name> <message> <location filename="../fontsconfig.ui" line="23"/> <source>Font</source> <translation>Czcionka</translation> </message> <message> <location filename="../fontsconfig.ui" line="30"/> <source>Default font for user interface</source> <translation>Domyślna czcionka dla interfejsu użytkownika</translation> </message> <message> <location filename="../fontsconfig.ui" line="36"/> <source>Font name:</source> <translation>Czcionka:</translation> </message> <message> <location filename="../fontsconfig.ui" line="46"/> <source>Style:</source> <translation>Styl:</translation> </message> <message> <location filename="../fontsconfig.ui" line="53"/> <source>Point size:</source> <translation>Rozmiar:</translation> </message> <message> <location filename="../fontsconfig.ui" line="64"/> <source>Normal</source> <translation>Normalna</translation> </message> <message> <location filename="../fontsconfig.ui" line="69"/> <source>Bold</source> <translation>Pogrubiona</translation> </message> <message> <location filename="../fontsconfig.ui" line="74"/> <source>Italic</source> <translation>Kursywa</translation> </message> <message> <location filename="../fontsconfig.ui" line="79"/> <source>Bold Italic</source> <translation>Pogrubiona kursywa</translation> </message> <message> <location filename="../fontsconfig.ui" line="90"/> <source>The following settings only affect newly started applications</source> <translation>Poniższe ustawienia dotyczą tylko nowo otwartych aplikacji</translation> </message> <message> <location filename="../fontsconfig.ui" line="96"/> <source>Use antialias fonts</source> <translation>Wygładzanie czcionek (antyaliasing)</translation> </message> <message> <location filename="../fontsconfig.ui" line="103"/> <source>Font hinting style:</source> <translation>Styl hintingu:</translation> </message> <message> <location filename="../fontsconfig.ui" line="114"/> <location filename="../fontsconfig.ui" line="179"/> <source>None</source> <translation>Brak</translation> </message> <message> <location filename="../fontsconfig.ui" line="119"/> <source>Slight</source> <translation>Delikatny</translation> </message> <message> <location filename="../fontsconfig.ui" line="124"/> <source>Medium</source> <translation>Średni</translation> </message> <message> <location filename="../fontsconfig.ui" line="129"/> <source>Full</source> <translation>Pełny</translation> </message> <message> <location filename="../fontsconfig.ui" line="137"/> <source>Font hinting</source> <translation>Hinting czcionek</translation> </message> <message> <location filename="../fontsconfig.ui" line="144"/> <source>Resolution (DPI):</source> <translation>Rozdzielczość (DPI):</translation> </message> <message> <location filename="../fontsconfig.ui" line="151"/> <source>Autohint</source> <translation>Autohinting</translation> </message> <message> <location filename="../fontsconfig.ui" line="168"/> <source>Subpixel antialiasing:</source> <translation>Antyaliasing subpikselowy:</translation> </message> <message> <location filename="../fontsconfig.ui" line="184"/> <source>RGB</source> <translation>RGB</translation> </message><|fim▁hole|> <translation>BGR</translation> </message> <message> <location filename="../fontsconfig.ui" line="194"/> <source>VRGB</source> <translation>VRGB</translation> </message> <message> <location 
filename="../fontsconfig.ui" line="199"/> <source>VBGR</source> <translation>VBGR</translation> </message> </context> <context> <name>IconThemeConfig</name> <message> <location filename="../iconthemeconfig.ui" line="14"/> <source>LXQt Appearance Configuration</source> <translation>Konfiguracja wyglądu LXQt</translation> </message> <message> <location filename="../iconthemeconfig.ui" line="26"/> <source>Icons Theme</source> <translation>Motyw ikon</translation> </message> </context> <context> <name>LXQtThemeConfig</name> <message> <source>LXQt Theme</source> <translation type="vanished">Motyw LXQt</translation> </message> </context> <context> <name>LxQtThemeConfig</name> <message> <location filename="../lxqtthemeconfig.ui" line="23"/> <source>LXQt Theme</source> <translation>Motyw LXQt</translation> </message> </context> <context> <name>QObject</name> <message> <location filename="../main.cpp" line="45"/> <source>LXQt Appearance Configuration</source> <translation>Konfiguracja wyglądu LXQt</translation> </message> <message> <location filename="../main.cpp" line="51"/> <source>Widget Style</source> <translation>Widżety</translation> </message> <message> <location filename="../main.cpp" line="55"/> <source>Icons Theme</source> <translation>Motyw ikon</translation> </message> <message> <location filename="../main.cpp" line="59"/> <source>LXQt Theme</source> <translation>Motyw LXQt</translation> </message> <message> <location filename="../main.cpp" line="63"/> <source>Font</source> <translation>Czcionka</translation> </message> <message> <location filename="../main.cpp" line="68"/> <source>Cursor</source> <translation>Kursor</translation> </message> </context> <context> <name>StyleConfig</name> <message> <location filename="../styleconfig.ui" line="23"/> <source>Widget Style</source> <translation>Styl widżetów</translation> </message> <message> <location filename="../styleconfig.ui" line="54"/> <source>Toolbar button style:</source> <translation>Styl paska narzędziowego:</translation> </message> <message> <location filename="../styleconfig.ui" line="62"/> <source>Only display the icon</source> <translation>Tylko ikony</translation> </message> <message> <location filename="../styleconfig.ui" line="67"/> <source>Only display the text</source> <translation>Tylko tekst</translation> </message> <message> <location filename="../styleconfig.ui" line="72"/> <source>The text appears beside the icon</source> <translation>Tekst obok ikon</translation> </message> <message> <location filename="../styleconfig.ui" line="77"/> <source>The text appears under the icon</source> <translation>Tekst poniżej ikon</translation> </message> <message> <location filename="../styleconfig.ui" line="82"/> <source>Default</source> <translation>Domyśłny</translation> </message> <message> <location filename="../styleconfig.ui" line="90"/> <source>Activate item on single click</source> <translation>Aktywacja pojedyńczym kliknięciem</translation> </message> </context> </TS><|fim▁end|>
<message> <location filename="../fontsconfig.ui" line="189"/> <source>BGR</source>
<|file_name|>day12.rs<|end_file_name|><|fim▁begin|>use std::str::Chars; const INPUT: &'static str = include_str!("data/day12.txt"); const NUMERICAL: [char; 11] = ['-', '0', '1', '2', '3', '4', '5', '6', '7', '8', '9']; pub fn part1() -> i32 { calculate_part1(INPUT) } pub fn part2() -> i32 { calculate_part2(INPUT) } fn calculate_part1(input: &str) -> i32 { let mut reading_number = false; let mut buff = String::new(); let mut total = 0; for c in input.chars() { if NUMERICAL.contains(&c) { reading_number = true; buff.push(c); } else if reading_number { reading_number = false; total += buff.parse().expect("Could not parse value"); buff.clear(); } } total } fn calculate_part2(input: &str) -> i32 { calculate_recursive(&mut input.chars()) } fn calculate_recursive(input: &mut Chars) -> i32 { let mut reading_number = false; let mut buff = String::new(); let mut total = 0; let mut red = Red::new(); while let Some(c) = input.next() { red.input(c); if !red.check() { let mut stack = 1; // read until close current structure while let Some(c) = input.next() { if stack == 0 { break } else if c == '{' { stack += 1; } else if c == '}' { stack -= 1; } } total = 0; break } if NUMERICAL.contains(&c) { reading_number = true; buff.push(c); } else if reading_number { reading_number = false; total += buff.parse().expect("Could not parse value"); buff.clear(); } if c == '{' { total += calculate_recursive(input); } else if c == '}' { return total } } total } struct Red { pub level: u32, } impl Red { fn new() -> Self { Red { level: 0, } } pub fn input(&mut self, c: char) { if self.level == 6 { return } if c == ':' && self.level == 0 || c == '"' && self.level == 1 || c == 'r' && self.level == 2 || c == 'e' && self.level == 3 || c == 'd' && self.level == 4 || c == '"' && self.level == 5 { self.level += 1; } else if self.level < 6 { self.level = 0; } } pub fn check(&self) -> bool { self.level < 5 } } #[cfg(test)] mod tests { #[test] fn part1_test1() { assert_eq!(6, super::calculate_part1("[1,2,3]")); } #[test] fn part1_test2() { assert_eq!(6, super::calculate_part1(r#"{"a":2,"b":4}"#)); } #[test] fn part1_test3() { assert_eq!(3, super::calculate_part1(r#"[[[3]]]"#)); } #[test] fn part1_test4() { assert_eq!(3, super::calculate_part1(r#"{"a":{"b":4},"c":-1}"#)); } #[test] fn part1_test5() { assert_eq!(0, super::calculate_part1(r#"{"a":[-1,1]}"#)); } #[test] fn part1_test6() { assert_eq!(0, super::calculate_part1(r#"[-1,{"a":1}]"#)); } #[test] fn part1_test7() { assert_eq!(0, super::calculate_part1(r#"[]"#)); } #[test] fn part1_test8() { assert_eq!(0, super::calculate_part1(r#"{}"#)); } #[test] fn part2_test1() { assert_eq!(6, super::calculate_part2("[1,2,3]")); } #[test] fn part2_test2() { assert_eq!(6, super::calculate_part2(r#"{"a":2,"b":4}"#)); } #[test] fn part2_test3() { assert_eq!(3, super::calculate_part2(r#"[[[3]]]"#)); } #[test] fn part2_test4() { assert_eq!(3, super::calculate_part2(r#"{"a":{"b":4},"c":-1}"#)); } #[test] fn part2_test5() { assert_eq!(0, super::calculate_part2(r#"{"a":[-1,1]}"#)); } #[test] fn part2_test6() { assert_eq!(0, super::calculate_part2(r#"[-1,{"a":1}]"#)); } #[test] fn part2_test7() { assert_eq!(0, super::calculate_part2(r#"[]"#)); } <|fim▁hole|> fn part2_test8() { assert_eq!(0, super::calculate_part2(r#"{}"#)); } #[test] fn part2_test_red1() { assert_eq!(4, super::calculate_part2(r#"[1,{"c":"red","b":2},3]"#)); } #[test] fn part2_test_red2() { assert_eq!(0, super::calculate_part2(r#"{"d":"red","e":[1,2,3,4],"f":5}"#)); } #[test] fn part2_test_red3() { assert_eq!(6, 
super::calculate_part2(r#"[1,"red",5]"#)); } }<|fim▁end|>
#[test]
<|file_name|>flow.py<|end_file_name|><|fim▁begin|>import re from lxml import etree from nxpy.util import tag_pattern, whitespace_pattern class Flow(object): def __init__(self): self.routes = [] def export(self): flow = etree.Element('flow') if len(self.routes): for route in self.routes: flow.append(route.export()) return flow else: return False def build(self, node): for child in node: nodeName_ = tag_pattern.match(child.tag).groups()[-1] self.buildChildren(child, nodeName_) def buildChildren(self, child_, nodeName_, from_subclass=False): if nodeName_ == 'route': obj_ = Route() obj_.build(child_) self.routes.append(obj_) class Route(object): def __init__(self): self.name = '' self.operation = None self.match = { "destination": [], "source": [], "protocol": [], "port": [], "destination-port": [], "source-port": [], "icmp-code": [], "icmp-type": [], "tcp-flags": [], "packet-length": [], "dscp": [], "fragment": [] } ''' Match is a dict with list values example: self. match = { "destination": [<ip-prefix(es)>], "source": [<ip-prefix(es)>], "protocol": [<numeric-expression(s)>], "port": [<numeric-expression(s)>], "destination-port": [<numeric-expression(s)>] "source-port": [<numeric-expression(s)>], "icmp-code": [<numeric-expression(s)>], "icmp-type": [<numeric-expression(s)>], "tcp-flags": [<bitwise-expression(s)>], "packet-length": [<numeric-expression(s)>], "dscp": [<numeric-expression(s)>], "fragment": [ "dont-fragment" "not-a-fragment" "is-fragment" "first-fragment" "last-fragment" ] ''' self.then = { "accept": False, "discard": False, "community": False, "next-term": False, "rate-limit": False, "sample": False, "routing-instance": False } '''Then is a dict (have to see about this in the future: self.then = { "accept": True/False, "discard": True/False, "community": "<name>"/False, "next-term": True/False,<|fim▁hole|> "sample": True/False, "routing-instance": "<RouteTarget extended community>" } ''' def export(self): if self.operation: ro = etree.Element('route', {'operation': self.operation}) else: ro = etree.Element('route') if self.name: etree.SubElement(ro, "name").text = self.name match = etree.Element("match") for key in self.match: if self.match[key]: for value in self.match[key]: etree.SubElement(match, key).text = value if match.getchildren(): ro.append(match) then = etree.Element("then") for key in self.then: if self.then[key]: if self.then[key] is not True and self.then[key] is not False: etree.SubElement(then, key).text = self.then[key] else: etree.SubElement(then, key) if then.getchildren(): ro.append(then) if ro.getchildren(): return ro else: return False def build(self, node): for child in node: nodeName_ = tag_pattern.match(child.tag).groups()[-1] self.buildChildren(child, nodeName_) def buildChildren(self, child_, nodeName_, from_subclass=False): if nodeName_ == 'name': name_ = child_.text name_ = re.sub(whitespace_pattern, " ", name_).strip() self.name = name_ elif nodeName_ == 'match': for grandChild_ in child_: grandChildName_ = tag_pattern.match( grandChild_.tag).groups()[-1] grandChildText = grandChild_.text grandChildText = re.sub( whitespace_pattern, " ", grandChildText).strip() self.match[grandChildName_].append(grandChildText) elif nodeName_ == 'then': for grandChild_ in child_: grandChildName_ = tag_pattern.match( grandChild_.tag).groups()[-1] self.then[grandChildName_] = True<|fim▁end|>
"rate-limit": <rate>/False,
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright 2015-2018 Camptocamp SA # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html)<|fim▁hole|>from . import mass_reconcile from . import advanced_reconciliation<|fim▁end|>
<|file_name|>factories.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from django.db.models import signals from elections.models import ( Election, ModerationHistory, ElectionType, ElectedRole, ModerationStatus, ModerationStatuses, ) from organisations.tests.factories import ( OrganisationFactory, OrganisationDivisionFactory, DivisionGeographyFactory, ) class ElectionTypeFactory(factory.django.DjangoModelFactory): class Meta: model = ElectionType django_get_or_create = ("election_type",) name = "Local elections" election_type = "local" # default_voting_system class ElectedRoleFactory(factory.django.DjangoModelFactory): class Meta: model = ElectedRole django_get_or_create = ("election_type",) election_type = factory.SubFactory(ElectionTypeFactory) organisation = factory.SubFactory(OrganisationFactory) elected_title = "Councillor" elected_role_name = "Councillor" @factory.django.mute_signals(signals.post_save) class ElectionFactory(factory.django.DjangoModelFactory): class Meta: model = Election django_get_or_create = ("election_id",) @classmethod def _get_manager(cls, model_class): return model_class.private_objects election_id = factory.Sequence(lambda n: "local.place-name-%d.2017-03-23" % n) election_title = factory.Sequence(lambda n: "Election %d" % n) election_type = factory.SubFactory(ElectionTypeFactory) poll_open_date = "2017-03-23" organisation = factory.SubFactory(OrganisationFactory) elected_role = factory.SubFactory(ElectedRoleFactory) division = factory.SubFactory(OrganisationDivisionFactory) division_geography = factory.SubFactory(DivisionGeographyFactory) organisation_geography = None seats_contested = 1 seats_total = 1 group = factory.SubFactory( "elections.tests.factories.ElectionFactory", election_id="local.2017-03-23", group=None, group_type="election", ) group_type = None class ModerationStatusFactory(factory.django.DjangoModelFactory): class Meta: model = ModerationStatus django_get_or_create = ("short_label",) short_label = ModerationStatuses.approved.value long_label = "long label" class ModerationHistoryFactory(factory.django.DjangoModelFactory): class Meta: model = ModerationHistory election = factory.SubFactory(ElectionFactory) status = factory.SubFactory(ModerationStatusFactory) created = datetime.datetime.now() modified = datetime.datetime.now() class ElectionWithStatusFactory(ElectionFactory): moderation_status = factory.RelatedFactory( ModerationHistoryFactory, "election", status__short_label=ModerationStatuses.approved.value, ) def related_status(status): return factory.RelatedFactory( ModerationHistoryFactory, "election", status__short_label=ModerationStatuses(status.capitalize()).value, )<|fim▁end|>
import datetime import factory
<|file_name|>exc.py<|end_file_name|><|fim▁begin|><|fim▁hole|> class PasteError(AmnesiaError): def __init__(self, container): super() self.container = container def __str__(self): return 'Paste into container {} failed'.format(self.container.id)<|fim▁end|>
# -*- coding: utf-8 -*- from amnesia.exc import AmnesiaError
<|file_name|>util.js<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google Inc. All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import { isBlank, isPresent, isPrimitive, isStrictStringMap } from './facade/lang'; import * as o from './output/output_ast'; export var MODULE_SUFFIX = ''; var CAMEL_CASE_REGEXP = /([A-Z])/g; export function camelCaseToDashCase(input) { return input.replace(CAMEL_CASE_REGEXP, function () { var m = []; for (var _i = 0; _i < arguments.length; _i++) { m[_i - 0] = arguments[_i]; } return '-' + m[1].toLowerCase(); }); } export function splitAtColon(input, defaultValues) { return _splitAt(input, ':', defaultValues); } export function splitAtPeriod(input, defaultValues) { return _splitAt(input, '.', defaultValues); } function _splitAt(input, character, defaultValues) { var characterIndex = input.indexOf(character); if (characterIndex == -1) return defaultValues; return [input.slice(0, characterIndex).trim(), input.slice(characterIndex + 1).trim()]; } export function sanitizeIdentifier(name) { return name.replace(/\W/g, '_'); } export function visitValue(value, visitor, context) { if (Array.isArray(value)) { return visitor.visitArray(value, context); } if (isStrictStringMap(value)) { return visitor.visitStringMap(value, context); } if (isBlank(value) || isPrimitive(value)) { return visitor.visitPrimitive(value, context); } return visitor.visitOther(value, context); } export var ValueTransformer = (function () { function ValueTransformer() { } ValueTransformer.prototype.visitArray = function (arr, context) { var _this = this; return arr.map(function (value) { return visitValue(value, _this, context); }); }; ValueTransformer.prototype.visitStringMap = function (map, context) { var _this = this; var result = {}; Object.keys(map).forEach(function (key) { result[key] = visitValue(map[key], _this, context); }); return result; }; ValueTransformer.prototype.visitPrimitive = function (value, context) { return value; }; ValueTransformer.prototype.visitOther = function (value, context) { return value; }; return ValueTransformer; }()); export function assetUrl(pkg, path, type) { if (path === void 0) { path = null; } if (type === void 0) { type = 'src'; } if (path == null) { return "asset:@angular/lib/" + pkg + "/index"; } else { return "asset:@angular/lib/" + pkg + "/src/" + path; } } export function createDiTokenExpression(token) { if (isPresent(token.value)) { return o.literal(token.value); }<|fim▁hole|> else if (token.identifierIsInstance) { return o.importExpr(token.identifier) .instantiate([], o.importType(token.identifier, [], [o.TypeModifier.Const])); } else { return o.importExpr(token.identifier); } } export var SyncAsyncResult = (function () { function SyncAsyncResult(syncResult, asyncResult) { if (asyncResult === void 0) { asyncResult = null; } this.syncResult = syncResult; this.asyncResult = asyncResult; if (!asyncResult) { this.asyncResult = Promise.resolve(syncResult); } } return SyncAsyncResult; }()); //# sourceMappingURL=util.js.map<|fim▁end|>
<|file_name|>test428.js<|end_file_name|><|fim▁begin|>if (typeof exports === 'object') { var assert = require('assert'); var alasql = require('..'); } /* Test for issue #379 */ var test = 428; describe('Test ' + test + ' UUID()', function () { before(function () { alasql('CREATE DATABASE test' + test + ';USE test' + test); }); after(function () { alasql('DROP DATABASE test' + test); }); it('1. Simple test GUID', function (done) { var res = alasql('=UUID()'); assert( !!res.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i) ); done();<|fim▁hole|> }); it('2. DEFAULT GUID', function (done) { alasql('CREATE TABLE one (a INT, b STRING DEFAULT UUID())'); alasql('INSERT INTO one(a) VALUES (1)'); var res = alasql('SELECT * FROM one'); assert( !!res[0].b.match(/^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i) ); done(); }); });<|fim▁end|>
<|file_name|>liveness-move-in-loop.rs<|end_file_name|><|fim▁begin|>fn main() { let y: Box<isize> = 42.into(); let mut x: Box<isize>; loop { println!("{}", y); loop { loop { loop { x = y; //~ ERROR use of moved value x.clone(); }<|fim▁hole|> } } } }<|fim▁end|>
<|file_name|>peer_storage.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0. use fail::fail_point; use std::cell::{Cell, RefCell}; use std::collections::VecDeque; use std::sync::atomic::{AtomicBool, AtomicU64, AtomicUsize, Ordering}; use std::sync::mpsc::{self, Receiver, TryRecvError}; use std::sync::Arc; use std::time::Instant; use std::{cmp, error, u64}; use engine_traits::CF_RAFT; use engine_traits::{Engines, KvEngine, Mutable, Peekable}; use keys::{self, enc_end_key, enc_start_key}; use kvproto::metapb::{self, Region}; use kvproto::raft_serverpb::{ MergeState, PeerState, RaftApplyState, RaftLocalState, RaftSnapshotData, RegionLocalState, }; use protobuf::Message; use raft::eraftpb::{ConfState, Entry, HardState, Snapshot}; use raft::{self, Error as RaftError, RaftState, Ready, Storage, StorageError}; use crate::store::fsm::GenSnapTask; use crate::store::util; use crate::store::ProposalContext; use crate::{Error, Result}; use engine_traits::{RaftEngine, RaftLogBatch}; use into_other::into_other; use tikv_util::worker::Scheduler; use tikv_util::{box_err, box_try, debug, defer, error, info, warn}; use super::metrics::*; use super::worker::RegionTask; use super::{SnapEntry, SnapKey, SnapManager, SnapshotStatistics}; // When we create a region peer, we should initialize its log term/index > 0, // so that we can force the follower peer to sync the snapshot first. pub const RAFT_INIT_LOG_TERM: u64 = 5; pub const RAFT_INIT_LOG_INDEX: u64 = 5; const MAX_SNAP_TRY_CNT: usize = 5; /// The initial region epoch version. pub const INIT_EPOCH_VER: u64 = 1; /// The initial region epoch conf_version. pub const INIT_EPOCH_CONF_VER: u64 = 1; // One extra slot for VecDeque internal usage. const MAX_CACHE_CAPACITY: usize = 1024 - 1; const SHRINK_CACHE_CAPACITY: usize = 64; pub const JOB_STATUS_PENDING: usize = 0; pub const JOB_STATUS_RUNNING: usize = 1; pub const JOB_STATUS_CANCELLING: usize = 2; pub const JOB_STATUS_CANCELLED: usize = 3; pub const JOB_STATUS_FINISHED: usize = 4; pub const JOB_STATUS_FAILED: usize = 5; /// Possible status returned by `check_applying_snap`. #[derive(Debug, Clone, Copy, PartialEq)] pub enum CheckApplyingSnapStatus { /// A snapshot is just applied. Success, /// A snapshot is being applied. Applying, /// No snapshot is being applied at all or the snapshot is canceled Idle, } #[derive(Debug)] pub enum SnapState { Relax, Generating { canceled: Arc<AtomicBool>, index: Arc<AtomicU64>, receiver: Receiver<Snapshot>, }, Applying(Arc<AtomicUsize>), ApplyAborted, } impl PartialEq for SnapState { fn eq(&self, other: &SnapState) -> bool { match (self, other) { (&SnapState::Relax, &SnapState::Relax) | (&SnapState::ApplyAborted, &SnapState::ApplyAborted) | (&SnapState::Generating { .. }, &SnapState::Generating { .. 
}) => true, (&SnapState::Applying(ref b1), &SnapState::Applying(ref b2)) => { b1.load(Ordering::Relaxed) == b2.load(Ordering::Relaxed) } _ => false, } } } #[inline] pub fn first_index(state: &RaftApplyState) -> u64 { state.get_truncated_state().get_index() + 1 } #[inline] pub fn last_index(state: &RaftLocalState) -> u64 { state.get_last_index() } pub const ENTRY_MEM_SIZE: usize = std::mem::size_of::<Entry>(); struct EntryCache { cache: VecDeque<Entry>, hit: Cell<i64>, miss: Cell<i64>, mem_size_change: i64, } impl EntryCache { fn first_index(&self) -> Option<u64> { self.cache.front().map(|e| e.get_index()) } fn fetch_entries_to( &self, begin: u64, end: u64, mut fetched_size: u64, max_size: u64, ents: &mut Vec<Entry>, ) { if begin >= end { return; } assert!(!self.cache.is_empty()); let cache_low = self.cache.front().unwrap().get_index(); let start_idx = begin.checked_sub(cache_low).unwrap() as usize; let limit_idx = end.checked_sub(cache_low).unwrap() as usize; let mut end_idx = start_idx; self.cache .iter() .skip(start_idx) .take_while(|e| { let cur_idx = end_idx as u64 + cache_low; assert_eq!(e.get_index(), cur_idx); let m = u64::from(e.compute_size()); fetched_size += m; if fetched_size == m { end_idx += 1; fetched_size <= max_size && end_idx < limit_idx } else if fetched_size <= max_size { end_idx += 1; end_idx < limit_idx } else { false } }) .count(); // Cache either is empty or contains latest log. Hence we don't need to fetch log // from rocksdb anymore. assert!(end_idx == limit_idx || fetched_size > max_size); let (first, second) = tikv_util::slices_in_range(&self.cache, start_idx, end_idx); ents.extend_from_slice(first); ents.extend_from_slice(second); } fn append(&mut self, tag: &str, entries: &[Entry]) { if entries.is_empty() { return; } if let Some(cache_last_index) = self.cache.back().map(|e| e.get_index()) { let first_index = entries[0].get_index(); if cache_last_index >= first_index { if self.cache.front().unwrap().get_index() >= first_index { self.update_mem_size_change_before_clear(); self.cache.clear(); } else { let left = self.cache.len() - (cache_last_index - first_index + 1) as usize; self.mem_size_change -= self .cache .iter() .skip(left) .map(|e| (e.data.capacity() + e.context.capacity()) as i64) .sum::<i64>(); self.cache.truncate(left); } if self.cache.len() + entries.len() < SHRINK_CACHE_CAPACITY && self.cache.capacity() > SHRINK_CACHE_CAPACITY { let old_capacity = self.cache.capacity(); self.cache.shrink_to_fit(); self.mem_size_change += self.get_cache_vec_mem_size_change( self.cache.capacity() as i64, old_capacity as i64, ) } } else if cache_last_index + 1 < first_index { panic!( "{} unexpected hole: {} < {}", tag, cache_last_index, first_index ); } } let mut start_idx = 0; if let Some(len) = (self.cache.len() + entries.len()).checked_sub(MAX_CACHE_CAPACITY) { if len < self.cache.len() { let mut drained_cache_entries_size = 0; self.cache.drain(..len).for_each(|e| { drained_cache_entries_size += (e.data.capacity() + e.context.capacity()) as i64 }); self.mem_size_change -= drained_cache_entries_size; } else { start_idx = len - self.cache.len(); self.update_mem_size_change_before_clear(); self.cache.clear(); } } let old_capacity = self.cache.capacity(); let mut entries_mem_size = 0; for e in &entries[start_idx..] 
{ self.cache.push_back(e.to_owned()); entries_mem_size += (e.data.capacity() + e.context.capacity()) as i64; } self.mem_size_change += self .get_cache_vec_mem_size_change(self.cache.capacity() as i64, old_capacity as i64) + entries_mem_size; } pub fn compact_to(&mut self, idx: u64) { let cache_first_idx = self.first_index().unwrap_or(u64::MAX); if cache_first_idx > idx { return; } let mut drained_cache_entries_size = 0; let cache_last_idx = self.cache.back().unwrap().get_index(); // Use `cache_last_idx + 1` to make sure cache can be cleared completely // if necessary. self.cache .drain(..(cmp::min(cache_last_idx + 1, idx) - cache_first_idx) as usize) .for_each(|e| { drained_cache_entries_size += (e.data.capacity() + e.context.capacity()) as i64 }); self.mem_size_change -= drained_cache_entries_size; if self.cache.len() < SHRINK_CACHE_CAPACITY && self.cache.capacity() > SHRINK_CACHE_CAPACITY { let old_capacity = self.cache.capacity(); // So the peer storage doesn't have much writes since the proposal of compaction, // we can consider this peer is going to be inactive. self.cache.shrink_to_fit(); self.mem_size_change += self .get_cache_vec_mem_size_change(self.cache.capacity() as i64, old_capacity as i64) } } fn update_mem_size_change_before_clear(&mut self) { self.mem_size_change -= self .cache .iter() .map(|e| (e.data.capacity() + e.context.capacity()) as i64) .sum::<i64>(); } fn get_cache_vec_mem_size_change(&self, new_capacity: i64, old_capacity: i64) -> i64 { ENTRY_MEM_SIZE as i64 * (new_capacity - old_capacity) } fn get_total_mem_size(&self) -> i64 { let data_size: usize = self .cache .iter() .map(|e| e.data.capacity() + e.context.capacity()) .sum(); (ENTRY_MEM_SIZE * self.cache.capacity() + data_size) as i64 } fn flush_mem_size_change(&mut self) { RAFT_ENTRIES_CACHES_GAUGE.add(self.mem_size_change); self.mem_size_change = 0; } fn flush_stats(&self) { let hit = self.hit.replace(0); RAFT_ENTRY_FETCHES.hit.inc_by(hit); let miss = self.miss.replace(0); RAFT_ENTRY_FETCHES.miss.inc_by(miss); } #[inline] fn is_empty(&self) -> bool { self.cache.is_empty() } } impl Default for EntryCache { fn default() -> Self { let cache = VecDeque::default(); let size = ENTRY_MEM_SIZE * cache.capacity(); let mut entry_cache = EntryCache { cache, hit: Cell::new(0), miss: Cell::new(0), mem_size_change: size as i64, }; entry_cache.flush_mem_size_change(); entry_cache } } impl Drop for EntryCache { fn drop(&mut self) { self.flush_mem_size_change(); RAFT_ENTRIES_CACHES_GAUGE.sub(self.get_total_mem_size()); self.flush_stats(); } } pub trait HandleRaftReadyContext<WK, WR> where WK: Mutable, WR: RaftLogBatch, { /// Returns the mutable references of WriteBatch for both KvDB and RaftDB in one interface. fn wb_mut(&mut self) -> (&mut WK, &mut WR); fn kv_wb_mut(&mut self) -> &mut WK; fn raft_wb_mut(&mut self) -> &mut WR; fn sync_log(&self) -> bool; fn set_sync_log(&mut self, sync: bool); } fn storage_error<E>(error: E) -> raft::Error where E: Into<Box<dyn error::Error + Send + Sync>>, { raft::Error::Store(StorageError::Other(error.into())) } impl From<Error> for RaftError { fn from(err: Error) -> RaftError { storage_error(err) } } pub struct ApplySnapResult { // prev_region is the region before snapshot applied. pub prev_region: metapb::Region, pub region: metapb::Region, pub destroyed_regions: Vec<metapb::Region>, } /// Returned by `PeerStorage::handle_raft_ready`, used for recording changed status of /// `RaftLocalState` and `RaftApplyState`. 
pub struct InvokeContext { pub region_id: u64, /// Changed RaftLocalState is stored into `raft_state`. pub raft_state: RaftLocalState, /// Changed RaftApplyState is stored into `apply_state`. pub apply_state: RaftApplyState, last_term: u64, /// If the ready has new entries. pub has_new_entries: bool, /// The old region is stored here if there is a snapshot. pub snap_region: Option<Region>, /// The regions whose range are overlapped with this region pub destroyed_regions: Vec<metapb::Region>, } impl InvokeContext { pub fn new<EK: KvEngine, ER: RaftEngine>(store: &PeerStorage<EK, ER>) -> InvokeContext { InvokeContext { region_id: store.get_region_id(), raft_state: store.raft_state.clone(), apply_state: store.apply_state.clone(), last_term: store.last_term, has_new_entries: false, snap_region: None, destroyed_regions: vec![], } } #[inline] pub fn has_snapshot(&self) -> bool { self.snap_region.is_some() } #[inline] pub fn save_raft_state_to<W: RaftLogBatch>(&self, raft_wb: &mut W) -> Result<()> { raft_wb.put_raft_state(self.region_id, &self.raft_state)?; Ok(()) } #[inline] pub fn save_snapshot_raft_state_to( &self, snapshot_index: u64, kv_wb: &mut impl Mutable, ) -> Result<()> { let mut snapshot_raft_state = self.raft_state.clone(); snapshot_raft_state .mut_hard_state() .set_commit(snapshot_index); snapshot_raft_state.set_last_index(snapshot_index); kv_wb.put_msg_cf( CF_RAFT, &keys::snapshot_raft_state_key(self.region_id), &snapshot_raft_state, )?; Ok(()) } #[inline] pub fn save_apply_state_to(&self, kv_wb: &mut impl Mutable) -> Result<()> { kv_wb.put_msg_cf( CF_RAFT, &keys::apply_state_key(self.region_id), &self.apply_state, )?; Ok(()) } } pub fn recover_from_applying_state<EK: KvEngine, ER: RaftEngine>( engines: &Engines<EK, ER>, raft_wb: &mut ER::LogBatch, region_id: u64, ) -> Result<()> { let snapshot_raft_state_key = keys::snapshot_raft_state_key(region_id); let snapshot_raft_state: RaftLocalState = match box_try!(engines.kv.get_msg_cf(CF_RAFT, &snapshot_raft_state_key)) { Some(state) => state, None => { return Err(box_err!( "[region {}] failed to get raftstate from kv engine, \ when recover from applying state", region_id )); } }; let raft_state = box_try!(engines.raft.get_raft_state(region_id)).unwrap_or_default(); // if we recv append log when applying snapshot, last_index in raft_local_state will // larger than snapshot_index. since raft_local_state is written to raft engine, and // raft write_batch is written after kv write_batch, raft_local_state may wrong if // restart happen between the two write. so we copy raft_local_state to kv engine // (snapshot_raft_state), and set snapshot_raft_state.last_index = snapshot_index. // after restart, we need check last_index. if last_index(&snapshot_raft_state) > last_index(&raft_state) { raft_wb.put_raft_state(region_id, &snapshot_raft_state)?; } Ok(()) } fn init_applied_index_term<EK: KvEngine, ER: RaftEngine>( engines: &Engines<EK, ER>, region: &Region, apply_state: &RaftApplyState, ) -> Result<u64> { if apply_state.applied_index == RAFT_INIT_LOG_INDEX { return Ok(RAFT_INIT_LOG_TERM); } let truncated_state = apply_state.get_truncated_state(); if apply_state.applied_index == truncated_state.get_index() { return Ok(truncated_state.get_term()); } match engines .raft .get_entry(region.get_id(), apply_state.applied_index)? 
{ Some(e) => Ok(e.term), None => Err(box_err!( "[region {}] entry at apply index {} doesn't exist, may lose data.", region.get_id(), apply_state.applied_index )), } } fn init_raft_state<EK: KvEngine, ER: RaftEngine>( engines: &Engines<EK, ER>, region: &Region, ) -> Result<RaftLocalState> { if let Some(state) = engines.raft.get_raft_state(region.get_id())? { return Ok(state); } let mut raft_state = RaftLocalState::default(); if util::is_region_initialized(region) { // new split region raft_state.last_index = RAFT_INIT_LOG_INDEX; raft_state.mut_hard_state().set_term(RAFT_INIT_LOG_TERM); raft_state.mut_hard_state().set_commit(RAFT_INIT_LOG_INDEX); engines.raft.put_raft_state(region.get_id(), &raft_state)?; } Ok(raft_state) } fn init_apply_state<EK: KvEngine, ER: RaftEngine>( engines: &Engines<EK, ER>, region: &Region, ) -> Result<RaftApplyState> { Ok( match engines .kv .get_msg_cf(CF_RAFT, &keys::apply_state_key(region.get_id()))? { Some(s) => s, None => { let mut apply_state = RaftApplyState::default(); if util::is_region_initialized(region) { apply_state.set_applied_index(RAFT_INIT_LOG_INDEX); let state = apply_state.mut_truncated_state(); state.set_index(RAFT_INIT_LOG_INDEX); state.set_term(RAFT_INIT_LOG_TERM); } apply_state } }, ) } fn init_last_term<EK: KvEngine, ER: RaftEngine>( engines: &Engines<EK, ER>, region: &Region, raft_state: &RaftLocalState, apply_state: &RaftApplyState, ) -> Result<u64> { let last_idx = raft_state.get_last_index(); if last_idx == 0 { return Ok(0); } else if last_idx == RAFT_INIT_LOG_INDEX { return Ok(RAFT_INIT_LOG_TERM); } else if last_idx == apply_state.get_truncated_state().get_index() { return Ok(apply_state.get_truncated_state().get_term()); } else { assert!(last_idx > RAFT_INIT_LOG_INDEX); } let entry = engines.raft.get_entry(region.get_id(), last_idx)?; match entry { None => Err(box_err!( "[region {}] entry at {} doesn't exist, may lose data.", region.get_id(), last_idx )), Some(e) => Ok(e.get_term()), } } fn validate_states<EK: KvEngine, ER: RaftEngine>( region_id: u64, engines: &Engines<EK, ER>, raft_state: &mut RaftLocalState, apply_state: &RaftApplyState, ) -> Result<()> { let last_index = raft_state.get_last_index(); let mut commit_index = raft_state.get_hard_state().get_commit(); let recorded_commit_index = apply_state.get_commit_index(); let state_str = || -> String { format!( "region {}, raft state {:?}, apply state {:?}", region_id, raft_state, apply_state ) }; // The commit index of raft state may be less than the recorded commit index. // If so, forward the commit index. 
if commit_index < recorded_commit_index { let entry = engines.raft.get_entry(region_id, recorded_commit_index)?; if entry.map_or(true, |e| e.get_term() != apply_state.get_commit_term()) { return Err(box_err!( "log at recorded commit index [{}] {} doesn't exist, may lose data, {}", apply_state.get_commit_term(), recorded_commit_index, state_str() )); } info!("updating commit index"; "region_id" => region_id, "old" => commit_index, "new" => recorded_commit_index); commit_index = recorded_commit_index; } // Invariant: applied index <= max(commit index, recorded commit index) if apply_state.get_applied_index() > commit_index { return Err(box_err!( "applied index > max(commit index, recorded commit index), {}", state_str() )); } // Invariant: max(commit index, recorded commit index) <= last index if commit_index > last_index { return Err(box_err!( "max(commit index, recorded commit index) > last index, {}", state_str() )); } // Since the entries must be persisted before applying, the term of raft state should also // be persisted. So it should be greater than the commit term of apply state. if raft_state.get_hard_state().get_term() < apply_state.get_commit_term() { return Err(box_err!( "term of raft state < commit term of apply state, {}", state_str() )); } raft_state.mut_hard_state().set_commit(commit_index); Ok(()) } pub struct PeerStorage<EK, ER> where EK: KvEngine, { pub engines: Engines<EK, ER>, peer_id: u64, region: metapb::Region, raft_state: RaftLocalState, apply_state: RaftApplyState, applied_index_term: u64, last_term: u64, snap_state: RefCell<SnapState>, gen_snap_task: RefCell<Option<GenSnapTask>>, region_sched: Scheduler<RegionTask<EK::Snapshot>>, snap_tried_cnt: RefCell<usize>, // Entry cache if `ER doesn't have an internal entry cache. cache: Option<EntryCache>, pub tag: String, } impl<EK, ER> Storage for PeerStorage<EK, ER> where EK: KvEngine, ER: RaftEngine, { fn initial_state(&self) -> raft::Result<RaftState> { self.initial_state() } fn entries( &self, low: u64, high: u64, max_size: impl Into<Option<u64>>, ) -> raft::Result<Vec<Entry>> { self.entries(low, high, max_size.into().unwrap_or(u64::MAX)) } fn term(&self, idx: u64) -> raft::Result<u64> { self.term(idx) } fn first_index(&self) -> raft::Result<u64> { Ok(self.first_index()) } fn last_index(&self) -> raft::Result<u64> { Ok(self.last_index()) } fn snapshot(&self, request_index: u64) -> raft::Result<Snapshot> { self.snapshot(request_index) } } impl<EK, ER> PeerStorage<EK, ER> where EK: KvEngine, ER: RaftEngine, { pub fn new( engines: Engines<EK, ER>, region: &metapb::Region, region_sched: Scheduler<RegionTask<EK::Snapshot>>, peer_id: u64, tag: String, ) -> Result<PeerStorage<EK, ER>> { debug!( "creating storage on specified path"; "region_id" => region.get_id(), "peer_id" => peer_id, "path" => ?engines.kv.path(), ); let mut raft_state = init_raft_state(&engines, region)?; let apply_state = init_apply_state(&engines, region)?; if let Err(e) = validate_states(region.get_id(), &engines, &mut raft_state, &apply_state) { return Err(box_err!("{} validate state fail: {:?}", tag, e)); } let last_term = init_last_term(&engines, region, &raft_state, &apply_state)?; let applied_index_term = init_applied_index_term(&engines, region, &apply_state)?; let cache = if engines.raft.has_builtin_entry_cache() { None } else { Some(EntryCache::default()) }; Ok(PeerStorage { engines, peer_id, region: region.clone(), raft_state, apply_state, snap_state: RefCell::new(SnapState::Relax), gen_snap_task: RefCell::new(None), region_sched, snap_tried_cnt: 
RefCell::new(0), tag, applied_index_term, last_term, cache, }) } pub fn is_initialized(&self) -> bool { util::is_region_initialized(self.region()) } pub fn initial_state(&self) -> raft::Result<RaftState> { let hard_state = self.raft_state.get_hard_state().clone(); if hard_state == HardState::default() { assert!( !self.is_initialized(), "peer for region {:?} is initialized but local state {:?} has empty hard \ state", self.region, self.raft_state ); return Ok(RaftState::new(hard_state, ConfState::default())); } Ok(RaftState::new( hard_state, util::conf_state_from_region(self.region()), )) } fn check_range(&self, low: u64, high: u64) -> raft::Result<()> { if low > high { return Err(storage_error(format!( "low: {} is greater that high: {}", low, high ))); } else if low <= self.truncated_index() { return Err(RaftError::Store(StorageError::Compacted)); } else if high > self.last_index() + 1 { return Err(storage_error(format!( "entries' high {} is out of bound lastindex {}", high, self.last_index() ))); } Ok(()) } pub fn entries(&self, low: u64, high: u64, max_size: u64) -> raft::Result<Vec<Entry>> { self.check_range(low, high)?; let mut ents = Vec::with_capacity((high - low) as usize); if low == high { return Ok(ents); } let region_id = self.get_region_id(); if let Some(ref cache) = self.cache { let cache_low = cache.first_index().unwrap_or(u64::MAX); if high <= cache_low { cache.miss.update(|m| m + 1); self.engines.raft.fetch_entries_to( region_id, low, high, Some(max_size as usize), &mut ents, )?; return Ok(ents); } let begin_idx = if low < cache_low { cache.miss.update(|m| m + 1); let fetched_count = self.engines.raft.fetch_entries_to( region_id, low, cache_low, Some(max_size as usize), &mut ents, )?; if fetched_count < (cache_low - low) as usize { // Less entries are fetched than expected. 
return Ok(ents); } cache_low } else { low }; cache.hit.update(|h| h + 1); let fetched_size = ents.iter().fold(0, |acc, e| acc + e.compute_size()); cache.fetch_entries_to(begin_idx, high, fetched_size as u64, max_size, &mut ents); } else { self.engines.raft.fetch_entries_to( region_id, low, high, Some(max_size as usize), &mut ents, )?; } Ok(ents) } pub fn term(&self, idx: u64) -> raft::Result<u64> { if idx == self.truncated_index() { return Ok(self.truncated_term()); } self.check_range(idx, idx + 1)?; if self.truncated_term() == self.last_term || idx == self.last_index() { return Ok(self.last_term); } let entries = self.entries(idx, idx + 1, raft::NO_LIMIT)?; Ok(entries[0].get_term()) } #[inline] pub fn first_index(&self) -> u64 { first_index(&self.apply_state) } #[inline] pub fn last_index(&self) -> u64 { last_index(&self.raft_state) } #[inline] pub fn last_term(&self) -> u64 { self.last_term } #[inline] pub fn applied_index(&self) -> u64 { self.apply_state.get_applied_index() } #[inline] pub fn set_applied_state(&mut self, apply_state: RaftApplyState) { self.apply_state = apply_state; } #[inline] pub fn set_applied_term(&mut self, applied_index_term: u64) { self.applied_index_term = applied_index_term; } #[inline] pub fn apply_state(&self) -> &RaftApplyState { &self.apply_state } #[inline] pub fn applied_index_term(&self) -> u64 { self.applied_index_term } #[inline] pub fn commit_index(&self) -> u64 { self.raft_state.get_hard_state().get_commit() } #[inline] pub fn set_commit_index(&mut self, commit: u64) { assert!(commit >= self.commit_index()); self.raft_state.mut_hard_state().set_commit(commit); } #[inline] pub fn hard_state(&self) -> &HardState { self.raft_state.get_hard_state() } #[inline] pub fn truncated_index(&self) -> u64 { self.apply_state.get_truncated_state().get_index() } #[inline] pub fn truncated_term(&self) -> u64 { self.apply_state.get_truncated_state().get_term() } pub fn region(&self) -> &metapb::Region { &self.region } pub fn set_region(&mut self, region: metapb::Region) { self.region = region; } pub fn raw_snapshot(&self) -> EK::Snapshot { self.engines.kv.snapshot() } fn validate_snap(&self, snap: &Snapshot, request_index: u64) -> bool { let idx = snap.get_metadata().get_index(); if idx < self.truncated_index() || idx < request_index { // stale snapshot, should generate again. info!( "snapshot is stale, generate again"; "region_id" => self.region.get_id(), "peer_id" => self.peer_id, "snap_index" => idx, "truncated_index" => self.truncated_index(), "request_index" => request_index, ); STORE_SNAPSHOT_VALIDATION_FAILURE_COUNTER.stale.inc(); return false; } let mut snap_data = RaftSnapshotData::default(); if let Err(e) = snap_data.merge_from_bytes(snap.get_data()) { error!( "failed to decode snapshot, it may be corrupted"; "region_id" => self.region.get_id(), "peer_id" => self.peer_id, "err" => ?e, ); STORE_SNAPSHOT_VALIDATION_FAILURE_COUNTER.decode.inc(); return false; } let snap_epoch = snap_data.get_region().get_region_epoch(); let latest_epoch = self.region().get_region_epoch(); if snap_epoch.get_conf_ver() < latest_epoch.get_conf_ver() { info!( "snapshot epoch is stale"; "region_id" => self.region.get_id(), "peer_id" => self.peer_id, "snap_epoch" => ?snap_epoch, "latest_epoch" => ?latest_epoch, ); STORE_SNAPSHOT_VALIDATION_FAILURE_COUNTER.epoch.inc(); return false; } true } /// Gets a snapshot. Returns `SnapshotTemporarilyUnavailable` if there is no unavailable /// snapshot. 
pub fn snapshot(&self, request_index: u64) -> raft::Result<Snapshot> { let mut snap_state = self.snap_state.borrow_mut(); let mut tried_cnt = self.snap_tried_cnt.borrow_mut(); let (mut tried, mut last_canceled, mut snap) = (false, false, None); if let SnapState::Generating { ref canceled, ref receiver, .. } = *snap_state { tried = true; last_canceled = canceled.load(Ordering::SeqCst); match receiver.try_recv() { Err(TryRecvError::Empty) => { let e = raft::StorageError::SnapshotTemporarilyUnavailable; return Err(raft::Error::Store(e)); } Ok(s) if !last_canceled => snap = Some(s), Err(TryRecvError::Disconnected) | Ok(_) => {} } } if tried { *snap_state = SnapState::Relax; match snap { Some(s) => { *tried_cnt = 0; if self.validate_snap(&s, request_index) { return Ok(s); } } None => { warn!( "failed to try generating snapshot"; "region_id" => self.region.get_id(), "peer_id" => self.peer_id, "times" => *tried_cnt, ); } } } if SnapState::Relax != *snap_state { panic!("{} unexpected state: {:?}", self.tag, *snap_state); } if *tried_cnt >= MAX_SNAP_TRY_CNT { let cnt = *tried_cnt; *tried_cnt = 0; return Err(raft::Error::Store(box_err!( "failed to get snapshot after {} times", cnt ))); } info!( "requesting snapshot"; "region_id" => self.region.get_id(), "peer_id" => self.peer_id, "request_index" => request_index, ); if !tried || !last_canceled { *tried_cnt += 1; } let (sender, receiver) = mpsc::sync_channel(1); let canceled = Arc::new(AtomicBool::new(false)); let index = Arc::new(AtomicU64::new(0)); *snap_state = SnapState::Generating { canceled: canceled.clone(), index: index.clone(), receiver, }; let task = GenSnapTask::new(self.region.get_id(), index, canceled, sender); let mut gen_snap_task = self.gen_snap_task.borrow_mut(); assert!(gen_snap_task.is_none()); *gen_snap_task = Some(task); Err(raft::Error::Store( raft::StorageError::SnapshotTemporarilyUnavailable, )) } pub fn has_gen_snap_task(&self) -> bool { self.gen_snap_task.borrow().is_some() } pub fn mut_gen_snap_task(&mut self) -> &mut Option<GenSnapTask> { self.gen_snap_task.get_mut() } pub fn take_gen_snap_task(&mut self) -> Option<GenSnapTask> { self.gen_snap_task.get_mut().take() } // Append the given entries to the raft log using previous last index or self.last_index. // Return the new last index for later update. After we commit in engine, we can set last_index // to the return one. // WARNING: If this function returns error, the caller must panic otherwise the entry cache may // be wrong and break correctness. pub fn append<H: HandleRaftReadyContext<EK::WriteBatch, ER::LogBatch>>( &mut self, invoke_ctx: &mut InvokeContext, entries: Vec<Entry>, ready_ctx: &mut H, ) -> Result<u64> { let region_id = self.get_region_id(); debug!( "append entries"; "region_id" => region_id, "peer_id" => self.peer_id, "count" => entries.len(), ); let prev_last_index = invoke_ctx.raft_state.get_last_index(); if entries.is_empty() { return Ok(prev_last_index); } invoke_ctx.has_new_entries = true; let (last_index, last_term) = { let e = entries.last().unwrap(); (e.get_index(), e.get_term()) }; // WARNING: This code is correct based on the assumption that // if this function returns error, the TiKV will panic soon, // otherwise, the entry cache may be wrong and break correctness. if let Some(ref mut cache) = self.cache { cache.append(&self.tag, &entries); } ready_ctx.raft_wb_mut().append(region_id, entries)?; // Delete any previously appended log entries which never committed. // TODO: Wrap it as an engine::Error. 
ready_ctx .raft_wb_mut() .cut_logs(region_id, last_index + 1, prev_last_index); invoke_ctx.raft_state.set_last_index(last_index); invoke_ctx.last_term = last_term; Ok(last_index) } pub fn compact_to(&mut self, idx: u64) { if let Some(ref mut cache) = self.cache { cache.compact_to(idx); } else { let rid = self.get_region_id(); self.engines.raft.gc_entry_cache(rid, idx); } self.cancel_generating_snap(Some(idx)); } #[inline] pub fn is_cache_empty(&self) -> bool { self.cache.as_ref().map_or(true, |c| c.is_empty()) } pub fn maybe_gc_cache(&mut self, replicated_idx: u64, apply_idx: u64) { if self.engines.raft.has_builtin_entry_cache() { let rid = self.get_region_id(); self.engines.raft.gc_entry_cache(rid, apply_idx + 1); return; } let cache = self.cache.as_mut().unwrap(); if replicated_idx == apply_idx { // The region is inactive, clear the cache immediately. cache.compact_to(apply_idx + 1); return; } let cache_first_idx = match cache.first_index() { None => return, Some(idx) => idx, }; if cache_first_idx > replicated_idx + 1 { // Catching up log requires accessing fs already, let's optimize for // the common case. // Maybe gc to second least replicated_idx is better. cache.compact_to(apply_idx + 1); } } #[inline] pub fn flush_cache_metrics(&mut self) { if let Some(ref mut cache) = self.cache { cache.flush_mem_size_change(); cache.flush_stats(); return; } if let Some(stats) = self.engines.raft.flush_stats() { RAFT_ENTRIES_CACHES_GAUGE.set(stats.cache_size as i64); RAFT_ENTRY_FETCHES.hit.inc_by(stats.hit as i64); RAFT_ENTRY_FETCHES.miss.inc_by(stats.miss as i64); } } // Apply the peer with given snapshot. pub fn apply_snapshot( &mut self, ctx: &mut InvokeContext, snap: &Snapshot, kv_wb: &mut EK::WriteBatch, raft_wb: &mut ER::LogBatch, destroy_regions: &[metapb::Region], ) -> Result<()> { info!( "begin to apply snapshot"; "region_id" => self.region.get_id(), "peer_id" => self.peer_id, ); let mut snap_data = RaftSnapshotData::default(); snap_data.merge_from_bytes(snap.get_data())?; let region_id = self.get_region_id(); let region = snap_data.take_region(); if region.get_id() != region_id { return Err(box_err!( "mismatch region id {} != {}", region_id, region.get_id() )); } if self.is_initialized() { // we can only delete the old data when the peer is initialized. self.clear_meta(kv_wb, raft_wb)?; } // Write its source peers' `RegionLocalState` together with itself for atomicity for r in destroy_regions { write_peer_state(kv_wb, r, PeerState::Tombstone, None)?; } write_peer_state(kv_wb, &region, PeerState::Applying, None)?; let last_index = snap.get_metadata().get_index(); ctx.raft_state.set_last_index(last_index); ctx.last_term = snap.get_metadata().get_term(); ctx.apply_state.set_applied_index(last_index); // The snapshot only contains log which index > applied index, so // here the truncate state's (index, term) is in snapshot metadata. ctx.apply_state.mut_truncated_state().set_index(last_index); ctx.apply_state .mut_truncated_state() .set_term(snap.get_metadata().get_term()); info!( "apply snapshot with state ok"; "region_id" => self.region.get_id(), "peer_id" => self.peer_id, "region" => ?region, "state" => ?ctx.apply_state, ); ctx.snap_region = Some(region); Ok(()) } /// Delete all meta belong to the region. Results are stored in `wb`. 
pub fn clear_meta( &mut self, kv_wb: &mut EK::WriteBatch, raft_wb: &mut ER::LogBatch, ) -> Result<()> { let region_id = self.get_region_id(); clear_meta(&self.engines, kv_wb, raft_wb, region_id, &self.raft_state)?; if !self.engines.raft.has_builtin_entry_cache() { self.cache = Some(EntryCache::default()); } Ok(()) } /// Delete all data belong to the region. /// If return Err, data may get partial deleted. pub fn clear_data(&self) -> Result<()> { let (start_key, end_key) = (enc_start_key(self.region()), enc_end_key(self.region())); let region_id = self.get_region_id(); box_try!( self.region_sched .schedule(RegionTask::destroy(region_id, start_key, end_key)) ); Ok(()) } /// Delete all data that is not covered by `new_region`. fn clear_extra_data( &self, old_region: &metapb::Region, new_region: &metapb::Region, ) -> Result<()> { let (old_start_key, old_end_key) = (enc_start_key(old_region), enc_end_key(old_region)); let (new_start_key, new_end_key) = (enc_start_key(new_region), enc_end_key(new_region)); if old_start_key < new_start_key { box_try!(self.region_sched.schedule(RegionTask::destroy( old_region.get_id(), old_start_key, new_start_key ))); } if new_end_key < old_end_key { box_try!(self.region_sched.schedule(RegionTask::destroy( old_region.get_id(), new_end_key, old_end_key ))); } Ok(()) } /// Delete all extra split data from the `start_key` to `end_key`. pub fn clear_extra_split_data(&self, start_key: Vec<u8>, end_key: Vec<u8>) -> Result<()> { box_try!(self.region_sched.schedule(RegionTask::destroy( self.get_region_id(), start_key, end_key ))); Ok(()) } pub fn get_raft_engine(&self) -> ER { self.engines.raft.clone() } /// Check whether the storage has finished applying snapshot. #[inline] pub fn is_applying_snapshot(&self) -> bool { matches!(*self.snap_state.borrow(), SnapState::Applying(_)) } #[inline] pub fn is_generating_snapshot(&self) -> bool { fail_point!("is_generating_snapshot", |_| { true }); matches!(*self.snap_state.borrow(), SnapState::Generating { .. }) } /// Check if the storage is applying a snapshot. #[inline] pub fn check_applying_snap(&mut self) -> CheckApplyingSnapStatus { let mut res = CheckApplyingSnapStatus::Idle; let new_state = match *self.snap_state.borrow() { SnapState::Applying(ref status) => { let s = status.load(Ordering::Relaxed); if s == JOB_STATUS_FINISHED { res = CheckApplyingSnapStatus::Success; SnapState::Relax } else if s == JOB_STATUS_CANCELLED { SnapState::ApplyAborted } else if s == JOB_STATUS_FAILED { // TODO: cleanup region and treat it as tombstone. panic!("{} applying snapshot failed", self.tag,); } else { return CheckApplyingSnapStatus::Applying; } } _ => return res, }; *self.snap_state.borrow_mut() = new_state; res } /// Cancel applying snapshot, return true if the job can be considered not be run again. pub fn cancel_applying_snap(&mut self) -> bool { let is_canceled = match *self.snap_state.borrow() { SnapState::Applying(ref status) => { if status .compare_exchange( JOB_STATUS_PENDING, JOB_STATUS_CANCELLING, Ordering::SeqCst, Ordering::SeqCst, ) .is_ok() { true } else if status .compare_exchange( JOB_STATUS_RUNNING, JOB_STATUS_CANCELLING, Ordering::SeqCst, Ordering::SeqCst, ) .is_ok() { return false; } else { false } } _ => return false, }; if is_canceled { *self.snap_state.borrow_mut() = SnapState::ApplyAborted; return true; } // now status can only be JOB_STATUS_CANCELLING, JOB_STATUS_CANCELLED, // JOB_STATUS_FAILED and JOB_STATUS_FINISHED. self.check_applying_snap() != CheckApplyingSnapStatus::Applying } /// Cancel generating snapshot. 
pub fn cancel_generating_snap(&mut self, compact_to: Option<u64>) { let snap_state = self.snap_state.borrow(); if let SnapState::Generating { ref canceled, ref index, .. } = *snap_state { if !canceled.load(Ordering::SeqCst) { if let Some(idx) = compact_to { let snap_index = index.load(Ordering::SeqCst); if snap_index == 0 || idx <= snap_index + 1 { return; } } canceled.store(true, Ordering::SeqCst); } } } #[inline] pub fn set_snap_state(&mut self, state: SnapState) { *self.snap_state.borrow_mut() = state } #[inline] pub fn is_snap_state(&self, state: SnapState) -> bool { *self.snap_state.borrow() == state } pub fn get_region_id(&self) -> u64 { self.region().get_id() } pub fn schedule_applying_snapshot(&mut self) { let status = Arc::new(AtomicUsize::new(JOB_STATUS_PENDING)); self.set_snap_state(SnapState::Applying(Arc::clone(&status))); let task = RegionTask::Apply { region_id: self.get_region_id(), status, }; // Don't schedule the snapshot to region worker. fail_point!("skip_schedule_applying_snapshot", |_| {}); // TODO: gracefully remove region instead. if let Err(e) = self.region_sched.schedule(task) { info!( "failed to to schedule apply job, are we shutting down?"; "region_id" => self.region.get_id(), "peer_id" => self.peer_id, "err" => ?e, ); } } /// Save memory states to disk. /// /// This function only write data to `ready_ctx`'s `WriteBatch`. It's caller's duty to write /// it explicitly to disk. If it's flushed to disk successfully, `post_ready` should be called /// to update the memory states properly. /// WARNING: If this function returns error, the caller must panic(details in `append` function). pub fn handle_raft_ready<H: HandleRaftReadyContext<EK::WriteBatch, ER::LogBatch>>( &mut self, ready_ctx: &mut H, ready: &mut Ready, destroy_regions: Vec<metapb::Region>, ) -> Result<InvokeContext> { let mut ctx = InvokeContext::new(self); let snapshot_index = if ready.snapshot().is_empty() { 0 } else { fail_point!("raft_before_apply_snap"); let (kv_wb, raft_wb) = ready_ctx.wb_mut(); self.apply_snapshot(&mut ctx, ready.snapshot(), kv_wb, raft_wb, &destroy_regions)?; fail_point!("raft_after_apply_snap"); ctx.destroyed_regions = destroy_regions; last_index(&ctx.raft_state) }; if !ready.entries().is_empty() { self.append(&mut ctx, ready.take_entries(), ready_ctx)?; } // Last index is 0 means the peer is created from raft message // and has not applied snapshot yet, so skip persistent hard state. if ctx.raft_state.get_last_index() > 0 { if let Some(hs) = ready.hs() { ctx.raft_state.set_hard_state(hs.clone()); } } // Save raft state if it has changed or there is a snapshot. if ctx.raft_state != self.raft_state || snapshot_index > 0 { ctx.save_raft_state_to(ready_ctx.raft_wb_mut())?; if snapshot_index > 0 { // in case of restart happen when we just write region state to Applying, // but not write raft_local_state to raft rocksdb in time. // we write raft state to default rocksdb, with last index set to snap index, // in case of recv raft log after snapshot. ctx.save_snapshot_raft_state_to(snapshot_index, ready_ctx.kv_wb_mut())?; } } // only when apply snapshot if snapshot_index > 0 { ctx.save_apply_state_to(ready_ctx.kv_wb_mut())?; } Ok(ctx) } /// Update the memory state after ready changes are flushed to disk successfully. pub fn post_ready(&mut self, ctx: InvokeContext) -> Option<ApplySnapResult> { self.raft_state = ctx.raft_state; self.apply_state = ctx.apply_state; self.last_term = ctx.last_term; // If we apply snapshot ok, we should update some infos like applied index too. 
let snap_region = match ctx.snap_region { Some(r) => r, None => return None, }; // cleanup data before scheduling apply task if self.is_initialized() { if let Err(e) = self.clear_extra_data(self.region(), &snap_region) { // No need panic here, when applying snapshot, the deletion will be tried // again. But if the region range changes, like [a, c) -> [a, b) and [b, c), // [b, c) will be kept in rocksdb until a covered snapshot is applied or // store is restarted. error!(?e; "failed to cleanup data, may leave some dirty data"; "region_id" => self.get_region_id(), "peer_id" => self.peer_id, ); } } // Note that the correctness depends on the fact that these source regions MUST NOT // serve read request otherwise a corrupt data may be returned. // For now, it is ensured by // 1. After `PrepareMerge` log is committed, the source region leader's lease will be // suspected immediately which makes the local reader not serve read request. // 2. No read request can be responsed in peer fsm during merging. // These conditions are used to prevent reading **stale** data in the past. // At present, they are also used to prevent reading **corrupt** data. for r in &ctx.destroyed_regions { if let Err(e) = self.clear_extra_data(r, &snap_region) { error!(?e; "failed to cleanup data, may leave some dirty data"; "region_id" => r.get_id(), ); } } self.schedule_applying_snapshot(); let prev_region = self.region().clone(); self.set_region(snap_region); Some(ApplySnapResult { prev_region, region: self.region().clone(), destroyed_regions: ctx.destroyed_regions, }) } } #[allow(dead_code)] fn get_sync_log_from_entry(entry: &Entry) -> bool { if entry.get_sync_log() { return true; } let ctx = entry.get_context(); if !ctx.is_empty() { let ctx = ProposalContext::from_bytes(ctx); if ctx.contains(ProposalContext::SYNC_LOG) { return true; } } false } /// Delete all meta belong to the region. Results are stored in `wb`. 
pub fn clear_meta<EK, ER>( engines: &Engines<EK, ER>, kv_wb: &mut EK::WriteBatch, raft_wb: &mut ER::LogBatch, region_id: u64, raft_state: &RaftLocalState, ) -> Result<()> where EK: KvEngine, ER: RaftEngine, { let t = Instant::now(); box_try!(kv_wb.delete_cf(CF_RAFT, &keys::region_state_key(region_id))); box_try!(kv_wb.delete_cf(CF_RAFT, &keys::apply_state_key(region_id))); box_try!(engines.raft.clean(region_id, raft_state, raft_wb)); info!( "finish clear peer meta"; "region_id" => region_id, "meta_key" => 1, "apply_key" => 1, "raft_key" => 1, "takes" => ?t.elapsed(), ); Ok(()) } pub fn do_snapshot<E>( mgr: SnapManager, engine: &E, kv_snap: E::Snapshot, region_id: u64, last_applied_index_term: u64, last_applied_state: RaftApplyState, for_balance: bool, ) -> raft::Result<Snapshot> where E: KvEngine, { debug!( "begin to generate a snapshot"; "region_id" => region_id, ); let msg = kv_snap .get_msg_cf(CF_RAFT, &keys::apply_state_key(region_id)) .map_err(into_other::<_, raft::Error>)?; let apply_state: RaftApplyState = match msg { None => { return Err(storage_error(format!( "could not load raft state of region {}", region_id ))); } Some(state) => state, }; assert_eq!(apply_state, last_applied_state); let key = SnapKey::new( region_id, last_applied_index_term, apply_state.get_applied_index(), ); mgr.register(key.clone(), SnapEntry::Generating); defer!(mgr.deregister(&key, &SnapEntry::Generating)); let state: RegionLocalState = kv_snap .get_msg_cf(CF_RAFT, &keys::region_state_key(key.region_id)) .and_then(|res| match res { None => Err(box_err!("region {} could not find region info", region_id)), Some(state) => Ok(state), }) .map_err(into_other::<_, raft::Error>)?; if state.get_state() != PeerState::Normal { return Err(storage_error(format!( "snap job for {} seems stale, skip.", region_id ))); } let mut snapshot = Snapshot::default(); // Set snapshot metadata. snapshot.mut_metadata().set_index(key.idx); snapshot.mut_metadata().set_term(key.term); let conf_state = util::conf_state_from_region(state.get_region()); snapshot.mut_metadata().set_conf_state(conf_state); let mut s = mgr.get_snapshot_for_building(&key)?; // Set snapshot data. let mut snap_data = RaftSnapshotData::default(); snap_data.set_region(state.get_region().clone()); let mut stat = SnapshotStatistics::new(); s.build( engine, &kv_snap, state.get_region(), &mut snap_data, &mut stat, )?; snap_data.mut_meta().set_for_balance(for_balance); let v = snap_data.write_to_bytes()?; snapshot.set_data(v); SNAPSHOT_KV_COUNT_HISTOGRAM.observe(stat.kv_count as f64); SNAPSHOT_SIZE_HISTOGRAM.observe(stat.size as f64); Ok(snapshot) } // When we bootstrap the region we must call this to initialize region local state first. pub fn write_initial_raft_state<W: RaftLogBatch>(raft_wb: &mut W, region_id: u64) -> Result<()> { let mut raft_state = RaftLocalState { last_index: RAFT_INIT_LOG_INDEX, ..Default::default() }; raft_state.mut_hard_state().set_term(RAFT_INIT_LOG_TERM); raft_state.mut_hard_state().set_commit(RAFT_INIT_LOG_INDEX); raft_wb.put_raft_state(region_id, &raft_state)?; Ok(()) } // When we bootstrap the region or handling split new region, we must // call this to initialize region apply state first. 
pub fn write_initial_apply_state<T: Mutable>(kv_wb: &mut T, region_id: u64) -> Result<()> { let mut apply_state = RaftApplyState::default(); apply_state.set_applied_index(RAFT_INIT_LOG_INDEX); apply_state .mut_truncated_state() .set_index(RAFT_INIT_LOG_INDEX); apply_state .mut_truncated_state() .set_term(RAFT_INIT_LOG_TERM); kv_wb.put_msg_cf(CF_RAFT, &keys::apply_state_key(region_id), &apply_state)?; Ok(()) } pub fn write_peer_state<T: Mutable>( kv_wb: &mut T, region: &metapb::Region, state: PeerState, merge_state: Option<MergeState>, ) -> Result<()> { let region_id = region.get_id(); let mut region_state = RegionLocalState::default(); region_state.set_state(state); region_state.set_region(region.clone()); if let Some(state) = merge_state { region_state.set_merge_state(state); } debug!( "writing merge state"; "region_id" => region_id, "state" => ?region_state, ); kv_wb.put_msg_cf(CF_RAFT, &keys::region_state_key(region_id), &region_state)?; Ok(()) } #[cfg(test)] mod tests { use crate::coprocessor::CoprocessorHost; use crate::store::fsm::apply::compact_raft_log; use crate::store::worker::RegionRunner; use crate::store::worker::RegionTask; use crate::store::{bootstrap_store, initial_region, prepare_bootstrap_cluster}; use engine_test::kv::{KvTestEngine, KvTestSnapshot, KvTestWriteBatch}; use engine_test::raft::{RaftTestEngine, RaftTestWriteBatch}; use engine_traits::Engines; use engine_traits::{Iterable, SyncMutable, WriteBatch, WriteBatchExt}; use engine_traits::{ALL_CFS, CF_DEFAULT}; use kvproto::raft_serverpb::RaftSnapshotData; use raft::eraftpb::HardState; use raft::eraftpb::{ConfState, Entry}; use raft::{Error as RaftError, StorageError}; use std::cell::RefCell; use std::path::Path; use std::sync::atomic::*; use std::sync::mpsc::*; use std::sync::*; use std::time::Duration; use tempfile::{Builder, TempDir}; use tikv_util::worker::{LazyWorker, Scheduler, Worker}; use super::*; fn new_storage( sched: Scheduler<RegionTask<KvTestSnapshot>>, path: &TempDir, ) -> PeerStorage<KvTestEngine, RaftTestEngine> { let kv_db = engine_test::kv::new_engine(path.path().to_str().unwrap(), None, ALL_CFS, None) .unwrap(); let raft_path = path.path().join(Path::new("raft")); let raft_db = engine_test::raft::new_engine(raft_path.to_str().unwrap(), None, CF_DEFAULT, None) .unwrap(); let engines = Engines::new(kv_db, raft_db); bootstrap_store(&engines, 1, 1).unwrap(); let region = initial_region(1, 1, 1); prepare_bootstrap_cluster(&engines, &region).unwrap(); PeerStorage::new(engines, &region, sched, 0, "".to_owned()).unwrap() } struct ReadyContext { kv_wb: KvTestWriteBatch, raft_wb: RaftTestWriteBatch, sync_log: bool, } impl ReadyContext { fn new(s: &PeerStorage<KvTestEngine, RaftTestEngine>) -> ReadyContext { ReadyContext { kv_wb: s.engines.kv.write_batch(), raft_wb: s.engines.raft.write_batch(), sync_log: false, } } } impl HandleRaftReadyContext<KvTestWriteBatch, RaftTestWriteBatch> for ReadyContext { fn wb_mut(&mut self) -> (&mut KvTestWriteBatch, &mut RaftTestWriteBatch) { (&mut self.kv_wb, &mut self.raft_wb) } fn kv_wb_mut(&mut self) -> &mut KvTestWriteBatch { &mut self.kv_wb } fn raft_wb_mut(&mut self) -> &mut RaftTestWriteBatch { &mut self.raft_wb } fn sync_log(&self) -> bool { self.sync_log } fn set_sync_log(&mut self, sync: bool) { self.sync_log = sync; } } fn new_storage_from_ents( sched: Scheduler<RegionTask<KvTestSnapshot>>, path: &TempDir, ents: &[Entry], ) -> PeerStorage<KvTestEngine, RaftTestEngine> { let mut store = new_storage(sched, path); let mut kv_wb = store.engines.kv.write_batch(); let 
mut ctx = InvokeContext::new(&store); let mut ready_ctx = ReadyContext::new(&store); store .append(&mut ctx, ents[1..].to_vec(), &mut ready_ctx) .unwrap(); ctx.apply_state .mut_truncated_state() .set_index(ents[0].get_index()); ctx.apply_state .mut_truncated_state() .set_term(ents[0].get_term()); ctx.apply_state .set_applied_index(ents.last().unwrap().get_index()); ctx.save_apply_state_to(&mut kv_wb).unwrap(); ready_ctx.raft_wb.write().unwrap(); kv_wb.write().unwrap(); store.raft_state = ctx.raft_state; store.apply_state = ctx.apply_state; store } fn append_ents(store: &mut PeerStorage<KvTestEngine, RaftTestEngine>, ents: &[Entry]) { let mut ctx = InvokeContext::new(store); let mut ready_ctx = ReadyContext::new(store); store .append(&mut ctx, ents.to_vec(), &mut ready_ctx) .unwrap(); ctx.save_raft_state_to(&mut ready_ctx.raft_wb).unwrap(); ready_ctx.raft_wb.write().unwrap(); store.raft_state = ctx.raft_state; } fn validate_cache(store: &PeerStorage<KvTestEngine, RaftTestEngine>, exp_ents: &[Entry]) { assert_eq!(store.cache.as_ref().unwrap().cache, exp_ents); for e in exp_ents { let key = keys::raft_log_key(store.get_region_id(), e.get_index()); let bytes = store.engines.raft.get_value(&key).unwrap().unwrap(); let mut entry = Entry::default(); entry.merge_from_bytes(&bytes).unwrap(); assert_eq!(entry, *e); } } fn new_entry(index: u64, term: u64) -> Entry { let mut e = Entry::default(); e.set_index(index); e.set_term(term); e } fn size_of<T: protobuf::Message>(m: &T) -> u32 { m.compute_size() } #[test] fn test_storage_term() { let ents = vec![new_entry(3, 3), new_entry(4, 4), new_entry(5, 5)]; let mut tests = vec![ (2, Err(RaftError::Store(StorageError::Compacted))), (3, Ok(3)), (4, Ok(4)), (5, Ok(5)), ]; for (i, (idx, wterm)) in tests.drain(..).enumerate() { let td = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let worker = Worker::new("snap-manager").lazy_build("snap-manager"); let sched = worker.scheduler(); let store = new_storage_from_ents(sched, &td, &ents); let t = store.term(idx); if wterm != t { panic!("#{}: expect res {:?}, got {:?}", i, wterm, t); } } } fn get_meta_key_count(store: &PeerStorage<KvTestEngine, RaftTestEngine>) -> usize { let region_id = store.get_region_id(); let mut count = 0; let (meta_start, meta_end) = ( keys::region_meta_prefix(region_id), keys::region_meta_prefix(region_id + 1), ); store .engines .kv .scan_cf(CF_RAFT, &meta_start, &meta_end, false, |_, _| { count += 1; Ok(true) }) .unwrap(); let (raft_start, raft_end) = ( keys::region_raft_prefix(region_id), keys::region_raft_prefix(region_id + 1), ); store .engines .kv .scan_cf(CF_RAFT, &raft_start, &raft_end, false, |_, _| { count += 1; Ok(true) }) .unwrap(); store .engines .raft .scan(&raft_start, &raft_end, false, |_, _| { count += 1; Ok(true) }) .unwrap(); count } #[test] fn test_storage_clear_meta() { let td = Builder::new().prefix("tikv-store").tempdir().unwrap(); let worker = Worker::new("snap-manager").lazy_build("snap-manager"); let sched = worker.scheduler(); let mut store = new_storage_from_ents(sched, &td, &[new_entry(3, 3), new_entry(4, 4)]); append_ents(&mut store, &[new_entry(5, 5), new_entry(6, 6)]); assert_eq!(6, get_meta_key_count(&store)); let mut kv_wb = store.engines.kv.write_batch(); let mut raft_wb = store.engines.raft.write_batch(); store.clear_meta(&mut kv_wb, &mut raft_wb).unwrap(); kv_wb.write().unwrap(); raft_wb.write().unwrap(); assert_eq!(0, get_meta_key_count(&store)); } #[test] fn test_storage_entries() { let ents = vec![ new_entry(3, 3), new_entry(4, 4), 
new_entry(5, 5), new_entry(6, 6), ]; let max_u64 = u64::max_value(); let mut tests = vec![ ( 2, 6, max_u64, Err(RaftError::Store(StorageError::Compacted)), ), ( 3, 4, max_u64, Err(RaftError::Store(StorageError::Compacted)), ), (4, 5, max_u64, Ok(vec![new_entry(4, 4)])), (4, 6, max_u64, Ok(vec![new_entry(4, 4), new_entry(5, 5)])), ( 4, 7, max_u64, Ok(vec![new_entry(4, 4), new_entry(5, 5), new_entry(6, 6)]), ), // even if maxsize is zero, the first entry should be returned (4, 7, 0, Ok(vec![new_entry(4, 4)])), // limit to 2 ( 4, 7, u64::from(size_of(&ents[1]) + size_of(&ents[2])), Ok(vec![new_entry(4, 4), new_entry(5, 5)]), ), ( 4, 7, u64::from(size_of(&ents[1]) + size_of(&ents[2]) + size_of(&ents[3]) / 2), Ok(vec![new_entry(4, 4), new_entry(5, 5)]), ), ( 4, 7, u64::from(size_of(&ents[1]) + size_of(&ents[2]) + size_of(&ents[3]) - 1), Ok(vec![new_entry(4, 4), new_entry(5, 5)]), ), // all ( 4, 7, u64::from(size_of(&ents[1]) + size_of(&ents[2]) + size_of(&ents[3])), Ok(vec![new_entry(4, 4), new_entry(5, 5), new_entry(6, 6)]), ), ]; for (i, (lo, hi, maxsize, wentries)) in tests.drain(..).enumerate() { let td = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let worker = Worker::new("snap-manager").lazy_build("snap-manager"); let sched = worker.scheduler(); let store = new_storage_from_ents(sched, &td, &ents); let e = store.entries(lo, hi, maxsize); if e != wentries { panic!("#{}: expect entries {:?}, got {:?}", i, wentries, e); } } } // last_index and first_index are not mutated by PeerStorage on its own, // so we don't test them here. #[test] fn test_storage_compact() { let ents = vec![new_entry(3, 3), new_entry(4, 4), new_entry(5, 5)]; let mut tests = vec![ (2, Err(RaftError::Store(StorageError::Compacted))), (3, Err(RaftError::Store(StorageError::Compacted))), (4, Ok(())), (5, Ok(())), ]; for (i, (idx, werr)) in tests.drain(..).enumerate() { let td = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let worker = Worker::new("snap-manager").lazy_build("snap-manager"); let sched = worker.scheduler(); let store = new_storage_from_ents(sched, &td, &ents); let mut ctx = InvokeContext::new(&store); let res = store .term(idx) .map_err(From::from) .and_then(|term| compact_raft_log(&store.tag, &mut ctx.apply_state, idx, term)); // TODO check exact error type after refactoring error. 
if res.is_err() ^ werr.is_err() { panic!("#{}: want {:?}, got {:?}", i, werr, res); } if res.is_ok() { let mut kv_wb = store.engines.kv.write_batch(); ctx.save_apply_state_to(&mut kv_wb).unwrap(); kv_wb.write().unwrap(); } } } fn generate_and_schedule_snapshot( gen_task: GenSnapTask, engines: &Engines<KvTestEngine, RaftTestEngine>, sched: &Scheduler<RegionTask<KvTestSnapshot>>, ) -> Result<()> { let apply_state: RaftApplyState = engines .kv .get_msg_cf(CF_RAFT, &keys::apply_state_key(gen_task.region_id)) .unwrap() .unwrap(); let idx = apply_state.get_applied_index(); let entry = engines .raft .get_msg::<Entry>(&keys::raft_log_key(gen_task.region_id, idx)) .unwrap() .unwrap(); gen_task.generate_and_schedule_snapshot::<KvTestEngine>( engines.kv.clone().snapshot(), entry.get_term(), apply_state, sched, ) } #[test] fn test_storage_create_snapshot() { let ents = vec![new_entry(3, 3), new_entry(4, 4), new_entry(5, 5)]; let mut cs = ConfState::default(); cs.set_voters(vec![1, 2, 3]); let td = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let snap_dir = Builder::new().prefix("snap_dir").tempdir().unwrap(); let mgr = SnapManager::new(snap_dir.path().to_str().unwrap()); let mut worker = Worker::new("region-worker").lazy_build("la"); let sched = worker.scheduler(); let mut s = new_storage_from_ents(sched.clone(), &td, &ents); let (router, _) = mpsc::sync_channel(100); let runner = RegionRunner::new( s.engines.kv.clone(), mgr, 0, true, CoprocessorHost::<KvTestEngine>::default(), router, ); worker.start_with_timer(runner); let snap = s.snapshot(0); let unavailable = RaftError::Store(StorageError::SnapshotTemporarilyUnavailable); assert_eq!(snap.unwrap_err(), unavailable); assert_eq!(*s.snap_tried_cnt.borrow(), 1); let gen_task = s.gen_snap_task.borrow_mut().take().unwrap(); generate_and_schedule_snapshot(gen_task, &s.engines, &sched).unwrap(); let snap = match *s.snap_state.borrow() { SnapState::Generating { ref receiver, .. } => { receiver.recv_timeout(Duration::from_secs(3)).unwrap() } ref s => panic!("unexpected state: {:?}", s), }; assert_eq!(snap.get_metadata().get_index(), 5); assert_eq!(snap.get_metadata().get_term(), 5); assert!(!snap.get_data().is_empty()); let mut data = RaftSnapshotData::default(); protobuf::Message::merge_from_bytes(&mut data, snap.get_data()).unwrap(); assert_eq!(data.get_region().get_id(), 1); assert_eq!(data.get_region().get_peers().len(), 1); let (tx, rx) = channel(); s.set_snap_state(gen_snap_for_test(rx)); // Empty channel should cause snapshot call to wait. assert_eq!(s.snapshot(0).unwrap_err(), unavailable); assert_eq!(*s.snap_tried_cnt.borrow(), 1); tx.send(snap.clone()).unwrap(); assert_eq!(s.snapshot(0), Ok(snap.clone())); assert_eq!(*s.snap_tried_cnt.borrow(), 0); let (tx, rx) = channel(); tx.send(snap.clone()).unwrap(); s.set_snap_state(gen_snap_for_test(rx)); // stale snapshot should be abandoned, snapshot index < request index. assert_eq!( s.snapshot(snap.get_metadata().get_index() + 1).unwrap_err(), unavailable ); assert_eq!(*s.snap_tried_cnt.borrow(), 1); // Drop the task. 
let _ = s.gen_snap_task.borrow_mut().take().unwrap(); let mut ctx = InvokeContext::new(&s); let mut kv_wb = s.engines.kv.write_batch(); let mut ready_ctx = ReadyContext::new(&s); s.append( &mut ctx, [new_entry(6, 5), new_entry(7, 5)].to_vec(), &mut ready_ctx, ) .unwrap(); let mut hs = HardState::default(); hs.set_commit(7); hs.set_term(5); ctx.raft_state.set_hard_state(hs); ctx.raft_state.set_last_index(7); ctx.apply_state.set_applied_index(7); ctx.save_raft_state_to(&mut ready_ctx.raft_wb).unwrap(); ctx.save_apply_state_to(&mut kv_wb).unwrap(); kv_wb.write().unwrap(); ready_ctx.raft_wb.write().unwrap(); s.apply_state = ctx.apply_state; s.raft_state = ctx.raft_state; ctx = InvokeContext::new(&s); let term = s.term(7).unwrap(); compact_raft_log(&s.tag, &mut ctx.apply_state, 7, term).unwrap(); kv_wb = s.engines.kv.write_batch(); ctx.save_apply_state_to(&mut kv_wb).unwrap(); kv_wb.write().unwrap(); s.apply_state = ctx.apply_state; let (tx, rx) = channel(); tx.send(snap).unwrap(); s.set_snap_state(gen_snap_for_test(rx)); *s.snap_tried_cnt.borrow_mut() = 1; // stale snapshot should be abandoned, snapshot index < truncated index. assert_eq!(s.snapshot(0).unwrap_err(), unavailable); assert_eq!(*s.snap_tried_cnt.borrow(), 1); let gen_task = s.gen_snap_task.borrow_mut().take().unwrap(); generate_and_schedule_snapshot(gen_task, &s.engines, &sched).unwrap(); match *s.snap_state.borrow() { SnapState::Generating { ref receiver, .. } => { receiver.recv_timeout(Duration::from_secs(3)).unwrap(); worker.stop(); match receiver.recv_timeout(Duration::from_secs(3)) { Err(RecvTimeoutError::Disconnected) => {} res => panic!("unexpected result: {:?}", res), } } ref s => panic!("unexpected state {:?}", s), } // Disconnected channel should trigger another try. assert_eq!(s.snapshot(0).unwrap_err(), unavailable); let gen_task = s.gen_snap_task.borrow_mut().take().unwrap(); generate_and_schedule_snapshot(gen_task, &s.engines, &sched).unwrap_err(); assert_eq!(*s.snap_tried_cnt.borrow(), 2); for cnt in 2..super::MAX_SNAP_TRY_CNT + 10 { if cnt < 12 { // Canceled generating won't be counted in `snap_tried_cnt`. s.cancel_generating_snap(None); assert_eq!(*s.snap_tried_cnt.borrow(), 2); } else { assert_eq!(*s.snap_tried_cnt.borrow(), cnt - 10); } // A failed scheduled job should trigger another try. assert_eq!(s.snapshot(0).unwrap_err(), unavailable); let gen_task = s.gen_snap_task.borrow_mut().take().unwrap(); generate_and_schedule_snapshot(gen_task, &s.engines, &sched).unwrap_err(); } // When retried too many times, it should report a different error. 
match s.snapshot(0) { Err(RaftError::Store(StorageError::Other(_))) => {} res => panic!("unexpected res: {:?}", res), } } #[test] fn test_storage_append() { let ents = vec![new_entry(3, 3), new_entry(4, 4), new_entry(5, 5)]; let mut tests = vec![ ( vec![new_entry(3, 3), new_entry(4, 4), new_entry(5, 5)], vec![new_entry(4, 4), new_entry(5, 5)], ), ( vec![new_entry(3, 3), new_entry(4, 6), new_entry(5, 6)], vec![new_entry(4, 6), new_entry(5, 6)], ), ( vec![ new_entry(3, 3), new_entry(4, 4), new_entry(5, 5), new_entry(6, 5), ], vec![new_entry(4, 4), new_entry(5, 5), new_entry(6, 5)], ), // truncate incoming entries, truncate the existing entries and append ( vec![new_entry(2, 3), new_entry(3, 3), new_entry(4, 5)], vec![new_entry(4, 5)], ), // truncate the existing entries and append (vec![new_entry(4, 5)], vec![new_entry(4, 5)]), // direct append ( vec![new_entry(6, 5)], vec![new_entry(4, 4), new_entry(5, 5), new_entry(6, 5)], ), ]; for (i, (entries, wentries)) in tests.drain(..).enumerate() { let td = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let worker = LazyWorker::new("snap-manager"); let sched = worker.scheduler(); let mut store = new_storage_from_ents(sched, &td, &ents); append_ents(&mut store, &entries); let li = store.last_index(); let actual_entries = store.entries(4, li + 1, u64::max_value()).unwrap(); if actual_entries != wentries { panic!("#{}: want {:?}, got {:?}", i, wentries, actual_entries); } } } #[test] fn test_storage_cache_fetch() { let ents = vec![new_entry(3, 3), new_entry(4, 4), new_entry(5, 5)]; let td = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let worker = LazyWorker::new("snap-manager"); let sched = worker.scheduler(); let mut store = new_storage_from_ents(sched, &td, &ents); store.cache.as_mut().unwrap().cache.clear(); // empty cache should fetch data from rocksdb directly. let mut res = store.entries(4, 6, u64::max_value()).unwrap(); assert_eq!(*res, ents[1..]); let entries = vec![new_entry(6, 5), new_entry(7, 5)]; append_ents(&mut store, &entries); validate_cache(&store, &entries); // direct cache access res = store.entries(6, 8, u64::max_value()).unwrap(); assert_eq!(res, entries); // size limit should be supported correctly. res = store.entries(4, 8, 0).unwrap(); assert_eq!(res, vec![new_entry(4, 4)]); let mut size = ents[1..].iter().map(|e| u64::from(e.compute_size())).sum(); res = store.entries(4, 8, size).unwrap(); let mut exp_res = ents[1..].to_vec(); assert_eq!(res, exp_res); for e in &entries { size += u64::from(e.compute_size()); exp_res.push(e.clone()); res = store.entries(4, 8, size).unwrap(); assert_eq!(res, exp_res); } // range limit should be supported correctly. 
for low in 4..9 { for high in low..9 { let res = store.entries(low, high, u64::max_value()).unwrap(); assert_eq!(*res, exp_res[low as usize - 4..high as usize - 4]); } } } #[test] fn test_storage_cache_update() { let ents = vec![new_entry(3, 3), new_entry(4, 4), new_entry(5, 5)]; let td = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let worker = LazyWorker::new("snap-manager"); let sched = worker.scheduler(); let mut store = new_storage_from_ents(sched, &td, &ents); store.cache.as_mut().unwrap().cache.clear(); // initial cache let mut entries = vec![new_entry(6, 5), new_entry(7, 5)]; append_ents(&mut store, &entries); validate_cache(&store, &entries); // rewrite entries = vec![new_entry(6, 6), new_entry(7, 6)]; append_ents(&mut store, &entries); validate_cache(&store, &entries); // rewrite old entry entries = vec![new_entry(5, 6), new_entry(6, 6)]; append_ents(&mut store, &entries); validate_cache(&store, &entries); // partial rewrite entries = vec![new_entry(6, 7), new_entry(7, 7)]; append_ents(&mut store, &entries); let mut exp_res = vec![new_entry(5, 6), new_entry(6, 7), new_entry(7, 7)]; validate_cache(&store, &exp_res); // direct append entries = vec![new_entry(8, 7), new_entry(9, 7)]; append_ents(&mut store, &entries); exp_res.extend_from_slice(&entries); validate_cache(&store, &exp_res); // rewrite middle entries = vec![new_entry(7, 8)]; append_ents(&mut store, &entries); exp_res.truncate(2); exp_res.push(new_entry(7, 8)); validate_cache(&store, &exp_res); let cap = MAX_CACHE_CAPACITY as u64; // result overflow entries = (3..=cap).map(|i| new_entry(i + 5, 8)).collect(); append_ents(&mut store, &entries); exp_res.remove(0); exp_res.extend_from_slice(&entries); validate_cache(&store, &exp_res); // input overflow entries = (0..=cap).map(|i| new_entry(i + cap + 6, 8)).collect(); append_ents(&mut store, &entries); exp_res = entries[entries.len() - cap as usize..].to_vec(); validate_cache(&store, &exp_res); // compact store.compact_to(cap + 10); exp_res = (cap + 10..cap * 2 + 7).map(|i| new_entry(i, 8)).collect(); validate_cache(&store, &exp_res); // compact shrink assert!(store.cache.as_ref().unwrap().cache.capacity() >= cap as usize); store.compact_to(cap * 2); exp_res = (cap * 2..cap * 2 + 7).map(|i| new_entry(i, 8)).collect(); validate_cache(&store, &exp_res); assert!(store.cache.as_ref().unwrap().cache.capacity() < cap as usize); // append shrink entries = (0..=cap).map(|i| new_entry(i, 8)).collect(); append_ents(&mut store, &entries); assert!(store.cache.as_ref().unwrap().cache.capacity() >= cap as usize); append_ents(&mut store, &[new_entry(6, 8)]); exp_res = (1..7).map(|i| new_entry(i, 8)).collect(); validate_cache(&store, &exp_res); assert!(store.cache.as_ref().unwrap().cache.capacity() < cap as usize); // compact all store.compact_to(cap + 2); validate_cache(&store, &[]); // invalid compaction should be ignored. 
store.compact_to(cap); } #[test] fn test_storage_apply_snapshot() { let ents = vec![ new_entry(3, 3), new_entry(4, 4), new_entry(5, 5), new_entry(6, 6), ]; let mut cs = ConfState::default(); cs.set_voters(vec![1, 2, 3]); let td1 = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let snap_dir = Builder::new().prefix("snap").tempdir().unwrap(); let mgr = SnapManager::new(snap_dir.path().to_str().unwrap()); let mut worker = LazyWorker::new("snap-manager"); let sched = worker.scheduler(); let s1 = new_storage_from_ents(sched.clone(), &td1, &ents); let (router, _) = mpsc::sync_channel(100); let runner = RegionRunner::new( s1.engines.kv.clone(), mgr, 0, true, CoprocessorHost::<KvTestEngine>::default(), router, ); worker.start(runner); assert!(s1.snapshot(0).is_err()); let gen_task = s1.gen_snap_task.borrow_mut().take().unwrap(); generate_and_schedule_snapshot(gen_task, &s1.engines, &sched).unwrap(); let snap1 = match *s1.snap_state.borrow() { SnapState::Generating { ref receiver, .. } => { receiver.recv_timeout(Duration::from_secs(3)).unwrap() } ref s => panic!("unexpected state: {:?}", s), }; assert_eq!(s1.truncated_index(), 3); assert_eq!(s1.truncated_term(), 3); worker.stop(); let td2 = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let mut s2 = new_storage(sched.clone(), &td2); assert_eq!(s2.first_index(), s2.applied_index() + 1); let mut ctx = InvokeContext::new(&s2); assert_ne!(ctx.last_term, snap1.get_metadata().get_term()); let mut kv_wb = s2.engines.kv.write_batch(); let mut raft_wb = s2.engines.raft.write_batch(); s2.apply_snapshot(&mut ctx, &snap1, &mut kv_wb, &mut raft_wb, &[]) .unwrap(); assert_eq!(ctx.last_term, snap1.get_metadata().get_term()); assert_eq!(ctx.apply_state.get_applied_index(), 6); assert_eq!(ctx.raft_state.get_last_index(), 6); assert_eq!(ctx.apply_state.get_truncated_state().get_index(), 6); assert_eq!(ctx.apply_state.get_truncated_state().get_term(), 6); assert_eq!(s2.first_index(), s2.applied_index() + 1); validate_cache(&s2, &[]); let td3 = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let ents = &[new_entry(3, 3), new_entry(4, 3)]; let mut s3 = new_storage_from_ents(sched, &td3, ents); validate_cache(&s3, &ents[1..]); let mut ctx = InvokeContext::new(&s3); assert_ne!(ctx.last_term, snap1.get_metadata().get_term()); let mut kv_wb = s3.engines.kv.write_batch(); let mut raft_wb = s3.engines.raft.write_batch(); s3.apply_snapshot(&mut ctx, &snap1, &mut kv_wb, &mut raft_wb, &[]) .unwrap(); assert_eq!(ctx.last_term, snap1.get_metadata().get_term()); assert_eq!(ctx.apply_state.get_applied_index(), 6); assert_eq!(ctx.raft_state.get_last_index(), 6); assert_eq!(ctx.apply_state.get_truncated_state().get_index(), 6); assert_eq!(ctx.apply_state.get_truncated_state().get_term(), 6); validate_cache(&s3, &[]); } <|fim▁hole|> #[test] fn test_canceling_apply_snapshot() { let td = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let worker = LazyWorker::new("snap-manager"); let sched = worker.scheduler(); let mut s = new_storage(sched, &td); // PENDING can be canceled directly. s.snap_state = RefCell::new(SnapState::Applying(Arc::new(AtomicUsize::new( JOB_STATUS_PENDING, )))); assert!(s.cancel_applying_snap()); assert_eq!(*s.snap_state.borrow(), SnapState::ApplyAborted); // RUNNING can't be canceled directly. 
s.snap_state = RefCell::new(SnapState::Applying(Arc::new(AtomicUsize::new( JOB_STATUS_RUNNING, )))); assert!(!s.cancel_applying_snap()); assert_eq!( *s.snap_state.borrow(), SnapState::Applying(Arc::new(AtomicUsize::new(JOB_STATUS_CANCELLING))) ); // CANCEL can't be canceled again. assert!(!s.cancel_applying_snap()); s.snap_state = RefCell::new(SnapState::Applying(Arc::new(AtomicUsize::new( JOB_STATUS_CANCELLED, )))); // canceled snapshot can be canceled directly. assert!(s.cancel_applying_snap()); assert_eq!(*s.snap_state.borrow(), SnapState::ApplyAborted); s.snap_state = RefCell::new(SnapState::Applying(Arc::new(AtomicUsize::new( JOB_STATUS_FINISHED, )))); assert!(s.cancel_applying_snap()); assert_eq!(*s.snap_state.borrow(), SnapState::Relax); s.snap_state = RefCell::new(SnapState::Applying(Arc::new(AtomicUsize::new( JOB_STATUS_FAILED, )))); let res = panic_hook::recover_safe(|| s.cancel_applying_snap()); assert!(res.is_err()); } #[test] fn test_try_finish_snapshot() { let td = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let worker = LazyWorker::new("snap-manager"); let sched = worker.scheduler(); let mut s = new_storage(sched, &td); // PENDING can be finished. let mut snap_state = SnapState::Applying(Arc::new(AtomicUsize::new(JOB_STATUS_PENDING))); s.snap_state = RefCell::new(snap_state); assert_eq!(s.check_applying_snap(), CheckApplyingSnapStatus::Applying); assert_eq!( *s.snap_state.borrow(), SnapState::Applying(Arc::new(AtomicUsize::new(JOB_STATUS_PENDING))) ); // RUNNING can't be finished. snap_state = SnapState::Applying(Arc::new(AtomicUsize::new(JOB_STATUS_RUNNING))); s.snap_state = RefCell::new(snap_state); assert_eq!(s.check_applying_snap(), CheckApplyingSnapStatus::Applying); assert_eq!( *s.snap_state.borrow(), SnapState::Applying(Arc::new(AtomicUsize::new(JOB_STATUS_RUNNING))) ); snap_state = SnapState::Applying(Arc::new(AtomicUsize::new(JOB_STATUS_CANCELLED))); s.snap_state = RefCell::new(snap_state); assert_eq!(s.check_applying_snap(), CheckApplyingSnapStatus::Idle); assert_eq!(*s.snap_state.borrow(), SnapState::ApplyAborted); // ApplyAborted is not applying snapshot. assert_eq!(s.check_applying_snap(), CheckApplyingSnapStatus::Idle); assert_eq!(*s.snap_state.borrow(), SnapState::ApplyAborted); s.snap_state = RefCell::new(SnapState::Applying(Arc::new(AtomicUsize::new( JOB_STATUS_FINISHED, )))); assert_eq!(s.check_applying_snap(), CheckApplyingSnapStatus::Success); assert_eq!(*s.snap_state.borrow(), SnapState::Relax); // Relax is not applying snapshot. assert_eq!(s.check_applying_snap(), CheckApplyingSnapStatus::Idle); assert_eq!(*s.snap_state.borrow(), SnapState::Relax); s.snap_state = RefCell::new(SnapState::Applying(Arc::new(AtomicUsize::new( JOB_STATUS_FAILED, )))); let res = panic_hook::recover_safe(|| s.check_applying_snap()); assert!(res.is_err()); } #[test] fn test_sync_log() { // Do not sync empty entries. let mut tbl = vec![(Entry::default(), false)]; // Sync if sync_log is set. let mut e = Entry::default(); e.set_sync_log(true); tbl.push((e, true)); // Sync if context is marked sync. let context = ProposalContext::SYNC_LOG.to_vec(); let mut e = Entry::default(); e.set_context(context); tbl.push((e.clone(), true)); // Sync if sync_log is set and context is marked sync_log. 
e.set_sync_log(true); tbl.push((e, true)); for (e, sync) in tbl { assert_eq!(get_sync_log_from_entry(&e), sync, "{:?}", e); } } #[test] fn test_validate_states() { let td = Builder::new().prefix("tikv-store-test").tempdir().unwrap(); let worker = LazyWorker::new("snap-manager"); let sched = worker.scheduler(); let kv_db = engine_test::kv::new_engine(td.path().to_str().unwrap(), None, ALL_CFS, None).unwrap(); let raft_path = td.path().join(Path::new("raft")); let raft_db = engine_test::raft::new_engine(raft_path.to_str().unwrap(), None, CF_DEFAULT, None) .unwrap(); let engines = Engines::new(kv_db, raft_db); bootstrap_store(&engines, 1, 1).unwrap(); let region = initial_region(1, 1, 1); prepare_bootstrap_cluster(&engines, &region).unwrap(); let build_storage = || -> Result<PeerStorage<KvTestEngine, RaftTestEngine>> { PeerStorage::new(engines.clone(), &region, sched.clone(), 0, "".to_owned()) }; let mut s = build_storage().unwrap(); let mut raft_state = RaftLocalState::default(); raft_state.set_last_index(RAFT_INIT_LOG_INDEX); raft_state.mut_hard_state().set_term(RAFT_INIT_LOG_TERM); raft_state.mut_hard_state().set_commit(RAFT_INIT_LOG_INDEX); let initial_state = s.initial_state().unwrap(); assert_eq!(initial_state.hard_state, *raft_state.get_hard_state()); // last_index < commit_index is invalid. let raft_state_key = keys::raft_state_key(1); raft_state.set_last_index(11); let log_key = keys::raft_log_key(1, 11); engines .raft .put_msg(&log_key, &new_entry(11, RAFT_INIT_LOG_TERM)) .unwrap(); raft_state.mut_hard_state().set_commit(12); engines.raft.put_msg(&raft_state_key, &raft_state).unwrap(); assert!(build_storage().is_err()); let log_key = keys::raft_log_key(1, 20); engines .raft .put_msg(&log_key, &new_entry(20, RAFT_INIT_LOG_TERM)) .unwrap(); raft_state.set_last_index(20); engines.raft.put_msg(&raft_state_key, &raft_state).unwrap(); s = build_storage().unwrap(); let initial_state = s.initial_state().unwrap(); assert_eq!(initial_state.hard_state, *raft_state.get_hard_state()); // Missing last log is invalid. engines.raft.delete(&log_key).unwrap(); assert!(build_storage().is_err()); engines .raft .put_msg(&log_key, &new_entry(20, RAFT_INIT_LOG_TERM)) .unwrap(); // applied_index > commit_index is invalid. let mut apply_state = RaftApplyState::default(); apply_state.set_applied_index(13); apply_state.mut_truncated_state().set_index(13); apply_state .mut_truncated_state() .set_term(RAFT_INIT_LOG_TERM); let apply_state_key = keys::apply_state_key(1); engines .kv .put_msg_cf(CF_RAFT, &apply_state_key, &apply_state) .unwrap(); assert!(build_storage().is_err()); // It should not recover if corresponding log doesn't exist. apply_state.set_commit_index(14); apply_state.set_commit_term(RAFT_INIT_LOG_TERM); engines .kv .put_msg_cf(CF_RAFT, &apply_state_key, &apply_state) .unwrap(); assert!(build_storage().is_err()); let log_key = keys::raft_log_key(1, 14); engines .raft .put_msg(&log_key, &new_entry(14, RAFT_INIT_LOG_TERM)) .unwrap(); raft_state.mut_hard_state().set_commit(14); s = build_storage().unwrap(); let initial_state = s.initial_state().unwrap(); assert_eq!(initial_state.hard_state, *raft_state.get_hard_state()); // log term mismatch is invalid. engines .raft .put_msg(&log_key, &new_entry(14, RAFT_INIT_LOG_TERM - 1)) .unwrap(); assert!(build_storage().is_err()); // hard state term mismatch is invalid. 
engines .raft .put_msg(&log_key, &new_entry(14, RAFT_INIT_LOG_TERM)) .unwrap(); raft_state.mut_hard_state().set_term(RAFT_INIT_LOG_TERM - 1); engines.raft.put_msg(&raft_state_key, &raft_state).unwrap(); assert!(build_storage().is_err()); // last index < recorded_commit_index is invalid. raft_state.mut_hard_state().set_term(RAFT_INIT_LOG_TERM); raft_state.set_last_index(13); let log_key = keys::raft_log_key(1, 13); engines .raft .put_msg(&log_key, &new_entry(13, RAFT_INIT_LOG_TERM)) .unwrap(); engines.raft.put_msg(&raft_state_key, &raft_state).unwrap(); assert!(build_storage().is_err()); } fn gen_snap_for_test(rx: Receiver<Snapshot>) -> SnapState { SnapState::Generating { canceled: Arc::new(AtomicBool::new(false)), index: Arc::new(AtomicU64::new(0)), receiver: rx, } } }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># flake8: NOQA <|fim▁hole|>from . import evaluations from . import extensions from . import models from . import utils<|fim▁end|>
from . import datasets
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Licensed under a 3-clause BSD style license - see LICENSE.rst from .. import config as _config class Conf(_config.ConfigNamespace): """ Configuration parameters for `astropy.table`. """ auto_colname = _config.ConfigItem( 'col{0}', 'The template that determines the name of a column if it cannot be ' 'determined. Uses new-style (format method) string formatting.', aliases=['astropy.table.column.auto_colname']) default_notebook_table_class = _config.ConfigItem( 'table-striped table-bordered table-condensed', 'The table class to be used in Jupyter notebooks when displaying ' 'tables (and not overridden). See <http://getbootstrap.com/css/#tables ' 'for a list of useful bootstrap classes.')<|fim▁hole|> 'List of conditions for issuing a warning when replacing a table ' "column using setitem, e.g. t['a'] = value. Allowed options are " "'always', 'slice', 'refcount', 'attributes'.", 'list', ) replace_inplace = _config.ConfigItem( False, 'Always use in-place update of a table column when using setitem, ' "e.g. t['a'] = value. This overrides the default behavior of " "replacing the column entirely with the new value when possible. " "This configuration option will be deprecated and then removed in " "subsequent major releases." ) conf = Conf() from .column import Column, MaskedColumn, StringTruncateWarning, ColumnInfo from .groups import TableGroups, ColumnGroups from .table import (Table, QTable, TableColumns, Row, TableFormatter, NdarrayMixin, TableReplaceWarning) from .operations import join, setdiff, hstack, vstack, unique, TableMergeError from .bst import BST, FastBST, FastRBT from .sorted_array import SortedArray from .serialize import SerializedColumn # Finally import the formats for the read and write method but delay building # the documentation until all are loaded. (#5275) from ..io import registry with registry.delay_doc_updates(Table): # Import routines that connect readers/writers to astropy.table from .jsviewer import JSViewer from ..io.ascii import connect from ..io.fits import connect from ..io.misc import connect from ..io.votable import connect<|fim▁end|>
replace_warnings = _config.ConfigItem( ['slice'],
<|file_name|>most_free_time.py<|end_file_name|><|fim▁begin|>''' Have the function MostFreeTime(strArr) read the strArr parameter being passed which will represent a full day and will be filled with events that span from time X to time Y in the day. The format of each event will be hh:mmAM/PM-hh:mmAM/PM. For example, strArr may be ["10:00AM-12:30PM", "02:00PM-02:45PM","09:10AM-09:50AM"]. Your program will have to output the longest amount of free time available between the start of your first event and the end of your last event in the format: hh:mm. The start event should be the earliest event in the day and the latest event should be the latest event in the day. The output for the previous input would therefore be 01:30 (with the earliest event in the day starting at 09:10AM and the latest event ending at 02:45PM). The input will contain at least 3 events and the events may be out of order. ''' def min_bef(hh,mm,pm): if (pm==0) and hh==12: return 1440 elif (pm==1): if(hh<12): hh += 12 mm = 60-mm return ((24-(hh+1))*60)+mm def min_aft(hh,mm,pm): if (pm==0) and hh==12: return 0 elif (pm==1): if(hh<12): hh += 12 return (hh*60)+mm def CountingMinutesI(sen): sen = sen.replace('M','') sen = sen.replace(':',' ') sen = sen.replace('P',' 1') sen = sen.replace('A',' 0') lit = sen.split("-") lit = lit[0].split(" ") +lit[1].split(" ") lita=[] for i in lit: lita.append(int(i)) #print lita if(lita[2] == lita[5]): res = min_bef(lita[0],lita[1],lita[2])-min_bef(lita[3],lita[4],lita[5]) if res <0 : return 1440+res else: return res elif(lita[2] < lita[5]): return min_bef(lita[0],lita[1],lita[2])-min_bef(lita[3],lita[4],lita[5]) elif(lita[2] > lita[5]): return min_bef(lita[0],lita[1],lita[2])+min_aft(lita[3],lita[4],lita[5]) def sort_times(times): ams = [] pms = [] for i in times: if i[5].lower() == 'p' and int(i[0:2])<12: pms.append(i) else: ams.append(i) full = sort_timeList(ams)+sort_timeList(pms) return full <|fim▁hole|>def less_than(x, y): if(int(x[0:2]) < int(y[0:2])): #print int(x[0:2]),int(y[0:2]) return True elif (int(x[3:5]) < int(y[3:5])): #print int(x[3:5]),int(y[3:5]) return True else: return False def make_comparator(less_than): def compare(x, y): if less_than(x, y): return -1 elif less_than(y, x): return 1 else: return 0 return compare def MostFreeTime(times): times = sort_times(times) #print times maxt = 0 for i in range (0,len(times)-1): free = times[i][8:]+"-"+times[i+1][:7] maxt = max(maxt,CountingMinutesI(free)) #print maxt #print "FInal result: "+str(maxt ) return str(maxt/60).zfill(2) +":"+str(maxt%60).zfill(2) sen = ["12:15PM-02:00PM","09:00AM-12:11PM","02:02PM-04:00PM"] print MostFreeTime(sen)<|fim▁end|>
def sort_timeList(times): sortedTimes = sorted(times, cmp=make_comparator(less_than), reverse=False)#[::-1] return sortedTimes
<|file_name|>test_select.rs<|end_file_name|><|fim▁begin|>use workdir::Workdir; macro_rules! select_test { ($name:ident, $select:expr, $select_no_headers:expr, $expected_headers:expr, $expected_rows:expr) => ( mod $name { use workdir::Workdir; use super::data; #[test] fn headers() { let wrk = Workdir::new(stringify!($name)); wrk.create("data.csv", data(true)); let mut cmd = wrk.command("select"); cmd.arg("--").arg($select).arg("data.csv"); let got: Vec<Vec<String>> = wrk.read_stdout(&mut cmd); let expected = vec![ $expected_headers.iter() .map(|s| s.to_string()) .collect::<Vec<String>>(), $expected_rows.iter() .map(|s| s.to_string()) .collect::<Vec<String>>(), ]; assert_eq!(got, expected); } #[test] fn no_headers() { let wrk = Workdir::new(stringify!($name)); wrk.create("data.csv", data(false)); let mut cmd = wrk.command("select"); cmd.arg("--no-headers") .arg("--").arg($select_no_headers).arg("data.csv"); let got: Vec<Vec<String>> = wrk.read_stdout(&mut cmd); let expected = vec![ $expected_rows.iter() .map(|s| s.to_string()) .collect::<Vec<String>>(), ]; assert_eq!(got, expected); } } ); } macro_rules! select_test_err { ($name:ident, $select:expr) => ( #[test] fn $name() { let wrk = Workdir::new(stringify!($name)); wrk.create("data.csv", data(true)); let mut cmd = wrk.command("select"); cmd.arg($select).arg("data.csv"); wrk.assert_err(&mut cmd); } ); } fn header_row() -> Vec<String> { svec!["h1", "h2", "h[]3", "h4", "h1"] } fn data(headers: bool) -> Vec<Vec<String>> { let mut rows = vec![ svec!["a", "b", "c", "d", "e"], ]; if headers { rows.insert(0, header_row()) } rows } select_test!(select_simple, "h1", "1", ["h1"], ["a"]); select_test!(select_simple_idx, "h1[0]", "1", ["h1"], ["a"]); select_test!(select_simple_idx_2, "h1[1]", "5", ["h1"], ["e"]); select_test!(select_quoted, r#""h[]3""#, "3", ["h[]3"], ["c"]); select_test!(select_quoted_idx, r#""h[]3"[0]"#, "3", ["h[]3"], ["c"]); select_test!(select_range, "h1-h4", "1-4", ["h1", "h2", "h[]3", "h4"], ["a", "b", "c", "d"]); select_test!(select_range_multi, r#"h1-h2,"h[]3"-h4"#, "1-2,3-4", ["h1", "h2", "h[]3", "h4"], ["a", "b", "c", "d"]);<|fim▁hole|> select_test!(select_reverse, "h1[1]-h1[0]", "5-1", ["h1", "h4", "h[]3", "h2", "h1"], ["e", "d", "c", "b", "a"]); select_test!(select_not, r#"!"h[]3"[0]"#, "!3", ["h1", "h2", "h4", "h1"], ["a", "b", "d", "e"]); select_test!(select_not_range, "!h1[1]-h2", "!5-2", ["h1"], ["a"]); select_test!(select_duplicate, "h1,h1", "1,1", ["h1", "h1"], ["a", "a"]); select_test!(select_duplicate_range, "h1-h2,h1-h2", "1-2,1-2", ["h1", "h2", "h1", "h2"], ["a", "b", "a", "b"]); select_test!(select_duplicate_range_reverse, "h1-h2,h2-h1", "1-2,2-1", ["h1", "h2", "h2", "h1"], ["a", "b", "b", "a"]); select_test!(select_range_no_end, "h4-", "4-", ["h4", "h1"], ["d", "e"]); select_test!(select_range_no_start, "-h2", "-2", ["h1", "h2"], ["a", "b"]); select_test!(select_range_no_end_cat, "h4-,h1", "4-,1", ["h4", "h1", "h1"], ["d", "e", "a"]); select_test!(select_range_no_start_cat, "-h2,h1[1]", "-2,5", ["h1", "h2", "h1"], ["a", "b", "e"]); select_test_err!(select_err_unknown_header, "dne"); select_test_err!(select_err_oob_low, "0"); select_test_err!(select_err_oob_high, "6"); select_test_err!(select_err_idx_as_name, "1[0]"); select_test_err!(select_err_idx_oob_high, "h1[2]"); select_test_err!(select_err_idx_not_int, "h1[2.0]"); select_test_err!(select_err_idx_not_int_2, "h1[a]"); select_test_err!(select_err_unclosed_quote, r#""h1"#); select_test_err!(select_err_unclosed_bracket, r#""h1"[1"#); 
select_test_err!(select_err_expected_end_of_field, "a-b-");<|fim▁end|>
select_test!(select_range_multi_idx, r#"h1-h2,"h[]3"[0]-h4"#, "1-2,3-4", ["h1", "h2", "h[]3", "h4"], ["a", "b", "c", "d"]);
<|file_name|>backtop.js<|end_file_name|><|fim▁begin|>(function($, f) { // If there's no jQuery, Unslider can't work, so kill the operation. if(!$) return f; var Unslider = function() { // Set up our elements this.el = f; this.items = f; // Dimensions this.sizes = []; this.max = [0,0]; // Current index this.current = 0; // Start/stop timer this.interval = f; // Set some options this.opts = { speed: 500, delay: 3000, // f for no autoplay complete: f, // when a slide's finished keys: !f, // keyboard shortcuts - disable if it breaks things dots: f, // display ••• pagination fluid: f // is it a percentage width?, }; // Create a deep clone for methods where context changes var _ = this; this.init = function(el, opts) { this.el = el; this.ul = el.children('ul'); this.max = [el.outerWidth(), el.outerHeight()]; this.items = this.ul.children('li').each(this.calculate); // Check whether we're passing any options in to Unslider this.opts = $.extend(this.opts, opts); // Set up the Unslider this.setup(); return this; }; // Get the width for an element // Pass a jQuery element as the context with .call(), and the index as a parameter: Unslider.calculate.call($('li:first'), 0) this.calculate = function(index) { var me = $(this), width = me.outerWidth(), height = me.outerHeight(); // Add it to the sizes list _.sizes[index] = [width, height]; // Set the max values if(width > _.max[0]) _.max[0] = width; if(height > _.max[1]) _.max[1] = height; }; // Work out what methods need calling this.setup = function() { // Set the main element this.el.css({ overflow: 'hidden', width: _.max[0], height: this.items.first().outerHeight() }); // Set the relative widths this.ul.css({width: (this.items.length * 100) + '%', position: 'relative'}); this.items.css('width', (100 / this.items.length) + '%'); if(this.opts.delay !== f) { this.start(); this.el.hover(this.stop, this.start); } // Custom keyboard support this.opts.keys && $(document).keydown(this.keys); // Dot pagination this.opts.dots && this.dots(); // Little patch for fluid-width sliders. Screw those guys. if(this.opts.fluid) { var resize = function() { _.el.css('width', Math.min(Math.round((_.el.outerWidth() / _.el.parent().outerWidth()) * 100), 100) + '%'); }; resize(); $(window).resize(resize); } if(this.opts.arrows) { this.el.parent().append('<p class="arrows"><span class="prev">←</span><span class="next">→</span></p>') .find('.arrows span').click(function() { $.isFunction(_[this.className]) && _[this.className](); }); }; // Swipe support if($.event.swipe) { this.el.on('swipeleft', _.prev).on('swiperight', _.next); } }; // Move Unslider to a slide index this.move = function(index, cb) { // If it's out of bounds, go to the first slide if(!this.items.eq(index).length) index = 0; if(index < 0) index = (this.items.length - 1); var target = this.items.eq(index); var obj = {height: target.outerHeight()}; var speed = cb ? 
5 : this.opts.speed; if(!this.ul.is(':animated')) { // Handle those pesky dots _.el.find('.dot:eq(' + index + ')').addClass('active').siblings().removeClass('active'); this.el.animate(obj, speed) && this.ul.animate($.extend({left: '-' + index + '00%'}, obj), speed, function(data) { _.current = index; $.isFunction(_.opts.complete) && !cb && _.opts.complete(_.el); }); } }; // Autoplay functionality this.start = function() { _.interval = setInterval(function() { _.move(_.current + 1); }, _.opts.delay); }; // Stop autoplay this.stop = function() { _.interval = clearInterval(_.interval); return _; }; // Keypresses this.keys = function(e) { var key = e.which; var map = { // Prev/next 37: _.prev, 39: _.next, // Esc 27: _.stop }; if($.isFunction(map[key])) { map[key](); } }; // Arrow navigation this.next = function() { return _.stop().move(_.current + 1) }; this.prev = function() { return _.stop().move(_.current - 1) }; this.dots = function() { // Create the HTML var html = '<ol class="dots">'; $.each(this.items, function(index) { html += '<li class="dot' + (index < 1 ? ' active' : '') + '">' + (index + 1) + '</li>'; }); html += '</ol>'; // Add it to the Unslider this.el.addClass('has-dots').append(html).find('.dot').click(function() { _.move($(this).index()); }); }; }; // Create a jQuery plugin $.fn.unslider = function(o) { var len = this.length; // Enable multiple-slider support return this.each(function(index) { // Cache a copy of $(this), so it var me = $(this); var instance = (new Unslider).init(me, o); // Invoke an Unslider instance me.data('unslider' + (len > 1 ? '-' + (index + 1) : ''), instance); }); }; })(window.jQuery, false); jQuery(document).ready(function($){ // browser window scroll (in pixels) after which the "back to top" link is shown var offset = 300, //browser window scroll (in pixels) after which the "back to top" link opacity is reduced offset_opacity = 1200, //duration of the top scrolling animation (in ms) scroll_top_duration = 700, //grab the "back to top" link $back_to_top = $('.back-top'); //hide or show the "back to top" link $(window).scroll(function(){ ( $(this).scrollTop() > offset ) ? $back_to_top.addClass('cd-is-visible') : $back_to_top.removeClass('cd-is-visible cd-fade-out'); if( $(this).scrollTop() > offset_opacity ) { $back_to_top.addClass('cd-fade-out'); } }); //smooth scroll to top $back_to_top.on('click', function(event){ event.preventDefault(); $('body,html').animate({ scrollTop: 0 , }, scroll_top_duration ); }); //pin the footer to the bottom of the window var winheight = $( window ).height(); var bodyheight = $("body").height(); if (winheight >= bodyheight){ $(".outer-footer").css("position" , "fixed"); $(".outer-footer").css("bottom" , "0"); } }); /** * Unslider by @idiot<|fim▁hole|>
*/
<|file_name|>analyses_transposed.py<|end_file_name|><|fim▁begin|># coding=utf-8 # This file is part of Bika LIMS # # Copyright 2011-2016 by it's authors. # Some rights reserved. See LICENSE.txt, AUTHORS.txt. from Products.Five.browser.pagetemplatefile import ViewPageTemplateFile from bika.lims.browser.bika_listing import BikaListingTable from bika.lims.browser.worksheet.views.analyses import AnalysesView class AnalysesTransposedView(AnalysesView): """ The view for displaying the table of manage_results transposed. Analysis Requests are displayed in columns and analyses in rows. Uses most of the logic provided by BikaListingView through bika.lims.worksheet.views.AnalysesView to generate the items, but renders its own template, which is highly specific for display analysis results. Because of this, some generic BikaListing functionalities, such as sorting, pagination, contextual menus for columns, etc. will not work in this view. """ def contents_table(self, table_only = True): """ Overrides contents_table method from the parent class BikaListingView, using the transposed template instead of the classic template. """ table = AnalysesTransposedTable(bika_listing = self, table_only = True) return table.render(self) class AnalysesTransposedTable(BikaListingTable): """ The BikaListingTable that uses a transposed template for displaying the results. """ render = ViewPageTemplateFile("../templates/analyses_transposed.pt") render_cell = ViewPageTemplateFile("../templates/analyses_transposed_cell.pt") def __init__(self, bika_listing = None, table_only = False): BikaListingTable.__init__(self, bika_listing, True) self.rows_headers = [] self.trans_items = {} self.positions = [] self._transpose_data() def _transpose_data(self): cached = [] index = 0 #ignore = ['Analysis', 'Service', 'Result', 'ResultDM'] include = ['Attachments', 'DetectionLimit', 'DueDate','Pos', 'ResultDM'] for col in self.bika_listing.review_state['columns']: if col == 'Result': # Further interims will be inserted in this position resindex = index if col not in include: continue lcol = self.bika_listing.columns[col] self.rows_headers.append({'id': col, 'title': lcol['title'], 'type': lcol.get('type',''),<|fim▁hole|> 'input_width': lcol.get('input_width','')}) cached.append(col) index += 1 for item in self.items: if item['Service'] not in cached: self.rows_headers.insert(resindex, {'id': item['Service'], 'title': item['title'], 'type': item.get('type',''), 'row_type': 'analysis', 'index': index}) resindex += 1 cached.append(item['Service']) pos = item['Pos'] if pos in self.trans_items: self.trans_items[pos][item['Service']] = item else: self.trans_items[pos] = {item['Service']: item} if pos not in self.positions: self.positions.append(pos) def rendered_items(self, cat=None, **kwargs): return '' def render_row_cell(self, rowheader, position = ''): self.current_rowhead = rowheader self.current_position = position if rowheader['row_type'] == 'field': # Only the first item for this position contains common # data for all the analyses with the same position its = [i for i in self.items if i['Pos'] == position] self.current_item = its[0] if its else {} elif position in self.trans_items \ and rowheader['id'] in self.trans_items[position]: self.current_item = self.trans_items[position][rowheader['id']] else: return '' return self.render_cell()<|fim▁end|>
'row_type': 'field', 'hidden': not lcol.get('toggle', True), 'input_class': lcol.get('input_class',''),
<|file_name|>test_vfs.py<|end_file_name|><|fim▁begin|># This file is part of beets. # Copyright 2011, Adrian Sampson. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. """Tests for the virtual filesystem builder..""" import _common from _common import unittest from beets import library from beets import vfs class VFSTest(unittest.TestCase): def setUp(self): self.lib = library.Library(':memory:', path_formats=[ ('default', 'albums/$album/$title'), ('singleton:true', 'tracks/$artist/$title'), ]) self.lib.add(_common.item()) self.lib.add_album([_common.item()]) self.lib.save() self.tree = vfs.libtree(self.lib) def test_singleton_item(self): self.assertEqual(self.tree.dirs['tracks'].dirs['the artist']. files['the title'], 1) def test_album_item(self): self.assertEqual(self.tree.dirs['albums'].dirs['the album']. files['the title'], 2) def suite(): return unittest.TestLoader().loadTestsFromName(__name__) if __name__ == '__main__':<|fim▁hole|> unittest.main(defaultTest='suite')<|fim▁end|>
<|file_name|>student.py<|end_file_name|><|fim▁begin|>__author__ = 'xubinggui' class Student(object): def __init__(self, name, score): self.name = name self.score = score def print_score(self): print(self.score)<|fim▁hole|><|fim▁end|>
bart = Student('Bart Simpson', 59) bart.print_score()
<|file_name|>cpu_temperature_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright 2019 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Unit tests for the contents of cpu_temperature.py """ # pylint: disable=unused-argument import logging import unittest from devil import devil_env from devil.android import cpu_temperature from devil.android import device_utils from devil.utils import mock_calls from devil.android.sdk import adb_wrapper with devil_env.SysPath(devil_env.PYMOCK_PATH): import mock # pylint: disable=import-error class CpuTemperatureTest(mock_calls.TestCase): @mock.patch('devil.android.perf.perf_control.PerfControl', mock.Mock()) def setUp(self): # Mock the device self.mock_device = mock.Mock(spec=device_utils.DeviceUtils) self.mock_device.build_product = 'blueline' self.mock_device.adb = mock.Mock(spec=adb_wrapper.AdbWrapper) self.mock_device.FileExists.return_value = True self.cpu_temp = cpu_temperature.CpuTemperature(self.mock_device) self.cpu_temp.InitThermalDeviceInformation() class CpuTemperatureInitTest(unittest.TestCase): @mock.patch('devil.android.perf.perf_control.PerfControl', mock.Mock()) def testInitWithDeviceUtil(self): d = mock.Mock(spec=device_utils.DeviceUtils) d.build_product = 'blueline' c = cpu_temperature.CpuTemperature(d) self.assertEqual(d, c.GetDeviceForTesting()) def testInitWithMissing_fails(self): with self.assertRaises(TypeError): cpu_temperature.CpuTemperature(None) with self.assertRaises(TypeError): cpu_temperature.CpuTemperature('') class CpuTemperatureGetThermalDeviceInformationTest(CpuTemperatureTest): @mock.patch('devil.android.perf.perf_control.PerfControl', mock.Mock()) def testGetThermalDeviceInformation_noneWhenIncorrectLabel(self): invalid_device = mock.Mock(spec=device_utils.DeviceUtils) invalid_device.build_product = 'invalid_name' c = cpu_temperature.CpuTemperature(invalid_device) c.InitThermalDeviceInformation() self.assertEqual(c.GetDeviceInfoForTesting(), None) def testGetThermalDeviceInformation_getsCorrectInformation(self): correct_information = { 'cpu0': '/sys/class/thermal/thermal_zone11/temp', 'cpu1': '/sys/class/thermal/thermal_zone12/temp', 'cpu2': '/sys/class/thermal/thermal_zone13/temp', 'cpu3': '/sys/class/thermal/thermal_zone14/temp', 'cpu4': '/sys/class/thermal/thermal_zone15/temp', 'cpu5': '/sys/class/thermal/thermal_zone16/temp', 'cpu6': '/sys/class/thermal/thermal_zone17/temp', 'cpu7': '/sys/class/thermal/thermal_zone18/temp' } self.assertEqual( cmp(correct_information, self.cpu_temp.GetDeviceInfoForTesting().get('cpu_temps')), 0) class CpuTemperatureIsSupportedTest(CpuTemperatureTest): @mock.patch('devil.android.perf.perf_control.PerfControl', mock.Mock()) def testIsSupported_returnsTrue(self): d = mock.Mock(spec=device_utils.DeviceUtils) d.build_product = 'blueline' d.FileExists.return_value = True c = cpu_temperature.CpuTemperature(d) self.assertTrue(c.IsSupported()) @mock.patch('devil.android.perf.perf_control.PerfControl', mock.Mock()) def testIsSupported_returnsFalse(self): d = mock.Mock(spec=device_utils.DeviceUtils) d.build_product = 'blueline' d.FileExists.return_value = False c = cpu_temperature.CpuTemperature(d) self.assertFalse(c.IsSupported()) class CpuTemperatureLetCpuCoolToTemperatureTest(CpuTemperatureTest): # Return values for the mock side effect cooling_down0 = ( [45000 for _ in range(8)] + [43000 for _ in range(8)] + [41000 for _ in range(8)]) @mock.patch('time.sleep', mock.Mock()) 
def testLetBatteryCoolToTemperature_coolWithin24Calls(self): self.mock_device.ReadFile = mock.Mock(side_effect=self.cooling_down0) self.cpu_temp.LetCpuCoolToTemperature(42) self.mock_device.ReadFile.assert_called() self.assertEquals(self.mock_device.ReadFile.call_count, 24) cooling_down1 = [45000 for _ in range(8)] + [41000 for _ in range(16)] @mock.patch('time.sleep', mock.Mock()) def testLetBatteryCoolToTemperature_coolWithin16Calls(self): self.mock_device.ReadFile = mock.Mock(side_effect=self.cooling_down1) self.cpu_temp.LetCpuCoolToTemperature(42) self.mock_device.ReadFile.assert_called() self.assertEquals(self.mock_device.ReadFile.call_count, 16) constant_temp = [45000 for _ in range(40)] @mock.patch('time.sleep', mock.Mock())<|fim▁hole|> self.mock_device.ReadFile.assert_called() self.assertEquals(self.mock_device.ReadFile.call_count, 24) if __name__ == '__main__': logging.getLogger().setLevel(logging.DEBUG) unittest.main(verbosity=2)<|fim▁end|>
def testLetBatteryCoolToTemperature_timeoutAfterThree(self): self.mock_device.ReadFile = mock.Mock(side_effect=self.constant_temp) self.cpu_temp.LetCpuCoolToTemperature(42)
<|file_name|>VideoQuestion.test.js<|end_file_name|><|fim▁begin|>import React from 'react'; import renderer from 'react-test-renderer'; import VideoQuestion from './VideoQuestion'; jest.mock('expo', () => ({ Video: 'Video' })); <|fim▁hole|> question="Wer ist eine Ananas?" answers={[ 'Ich bin eine Ananas', 'Du bist eine Ananas', 'Wir sind eine Ananas' ]} /> ); expect(tree).toMatchSnapshot(); });<|fim▁end|>
it('renders without crashing', () => { const tree = renderer.create( <VideoQuestion video={require('../../assets/videos/placeholder.mp4')}
<|file_name|>EjerMetods18.java<|end_file_name|><|fim▁begin|>package sergio; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStreamReader; import java.util.Calendar; import java.util.GregorianCalendar; //Ejercicio Metodos 18 //Realiza una clase llamada milibreria que contenga al menos cinco de los metodos realizados. //Usalas de 3 formas diferentes //Autor: Sergio Tobal //Fecha: 12-02-2012 public class EjerMetods18 { /** * @param args * @throws IOException * @throws NumberFormatException */ public static void main(String[] args) throws NumberFormatException, IOException { BufferedReader lectura = new BufferedReader(new InputStreamReader(System.in)); int numsend=10,edad; char nombre; boolean nombreceive; String msgname = null; System.out.println("Dame tu inicial:"); nombre=lectura.readLine().charAt(0); nombreceive=EsMayus(nombre); if (nombreceive==true) { msgname="MAYUSCULAS"; } else if (nombreceive==false) { msgname="minusculas"; } EsPerfecto(numsend); System.out.println("Tu primer numero perfecto es "+numsend+" porque tienes "+(edad=ObtenerEdad())+" años, y tu inicial esta escrita en "+msgname); } private static boolean EsPerfecto(int numsend) { int perfect=0; for (int i = 1; i < numsend; i++) { if (numsend % i == 0) { perfect += i; } } if (perfect == numsend) { return true; }else { return false; } } private static int ObtenerEdad()throws NumberFormatException, IOException{ BufferedReader lectura = new BufferedReader(new InputStreamReader(System.in)); int year,month,day; System.out.println("Dime tu dia de nacimiento: "); day=Integer.parseInt(lectura.readLine());<|fim▁hole|> year=Integer.parseInt(lectura.readLine()); Calendar cal = new GregorianCalendar(year, month, day); Calendar now = new GregorianCalendar(); int edad = now.get(Calendar.YEAR) - cal.get(Calendar.YEAR); if ((cal.get(Calendar.MONTH) > now.get(Calendar.MONTH)) || (cal.get(Calendar.MONTH) == now.get(Calendar.MONTH) && cal.get(Calendar.DAY_OF_MONTH) > now.get(Calendar.DAY_OF_MONTH))) { edad--; } return edad; } private static int Factorial(int num) { int factorial=1; // Va multiplicando el numero del usuario por 1 hasta que el numero llega ha cero y retorna // la multiplicacion de todos los numeros while (num!=0) { factorial=factorial*num; num--; } return factorial; } private static boolean ValFecha(int day, int month) { if ((day>=1 && day<=31) && (month>=1 && month<=12)) { return true; }else { return false; } } private static boolean EsMayus(char nombre) { boolean opt=true; // La funcion isUpperCase comprueba que el contenido de num sea mayuscula if (Character.isUpperCase(nombre) == true) { opt=true; // La funcion isLowerCase comprueba que el contenido de num sea minuscula } else if(Character.isLowerCase(nombre) == true){ opt=false; } return opt; } }<|fim▁end|>
System.out.println("Dime tu mes de nacimiento: "); month=Integer.parseInt(lectura.readLine()); System.out.println("Dime tu año de nacimiento: ");
<|file_name|>routes.ts<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2017 Michael Krotscheck * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * * See the License for the specific language governing permissions and * limitations under the License. */ import { RouterModule } from '@angular/router'; import { ModuleWithProviders } from '@angular/core'; import { LoginComponent } from './login.component';<|fim▁hole|>/** * All routes used by the login module. * * @author Michael Krotscheck */ export const ROUTES: ModuleWithProviders = RouterModule.forChild([ { path: 'login', component: LoginComponent, canActivate: [RequireLoggedOutGuard], } ]);<|fim▁end|>
import { RequireLoggedOutGuard } from './require-logged-out.guard';
<|file_name|>network_proxy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # # Electrum - lightweight Bitcoin client # Copyright (C) 2014 Thomas Voegtlin # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import socket import time import sys import os import threading import traceback import json import Queue import util from network import Network from util import print_error, print_stderr, parse_json from simple_config import SimpleConfig from daemon import NetworkServer, DAEMON_PORT class NetworkProxy(threading.Thread): def __init__(self, socket, config=None): if config is None: config = {} # Do not use mutables as default arguments! threading.Thread.__init__(self) self.config = SimpleConfig(config) if type(config) == type({}) else config self.message_id = 0 self.unanswered_requests = {} self.subscriptions = {} self.debug = False self.lock = threading.Lock() self.pending_transactions_for_notifications = [] self.callbacks = {} self.running = True self.daemon = True if socket: self.pipe = util.SocketPipe(socket) self.network = None else: self.network = Network(config) self.pipe = util.QueuePipe(send_queue=self.network.requests_queue) self.network.start(self.pipe.get_queue) for key in ['status','banner','updated','servers','interfaces']: value = self.network.get_status_value(key) self.pipe.get_queue.put({'method':'network.status', 'params':[key, value]}) # status variables self.status = 'connecting' self.servers = {} self.banner = '' self.blockchain_height = 0 self.server_height = 0 self.interfaces = [] def is_running(self): return self.running def run(self): while self.is_running(): try: response = self.pipe.get() except util.timeout: continue if response is None: break self.process(response) self.trigger_callback('stop') if self.network: self.network.stop() print_error("NetworkProxy: terminating") def process(self, response): if self.debug: print_error("<--", response) if response.get('method') == 'network.status': key, value = response.get('params') if key == 'status': self.status = value elif key == 'banner': self.banner = value elif key == 'updated': self.blockchain_height, self.server_height = value elif key == 'servers': self.servers = value elif key == 'interfaces': self.interfaces = value self.trigger_callback(key) return msg_id = response.get('id') result = response.get('result') error = response.get('error') if msg_id is not None: with self.lock: method, params, callback = self.unanswered_requests.pop(msg_id) else: method = response.get('method') params = response.get('params') with self.lock: for k,v in self.subscriptions.items(): if (method, params) in v: callback = k break else: print_error( "received unexpected notification", method, params) return <|fim▁hole|> callback(r) def send(self, messages, callback): """return the ids of the requests that we sent""" # detect subscriptions sub = [] for message in messages: m, v = message if m[-10:] == '.subscribe': sub.append(message) if sub: with 
self.lock: if self.subscriptions.get(callback) is None: self.subscriptions[callback] = [] for message in sub: if message not in self.subscriptions[callback]: self.subscriptions[callback].append(message) with self.lock: requests = [] ids = [] for m in messages: method, params = m request = { 'id':self.message_id, 'method':method, 'params':params } self.unanswered_requests[self.message_id] = method, params, callback ids.append(self.message_id) requests.append(request) if self.debug: print_error("-->", request) self.message_id += 1 self.pipe.send_all(requests) return ids def synchronous_get(self, requests, timeout=100000000): queue = Queue.Queue() ids = self.send(requests, queue.put) id2 = ids[:] res = {} while ids: r = queue.get(True, timeout) _id = r.get('id') ids.remove(_id) if r.get('error'): return BaseException(r.get('error')) result = r.get('result') res[_id] = r.get('result') out = [] for _id in id2: out.append(res[_id]) return out def get_servers(self): return self.servers def get_interfaces(self): return self.interfaces def get_header(self, height): return self.synchronous_get([('network.get_header',[height])])[0] def get_local_height(self): return self.blockchain_height def get_server_height(self): return self.server_height def is_connected(self): return self.status == 'connected' def is_connecting(self): return self.status == 'connecting' def is_up_to_date(self): return self.unanswered_requests == {} def get_parameters(self): return self.synchronous_get([('network.get_parameters',[])])[0] def set_parameters(self, *args): return self.synchronous_get([('network.set_parameters',args)])[0] def stop(self): self.running = False def stop_daemon(self): return self.send([('daemon.stop',[])], None) def register_callback(self, event, callback): with self.lock: if not self.callbacks.get(event): self.callbacks[event] = [] self.callbacks[event].append(callback) def trigger_callback(self, event): with self.lock: callbacks = self.callbacks.get(event,[])[:] if callbacks: [callback() for callback in callbacks]<|fim▁end|>
r = {'method':method, 'params':params, 'result':result, 'id':msg_id, 'error':error}
<|file_name|>kanbanery.js<|end_file_name|><|fim▁begin|>/*jslint indent: 2 */ /*global $: false, document: false, togglbutton: false*/ 'use strict'; togglbutton.render('section.task-actions:not(.toggl)', {observe: true}, function (elem) { var link, linkAction = document.createElement("LI"), taskTitle = $("p.task-title"), firstAction = $('.task-actions ul li:first-child', elem), actionList = firstAction.parentNode; link = togglbutton.createTimerLink({ className: 'kanbanery', description: taskTitle.textContent }); linkAction.appendChild(link);<|fim▁hole|> actionList.insertBefore(linkAction, firstAction); });<|fim▁end|>
<|file_name|>TileEntityRepositry.java<|end_file_name|><|fim▁begin|>package com.acgmodcrew.kip.tileentity; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.inventory.IInventory; import net.minecraft.item.ItemStack; import net.minecraft.tileentity.TileEntity; /** * Created by Malec on 05/03/2015. */ public class TileEntityRepositry extends TileEntity implements IInventory { private ItemStack[] inventory = new ItemStack[4]; @Override public int getSizeInventory() { return 4; } @Override public ItemStack getStackInSlot(int slot) { return inventory[slot]; } @Override public ItemStack decrStackSize(int slot, int p_70298_2_) { if (this.inventory[slot] != null) { ItemStack itemstack; if (this.inventory[slot].stackSize <= p_70298_2_) { itemstack = this.inventory[slot]; this.inventory[slot] = null; this.markDirty();<|fim▁hole|> itemstack = this.inventory[slot].splitStack(p_70298_2_); if (this.inventory[slot].stackSize == 0) { this.inventory[slot] = null; } this.markDirty(); return itemstack; } } else { return null; } } @Override public ItemStack getStackInSlotOnClosing(int p_70304_1_) { return null; } @Override public void setInventorySlotContents(int slot, ItemStack itemStack) { inventory[slot] = itemStack; } @Override public String getInventoryName() { return "Repository"; } @Override public boolean hasCustomInventoryName() { return false; } @Override public int getInventoryStackLimit() { return 1; } @Override public boolean isUseableByPlayer(EntityPlayer entityplayer) { if (worldObj == null) { return true; } if (worldObj.getTileEntity(xCoord, yCoord, zCoord) != this) { return false; } return entityplayer.getDistanceSq((double) xCoord + 0.5D, (double) yCoord + 0.5D, (double) zCoord + 0.5D) <= 64D; } @Override public void openInventory() { } @Override public void closeInventory() { } @Override public boolean isItemValidForSlot(int p_94041_1_, ItemStack p_94041_2_) { return false; } }<|fim▁end|>
return itemstack; } else {
<|file_name|>travel_time_step_cost_estimator.cpp<|end_file_name|><|fim▁begin|>#include <vigir_footstep_planning_default_plugins/step_cost_estimators/travel_time_step_cost_estimator.h> namespace vigir_footstep_planning { TravelTimeStepCostEstimator::TravelTimeStepCostEstimator() : StepCostEstimatorPlugin("travel_time_cost_estimator") { } bool TravelTimeStepCostEstimator::loadParams(const vigir_generic_params::ParameterSet& params) { if (!StepCostEstimatorPlugin::loadParams(params)) return false; params.getParam("travel_time_cost_estimator/sway/parabol_a", a_sway_inv, 0.0); a_sway_inv = 1.0/a_sway_inv; params.getParam("travel_time_cost_estimator/sway/parabol_b", b_sway_inv, 0.0); b_sway_inv = 1.0/b_sway_inv; params.getParam("travel_time_cost_estimator/sway/const_time", const_sway_time, 0.0); params.getParam("travel_time_cost_estimator/swing/parabol_a", a_swing_inv, 0.0); a_swing_inv = 1.0/a_swing_inv; params.getParam("travel_time_cost_estimator/swing/parabol_b", b_swing_inv, 0.0);<|fim▁hole|> return true; } bool TravelTimeStepCostEstimator::getCost(const State& left_foot, const State& right_foot, const State& swing_foot, double& cost, double& cost_multiplier, double& risk, double& risk_multiplier) const { cost_multiplier = 1.0; risk = 0.0; risk_multiplier = 1.0; const State& stand_foot = swing_foot.getLeg() == LEFT ? right_foot : left_foot; const State& swing_foot_before = swing_foot.getLeg() == LEFT ? left_foot : right_foot; tf::Transform sway = swing_foot_before.getPose().inverse() * stand_foot.getPose(); tf::Transform swing = swing_foot_before.getPose().inverse() * swing_foot.getPose(); double sway_duration = parabol(sway.getOrigin().x(), sway.getOrigin().y(), a_sway_inv, b_sway_inv) + const_sway_time; double step_duration = parabol(swing.getOrigin().x(), swing.getOrigin().y(), a_swing_inv, b_swing_inv) + const_swing_time; cost = sway_duration + step_duration; return true; } } #include <pluginlib/class_list_macros.h> PLUGINLIB_EXPORT_CLASS(vigir_footstep_planning::TravelTimeStepCostEstimator, vigir_footstep_planning::StepCostEstimatorPlugin)<|fim▁end|>
b_swing_inv = 1.0/b_swing_inv; params.getParam("travel_time_cost_estimator/swing/const_time", const_swing_time, 0.0);
<|file_name|>output_merger.rs<|end_file_name|><|fim▁begin|>//! Output Merger (OM) stage description. //! The final stage in a pipeline that creates pixel colors from //! the input shader results, depth/stencil information, etc. use crate::pso::{graphics::StencilValue, State}; /// A pixel-wise comparison function. #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum Comparison { /// `false` Never = 0, /// `x < y` Less = 1, /// `x == y` Equal = 2, /// `x <= y` LessEqual = 3, /// `x > y` Greater = 4, /// `x != y` NotEqual = 5, /// `x >= y` GreaterEqual = 6, /// `true` Always = 7, } bitflags!( /// Target output color mask. #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct ColorMask: u8 { /// Red mask const RED = 0x1; /// Green mask const GREEN = 0x2; /// Blue mask const BLUE = 0x4; /// Alpha channel mask const ALPHA = 0x8; /// Mask for RGB channels const COLOR = 0x7; /// Mask all channels const ALL = 0xF; /// Mask no channels. const NONE = 0x0; } ); impl Default for ColorMask { fn default() -> Self { Self::ALL } } /// Defines the possible blending factors. /// During blending, the source or destination fragment may be /// multiplied by a factor to produce the final result. #[allow(missing_docs)] #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum Factor { Zero = 0, One = 1, SrcColor = 2, OneMinusSrcColor = 3, DstColor = 4, OneMinusDstColor = 5, SrcAlpha = 6, OneMinusSrcAlpha = 7, DstAlpha = 8, OneMinusDstAlpha = 9, ConstColor = 10, OneMinusConstColor = 11, ConstAlpha = 12, OneMinusConstAlpha = 13, SrcAlphaSaturate = 14, Src1Color = 15, OneMinusSrc1Color = 16, Src1Alpha = 17, OneMinusSrc1Alpha = 18, } /// Blending operations. #[allow(missing_docs)] #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum BlendOp { /// Adds source and destination. /// Source and destination are multiplied by factors before addition. Add { src: Factor, dst: Factor }, /// Subtracts destination from source. /// Source and destination are multiplied by factors before subtraction. Sub { src: Factor, dst: Factor }, /// Subtracts source from destination. /// Source and destination are multiplied by factors before subtraction. RevSub { src: Factor, dst: Factor }, /// Component-wise minimum value of source and destination. Min, /// Component-wise maximum value of source and destination. Max, } impl BlendOp { /// Replace the destination value with the source. pub const REPLACE: Self = BlendOp::Add { src: Factor::One, dst: Factor::Zero, }; /// Add the source and destination together. pub const ADD: Self = BlendOp::Add { src: Factor::One, dst: Factor::One, }; /// Alpha blend the source and destination together. pub const ALPHA: Self = BlendOp::Add { src: Factor::SrcAlpha, dst: Factor::OneMinusSrcAlpha, }; /// Alpha blend a premultiplied-alpha source with the destination. pub const PREMULTIPLIED_ALPHA: Self = BlendOp::Add { src: Factor::One, dst: Factor::OneMinusSrcAlpha, }; } /// Specifies whether to use blending, and if so, /// which operations to use for color and alpha channels. #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct BlendState { /// The blend operation to use for the color channels. pub color: BlendOp, /// The blend operation to use for the alpha channel. 
pub alpha: BlendOp, } impl BlendState { /// Replace the color. pub const REPLACE: Self = BlendState { color: BlendOp::REPLACE, alpha: BlendOp::REPLACE, }; /// Additive blending. pub const ADD: Self = BlendState { color: BlendOp::ADD, alpha: BlendOp::ADD, }; /// Multiplicative blending. pub const MULTIPLY: Self = BlendState { color: BlendOp::Add { src: Factor::Zero, dst: Factor::SrcColor, }, alpha: BlendOp::Add { src: Factor::Zero, dst: Factor::SrcAlpha, }, }; /// Alpha blending. pub const ALPHA: Self = BlendState { color: BlendOp::ALPHA, alpha: BlendOp::PREMULTIPLIED_ALPHA, }; /// Pre-multiplied alpha blending. pub const PREMULTIPLIED_ALPHA: Self = BlendState { color: BlendOp::PREMULTIPLIED_ALPHA, alpha: BlendOp::PREMULTIPLIED_ALPHA, }; } /// PSO color target descriptor. #[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct ColorBlendDesc { /// Color write mask. pub mask: ColorMask, /// Blend state, if enabled. pub blend: Option<BlendState>, } impl ColorBlendDesc { /// Empty blend descriptor just writes out the color without blending. // this can be used because `Default::default()` isn't a const function... pub const EMPTY: Self = ColorBlendDesc { mask: ColorMask::ALL, blend: None, }; } /// Depth test state. #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct DepthTest { /// Comparison function to use. pub fun: Comparison, /// Specify whether to write to the depth buffer or not. pub write: bool, } impl DepthTest { /// A depth test that always fails. pub const FAIL: Self = DepthTest { fun: Comparison::Never, write: false, }; /// A depth test that always succeeds but doesn't /// write to the depth buffer // DOC TODO: Not a terribly helpful description there... pub const PASS_TEST: Self = DepthTest { fun: Comparison::Always, write: false, }; /// A depth test that always succeeds and writes its result /// to the depth buffer. pub const PASS_WRITE: Self = DepthTest { fun: Comparison::Always, write: true, }; } /// The operation to use for stencil masking. #[repr(u8)] #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub enum StencilOp { /// Keep the current value in the stencil buffer (no change). Keep = 0,<|fim▁hole|> /// Increment the stencil buffer value, clamping to its maximum value. IncrementClamp = 3, /// Decrement the stencil buffer value, clamping to its minimum value. DecrementClamp = 4, /// Bitwise invert the current value in the stencil buffer. Invert = 5, /// Increment the stencil buffer value, wrapping around to 0 on overflow. IncrementWrap = 6, /// Decrement the stencil buffer value, wrapping around to the maximum value on overflow. DecrementWrap = 7, } /// Complete stencil state for a given side of a face. #[derive(Clone, Copy, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct StencilFace { /// Comparison function to use to determine if the stencil test passes. pub fun: Comparison, /// What operation to do if the stencil test fails. pub op_fail: StencilOp, /// What operation to do if the stencil test passes but the depth test fails. pub op_depth_fail: StencilOp, /// What operation to do if both the depth and stencil test pass. 
pub op_pass: StencilOp, } impl Default for StencilFace { fn default() -> StencilFace { StencilFace { fun: Comparison::Never, op_fail: StencilOp::Keep, op_depth_fail: StencilOp::Keep, op_pass: StencilOp::Keep, } } } /// A generic struct holding the properties of two sides of a polygon. #[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct Sided<T> { /// Information about the front face. pub front: T, /// Information about the back face. pub back: T, } impl<T: Copy> Sided<T> { /// Create a new `Sided` structure with both `front` and `back` holding /// the same value. pub fn new(value: T) -> Self { Sided { front: value, back: value, } } } /// Pair of stencil values that could be either baked into a graphics pipeline /// or provided dynamically. pub type StencilValues = State<Sided<StencilValue>>; /// Defines a stencil test. Stencil testing is an operation /// performed to cull fragments; /// the new fragment is tested against the value held in the /// stencil buffer, and if the test fails the fragment is /// discarded. #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct StencilTest { /// Operations for stencil faces. pub faces: Sided<StencilFace>, /// Masks that are ANDd with both the stencil buffer value and the reference value when they /// are read before doing the stencil test. pub read_masks: StencilValues, /// Mask that are ANDd with the stencil value before writing to the stencil buffer. pub write_masks: StencilValues, /// The reference values used for stencil tests. pub reference_values: StencilValues, } impl Default for StencilTest { fn default() -> Self { StencilTest { faces: Sided::default(), read_masks: State::Static(Sided::new(!0)), write_masks: State::Static(Sided::new(!0)), reference_values: State::Static(Sided::new(0)), } } } /// PSO depth-stencil target descriptor. #[derive(Clone, Copy, Debug, Default, Eq, Hash, PartialEq)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct DepthStencilDesc { /// Optional depth testing/writing. pub depth: Option<DepthTest>, /// Enable depth bounds testing. pub depth_bounds: bool, /// Stencil test/write. pub stencil: Option<StencilTest>, } impl DepthStencilDesc { /// Returns true if the descriptor assumes the depth attachment. pub fn uses_depth(&self) -> bool { self.depth.is_some() || self.depth_bounds } /// Returns true if the descriptor assumes the stencil attachment. pub fn uses_stencil(&self) -> bool { self.stencil.is_some() } } bitflags!( /// Face. #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct Face: u32 { /// Empty face. TODO: remove when constexpr are stabilized to use empty() const NONE = 0x0; /// Front face. const FRONT = 0x1; /// Back face. const BACK = 0x2; } );<|fim▁end|>
/// Set the value in the stencil buffer to zero. Zero = 1, /// Set the stencil buffer value to `reference` from `StencilFace`. Replace = 2,
<|file_name|>login.py<|end_file_name|><|fim▁begin|>from handlers import Handler from models import User # Login handler class Login(Handler): def get(self): self.render('login-form.html') def post(self): username = self.request.get('username')<|fim▁hole|> u = User.login(username, password) if u: self.login(u) self.redirect('/blog') else: msg = 'Invalid login' self.render('login-form.html', error=msg)<|fim▁end|>
password = self.request.get('password')
<|file_name|>chacha.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! The ChaCha random number generator. use core::prelude::*; use core::num::Int; use {Rng, SeedableRng, Rand}; const KEY_WORDS : uint = 8; // 8 words for the 256-bit key const STATE_WORDS : uint = 16; const CHACHA_ROUNDS: uint = 20; // Cryptographically secure from 8 upwards as of this writing /// A random number generator that uses the ChaCha20 algorithm [1]. /// /// The ChaCha algorithm is widely accepted as suitable for /// cryptographic purposes, but this implementation has not been /// verified as such. Prefer a generator like `OsRng` that defers to /// the operating system for cases that need high security. /// /// [1]: D. J. Bernstein, [*ChaCha, a variant of /// Salsa20*](http://cr.yp.to/chacha.html) #[derive(Copy, Clone)] pub struct ChaChaRng { buffer: [u32; STATE_WORDS], // Internal buffer of output state: [u32; STATE_WORDS], // Initial state index: uint, // Index into state } static EMPTY: ChaChaRng = ChaChaRng { buffer: [0; STATE_WORDS], state: [0; STATE_WORDS], index: STATE_WORDS }; macro_rules! quarter_round{ ($a: expr, $b: expr, $c: expr, $d: expr) => {{ $a += $b; $d ^= $a; $d = $d.rotate_left(16); $c += $d; $b ^= $c; $b = $b.rotate_left(12); $a += $b; $d ^= $a; $d = $d.rotate_left( 8); $c += $d; $b ^= $c; $b = $b.rotate_left( 7); }} } macro_rules! double_round{ ($x: expr) => {{ // Column round quarter_round!($x[ 0], $x[ 4], $x[ 8], $x[12]); quarter_round!($x[ 1], $x[ 5], $x[ 9], $x[13]); quarter_round!($x[ 2], $x[ 6], $x[10], $x[14]); quarter_round!($x[ 3], $x[ 7], $x[11], $x[15]); // Diagonal round quarter_round!($x[ 0], $x[ 5], $x[10], $x[15]); quarter_round!($x[ 1], $x[ 6], $x[11], $x[12]); quarter_round!($x[ 2], $x[ 7], $x[ 8], $x[13]); quarter_round!($x[ 3], $x[ 4], $x[ 9], $x[14]); }} } #[inline] fn core(output: &mut [u32; STATE_WORDS], input: &[u32; STATE_WORDS]) { *output = *input; for _ in 0..CHACHA_ROUNDS / 2 { double_round!(output); } for i in 0..STATE_WORDS { output[i] += input[i]; } } impl ChaChaRng { /// Create an ChaCha random number generator using the default /// fixed key of 8 zero words. pub fn new_unseeded() -> ChaChaRng { let mut rng = EMPTY; rng.init(&[0; KEY_WORDS]); rng } /// Sets the internal 128-bit ChaCha counter to /// a user-provided value. This permits jumping /// arbitrarily ahead (or backwards) in the pseudorandom stream. /// /// Since the nonce words are used to extend the counter to 128 bits, /// users wishing to obtain the conventional ChaCha pseudorandom stream /// associated with a particular nonce can call this function with /// arguments `0, desired_nonce`. 
pub fn set_counter(&mut self, counter_low: u64, counter_high: u64) { self.state[12] = (counter_low >> 0) as u32; self.state[13] = (counter_low >> 32) as u32; self.state[14] = (counter_high >> 0) as u32; self.state[15] = (counter_high >> 32) as u32; self.index = STATE_WORDS; // force recomputation } /// Initializes `self.state` with the appropriate key and constants /// /// We deviate slightly from the ChaCha specification regarding /// the nonce, which is used to extend the counter to 128 bits. /// This is provably as strong as the original cipher, though, /// since any distinguishing attack on our variant also works /// against ChaCha with a chosen-nonce. See the XSalsa20 [1] /// security proof for a more involved example of this. /// /// The modified word layout is: /// ```text /// constant constant constant constant /// key key key key /// key key key key /// counter counter counter counter /// ``` /// [1]: Daniel J. Bernstein. [*Extending the Salsa20 /// nonce.*](http://cr.yp.to/papers.html#xsalsa) fn init(&mut self, key: &[u32; KEY_WORDS]) { self.state[0] = 0x61707865; self.state[1] = 0x3320646E; self.state[2] = 0x79622D32; self.state[3] = 0x6B206574; for i in 0..KEY_WORDS { self.state[4+i] = key[i]; } self.state[12] = 0; self.state[13] = 0; self.state[14] = 0; self.state[15] = 0; self.index = STATE_WORDS; } /// Refill the internal output buffer (`self.buffer`) fn update(&mut self) { core(&mut self.buffer, &self.state); self.index = 0; // update 128-bit counter self.state[12] += 1; if self.state[12] != 0 { return }; self.state[13] += 1; if self.state[13] != 0 { return }; self.state[14] += 1; if self.state[14] != 0 { return }; self.state[15] += 1; } } impl Rng for ChaChaRng { #[inline] fn next_u32(&mut self) -> u32 { if self.index == STATE_WORDS { self.update(); } let value = self.buffer[self.index % STATE_WORDS]; self.index += 1; value } } impl<'a> SeedableRng<&'a [u32]> for ChaChaRng { fn reseed(&mut self, seed: &'a [u32]) { // reset state self.init(&[0u32; KEY_WORDS]); // set key in place let key = &mut self.state[4 .. 4+KEY_WORDS]; for (k, s) in key.iter_mut().zip(seed.iter()) { *k = *s; } } /// Create a ChaCha generator from a seed, /// obtained from a variable-length u32 array. /// Only up to 8 words are used; if less than 8 /// words are used, the remaining are set to zero. 
fn from_seed(seed: &'a [u32]) -> ChaChaRng { let mut rng = EMPTY; rng.reseed(seed); rng } } impl Rand for ChaChaRng { fn rand<R: Rng>(other: &mut R) -> ChaChaRng { let mut key : [u32; KEY_WORDS] = [0; KEY_WORDS]; for word in &mut key { *word = other.gen(); } SeedableRng::from_seed(key.as_slice()) } } #[cfg(test)] mod test { use std::prelude::v1::*; use core::iter::order; use {Rng, SeedableRng};<|fim▁hole|> let s = ::test::rng().gen_iter::<u32>().take(8).collect::<Vec<u32>>(); let mut ra: ChaChaRng = SeedableRng::from_seed(&*s); let mut rb: ChaChaRng = SeedableRng::from_seed(&*s); assert!(order::equals(ra.gen_ascii_chars().take(100), rb.gen_ascii_chars().take(100))); } #[test] fn test_rng_seeded() { let seed : &[_] = &[0,1,2,3,4,5,6,7]; let mut ra: ChaChaRng = SeedableRng::from_seed(seed); let mut rb: ChaChaRng = SeedableRng::from_seed(seed); assert!(order::equals(ra.gen_ascii_chars().take(100), rb.gen_ascii_chars().take(100))); } #[test] fn test_rng_reseed() { let s = ::test::rng().gen_iter::<u32>().take(8).collect::<Vec<u32>>(); let mut r: ChaChaRng = SeedableRng::from_seed(&*s); let string1: String = r.gen_ascii_chars().take(100).collect(); r.reseed(&s); let string2: String = r.gen_ascii_chars().take(100).collect(); assert_eq!(string1, string2); } #[test] fn test_rng_true_values() { // Test vectors 1 and 2 from // http://tools.ietf.org/html/draft-nir-cfrg-chacha20-poly1305-04 let seed : &[_] = &[0u32; 8]; let mut ra: ChaChaRng = SeedableRng::from_seed(seed); let v = (0..16).map(|_| ra.next_u32()).collect::<Vec<_>>(); assert_eq!(v, vec!(0xade0b876, 0x903df1a0, 0xe56a5d40, 0x28bd8653, 0xb819d2bd, 0x1aed8da0, 0xccef36a8, 0xc70d778b, 0x7c5941da, 0x8d485751, 0x3fe02477, 0x374ad8b8, 0xf4b8436a, 0x1ca11815, 0x69b687c3, 0x8665eeb2)); let v = (0..16).map(|_| ra.next_u32()).collect::<Vec<_>>(); assert_eq!(v, vec!(0xbee7079f, 0x7a385155, 0x7c97ba98, 0x0d082d73, 0xa0290fcb, 0x6965e348, 0x3e53c612, 0xed7aee32, 0x7621b729, 0x434ee69c, 0xb03371d5, 0xd539d874, 0x281fed31, 0x45fb0a51, 0x1f0ae1ac, 0x6f4d794b)); let seed : &[_] = &[0,1,2,3,4,5,6,7]; let mut ra: ChaChaRng = SeedableRng::from_seed(seed); // Store the 17*i-th 32-bit word, // i.e., the i-th word of the i-th 16-word block let mut v : Vec<u32> = Vec::new(); for _ in 0..16 { v.push(ra.next_u32()); for _ in 0..16 { ra.next_u32(); } } assert_eq!(v, vec!(0xf225c81a, 0x6ab1be57, 0x04d42951, 0x70858036, 0x49884684, 0x64efec72, 0x4be2d186, 0x3615b384, 0x11cfa18e, 0xd3c50049, 0x75c775f6, 0x434c6530, 0x2c5bad8f, 0x898881dc, 0x5f1c86d9, 0xc1f8e7f4)); } #[test] fn test_rng_clone() { let seed : &[_] = &[0u32; 8]; let mut rng: ChaChaRng = SeedableRng::from_seed(seed); let mut clone = rng.clone(); for _ in 0..16 { assert_eq!(rng.next_u64(), clone.next_u64()); } } }<|fim▁end|>
use super::ChaChaRng; #[test] fn test_rng_rand_seeded() {
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python import os import sys extra_opts = {'test_suite': 'tests'} extra_deps = [] extra_test_deps = [] if sys.version_info[:2] == (2, 6): extra_deps.append('argparse') extra_deps.append('simplejson') extra_test_deps.append('unittest2') extra_opts['test_suite'] = 'unittest2.collector' try: from setuptools import setup, find_packages except ImportError: from ez_setup import use_setuptools use_setuptools() from setuptools import setup, find_packages try: with open('README.rst', 'r') as fd: extra_opts['long_description'] = fd.read() except IOError: pass # Install without README.rst setup( name='mongo-orchestration', version='0.4.dev0',<|fim▁hole|> author_email='[email protected]', description='Restful service for managing MongoDB servers', keywords=['mongo-orchestration', 'mongodb', 'mongo', 'rest', 'testing'], license="http://www.apache.org/licenses/LICENSE-2.0.html", platforms=['any'], url='https://github.com/10gen/mongo-orchestration', install_requires=['pymongo>=3.0.2', 'bottle>=0.12.7', 'CherryPy>=3.5.0'] + extra_deps, tests_require=['coverage>=3.5'] + extra_test_deps, packages=find_packages(exclude=('tests',)), package_data={ 'mongo_orchestration': [ os.path.join('configurations', config_dir, '*.json') for config_dir in ('servers', 'replica_sets', 'sharded_clusters') ] + [os.path.join('lib', 'client.pem')] }, classifiers=[ "Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: Implementation :: CPython" ], entry_points={ 'console_scripts': [ 'mongo-orchestration = mongo_orchestration.server:main' ] }, **extra_opts )<|fim▁end|>
author='MongoDB, Inc.',
<|file_name|>if_expression.cc<|end_file_name|><|fim▁begin|>#include "if_expression.h" #include <glog/logging.h> #include "../internal/compilation.h" #include "../public/value.h" namespace afc { namespace vm { namespace { class IfExpression : public Expression { public: IfExpression(std::shared_ptr<Expression> cond, std::shared_ptr<Expression> true_case, std::shared_ptr<Expression> false_case, std::unordered_set<VMType> return_types) : cond_(std::move(cond)), true_case_(std::move(true_case)), false_case_(std::move(false_case)), return_types_(std::move(return_types)) { CHECK(cond_ != nullptr); CHECK(cond_->IsBool()); CHECK(true_case_ != nullptr); CHECK(false_case_ != nullptr); } std::vector<VMType> Types() override { return true_case_->Types(); } std::unordered_set<VMType> ReturnTypes() const override { return return_types_; } PurityType purity() override { return cond_->purity() == PurityType::kPure && true_case_->purity() == PurityType::kPure && false_case_->purity() == PurityType::kPure ? PurityType::kPure : PurityType::kUnknown; } futures::Value<EvaluationOutput> Evaluate(Trampoline* trampoline, const VMType& type) override { return trampoline->Bounce(cond_.get(), VMType::Bool()) .Transform([type, true_case = true_case_, false_case = false_case_, trampoline](EvaluationOutput cond_output) { switch (cond_output.type) { case EvaluationOutput::OutputType::kReturn: case EvaluationOutput::OutputType::kAbort: return futures::Past(std::move(cond_output)); case EvaluationOutput::OutputType::kContinue: return trampoline->Bounce(cond_output.value->boolean ? true_case.get() : false_case.get(), type); } auto error = afc::editor::Error(L"Unhandled OutputType case."); LOG(FATAL) << error; return futures::Past(EvaluationOutput::Abort(error)); }); } std::unique_ptr<Expression> Clone() override { return std::make_unique<IfExpression>(cond_, true_case_, false_case_, return_types_); } private: const std::shared_ptr<Expression> cond_; const std::shared_ptr<Expression> true_case_; const std::shared_ptr<Expression> false_case_; const std::unordered_set<VMType> return_types_; }; } // namespace std::unique_ptr<Expression> NewIfExpression( Compilation* compilation, std::unique_ptr<Expression> condition, std::unique_ptr<Expression> true_case, std::unique_ptr<Expression> false_case) { if (condition == nullptr || true_case == nullptr || false_case == nullptr) { return nullptr; } if (!condition->IsBool()) { compilation->errors.push_back( L"Expected bool value for condition of \"if\" expression but found " + TypesToString(condition->Types()) + L"."); return nullptr; } if (!(true_case->Types() == false_case->Types())) { compilation->errors.push_back( L"Type mismatch between branches of conditional expression: " + TypesToString(true_case->Types()) + L" and " + TypesToString(false_case->Types()) + L"."); return nullptr; } std::wstring error; auto return_types = CombineReturnTypes(true_case->ReturnTypes(), false_case->ReturnTypes(), &error); if (!return_types.has_value()) { compilation->errors.push_back(error); return nullptr; }<|fim▁hole|> return std::make_unique<IfExpression>( std::move(condition), std::move(true_case), std::move(false_case), std::move(return_types.value())); } } // namespace vm } // namespace afc<|fim▁end|>
<|file_name|>publishconf.py<|end_file_name|><|fim▁begin|><|fim▁hole|> # This file is only used if you use `make publish` or # explicitly specify it as your config file. import os import sys sys.path.append(os.curdir) from pelicanconf import * SITEURL = 'http://rcarneva.github.io' RELATIVE_URLS = False FEED_ALL_ATOM = 'feeds/all.atom.xml' CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml' DELETE_OUTPUT_DIRECTORY = True # Following items are often useful when publishing #DISQUS_SITENAME = "" #GOOGLE_ANALYTICS = ""<|fim▁end|>
#!/usr/bin/env python # -*- coding: utf-8 -*- # from __future__ import unicode_literals
<|file_name|>matches.rs<|end_file_name|><|fim▁begin|>use clippy_utils::consts::{constant, constant_full_int, miri_to_const, FullInt}; use clippy_utils::diagnostics::{ multispan_sugg, span_lint_and_help, span_lint_and_note, span_lint_and_sugg, span_lint_and_then, }; use clippy_utils::higher; use clippy_utils::source::{expr_block, indent_of, snippet, snippet_block, snippet_opt, snippet_with_applicability}; use clippy_utils::sugg::Sugg; use clippy_utils::ty::{implements_trait, is_type_diagnostic_item, match_type, peel_mid_ty_refs}; use clippy_utils::visitors::is_local_used; use clippy_utils::{ get_parent_expr, in_macro, is_expn_of, is_lang_ctor, is_lint_allowed, is_refutable, is_unit_expr, is_wild, meets_msrv, msrvs, path_to_local, path_to_local_id, peel_hir_pat_refs, peel_n_hir_expr_refs, recurse_or_patterns, remove_blocks, strip_pat_refs, }; use clippy_utils::{paths, search_same, SpanlessEq, SpanlessHash}; use core::array; use core::iter::{once, ExactSizeIterator}; use if_chain::if_chain; use rustc_ast::ast::{Attribute, LitKind}; use rustc_errors::Applicability; use rustc_hir::def::{CtorKind, DefKind, Res}; use rustc_hir::LangItem::{OptionNone, OptionSome}; use rustc_hir::{ self as hir, Arm, BindingAnnotation, Block, BorrowKind, Expr, ExprKind, Guard, HirId, Local, MatchSource, Mutability, Node, Pat, PatKind, PathSegment, QPath, RangeEnd, TyKind, }; use rustc_hir::{HirIdMap, HirIdSet}; use rustc_lint::{LateContext, LateLintPass, LintContext}; use rustc_middle::lint::in_external_macro; use rustc_middle::ty::{self, Ty, TyS, VariantDef}; use rustc_semver::RustcVersion; use rustc_session::{declare_tool_lint, impl_lint_pass}; use rustc_span::source_map::{Span, Spanned}; use rustc_span::sym; use std::cmp::Ordering; use std::collections::hash_map::Entry; use std::iter; use std::ops::Bound; declare_clippy_lint! { /// ### What it does /// Checks for matches with a single arm where an `if let` /// will usually suffice. /// /// ### Why is this bad? /// Just readability – `if let` nests less than a `match`. /// /// ### Example /// ```rust /// # fn bar(stool: &str) {} /// # let x = Some("abc"); /// // Bad /// match x { /// Some(ref foo) => bar(foo), /// _ => (), /// } /// /// // Good /// if let Some(ref foo) = x { /// bar(foo); /// } /// ``` pub SINGLE_MATCH, style, "a `match` statement with a single nontrivial arm (i.e., where the other arm is `_ => {}`) instead of `if let`" } declare_clippy_lint! { /// ### What it does /// Checks for matches with two arms where an `if let else` will /// usually suffice. /// /// ### Why is this bad? /// Just readability – `if let` nests less than a `match`. /// /// ### Known problems /// Personal style preferences may differ. /// /// ### Example /// Using `match`: /// /// ```rust /// # fn bar(foo: &usize) {} /// # let other_ref: usize = 1; /// # let x: Option<&usize> = Some(&1); /// match x { /// Some(ref foo) => bar(foo), /// _ => bar(&other_ref), /// } /// ``` /// /// Using `if let` with `else`: /// /// ```rust /// # fn bar(foo: &usize) {} /// # let other_ref: usize = 1; /// # let x: Option<&usize> = Some(&1); /// if let Some(ref foo) = x { /// bar(foo); /// } else { /// bar(&other_ref); /// } /// ``` pub SINGLE_MATCH_ELSE, pedantic, "a `match` statement with two arms where the second arm's pattern is a placeholder instead of a specific match pattern" } declare_clippy_lint! { /// ### What it does /// Checks for matches where all arms match a reference, /// suggesting to remove the reference and deref the matched expression /// instead. 
It also checks for `if let &foo = bar` blocks. /// /// ### Why is this bad? /// It just makes the code less readable. That reference /// destructuring adds nothing to the code. /// /// ### Example /// ```rust,ignore /// // Bad /// match x { /// &A(ref y) => foo(y), /// &B => bar(), /// _ => frob(&x), /// } /// /// // Good /// match *x { /// A(ref y) => foo(y), /// B => bar(), /// _ => frob(x), /// } /// ``` pub MATCH_REF_PATS, style, "a `match` or `if let` with all arms prefixed with `&` instead of deref-ing the match expression" } declare_clippy_lint! { /// ### What it does /// Checks for matches where match expression is a `bool`. It /// suggests to replace the expression with an `if...else` block. /// /// ### Why is this bad? /// It makes the code less readable. /// /// ### Example /// ```rust /// # fn foo() {} /// # fn bar() {} /// let condition: bool = true; /// match condition { /// true => foo(), /// false => bar(), /// } /// ``` /// Use if/else instead: /// ```rust /// # fn foo() {} /// # fn bar() {} /// let condition: bool = true; /// if condition { /// foo(); /// } else { /// bar(); /// } /// ``` pub MATCH_BOOL, pedantic, "a `match` on a boolean expression instead of an `if..else` block" } declare_clippy_lint! { /// ### What it does /// Checks for overlapping match arms. /// /// ### Why is this bad? /// It is likely to be an error and if not, makes the code /// less obvious. /// /// ### Example /// ```rust /// let x = 5; /// match x { /// 1..=10 => println!("1 ... 10"), /// 5..=15 => println!("5 ... 15"), /// _ => (), /// } /// ``` pub MATCH_OVERLAPPING_ARM, style, "a `match` with overlapping arms" } declare_clippy_lint! { /// ### What it does /// Checks for arm which matches all errors with `Err(_)` /// and take drastic actions like `panic!`. /// /// ### Why is this bad? /// It is generally a bad practice, similar to /// catching all exceptions in java with `catch(Exception)` /// /// ### Example /// ```rust /// let x: Result<i32, &str> = Ok(3); /// match x { /// Ok(_) => println!("ok"), /// Err(_) => panic!("err"), /// } /// ``` pub MATCH_WILD_ERR_ARM, pedantic, "a `match` with `Err(_)` arm and take drastic actions" } declare_clippy_lint! { /// ### What it does /// Checks for match which is used to add a reference to an /// `Option` value. /// /// ### Why is this bad? /// Using `as_ref()` or `as_mut()` instead is shorter. /// /// ### Example /// ```rust /// let x: Option<()> = None; /// /// // Bad /// let r: Option<&()> = match x { /// None => None, /// Some(ref v) => Some(v), /// }; /// /// // Good /// let r: Option<&()> = x.as_ref(); /// ``` pub MATCH_AS_REF, complexity, "a `match` on an Option value instead of using `as_ref()` or `as_mut`" } declare_clippy_lint! { /// ### What it does /// Checks for wildcard enum matches using `_`. /// /// ### Why is this bad? /// New enum variants added by library updates can be missed. /// /// ### Known problems /// Suggested replacements may be incorrect if guards exhaustively cover some /// variants, and also may not use correct path to enum if it's not present in the current scope. /// /// ### Example /// ```rust /// # enum Foo { A(usize), B(usize) } /// # let x = Foo::B(1); /// // Bad /// match x { /// Foo::A(_) => {}, /// _ => {}, /// } /// /// // Good /// match x { /// Foo::A(_) => {}, /// Foo::B(_) => {}, /// } /// ``` pub WILDCARD_ENUM_MATCH_ARM, restriction, "a wildcard enum match arm using `_`" } declare_clippy_lint! { /// ### What it does /// Checks for wildcard enum matches for a single variant. /// /// ### Why is this bad? 
/// New enum variants added by library updates can be missed. /// /// ### Known problems /// Suggested replacements may not use correct path to enum /// if it's not present in the current scope. /// /// ### Example /// ```rust /// # enum Foo { A, B, C } /// # let x = Foo::B; /// // Bad /// match x { /// Foo::A => {}, /// Foo::B => {}, /// _ => {}, /// } /// /// // Good /// match x { /// Foo::A => {}, /// Foo::B => {}, /// Foo::C => {}, /// } /// ``` pub MATCH_WILDCARD_FOR_SINGLE_VARIANTS, pedantic, "a wildcard enum match for a single variant" } declare_clippy_lint! { /// ### What it does /// Checks for wildcard pattern used with others patterns in same match arm. /// /// ### Why is this bad? /// Wildcard pattern already covers any other pattern as it will match anyway. /// It makes the code less readable, especially to spot wildcard pattern use in match arm. /// /// ### Example /// ```rust /// // Bad /// match "foo" { /// "a" => {}, /// "bar" | _ => {}, /// } /// /// // Good /// match "foo" { /// "a" => {}, /// _ => {}, /// } /// ``` pub WILDCARD_IN_OR_PATTERNS, complexity, "a wildcard pattern used with others patterns in same match arm" } declare_clippy_lint! { /// ### What it does /// Checks for matches being used to destructure a single-variant enum /// or tuple struct where a `let` will suffice. /// /// ### Why is this bad? /// Just readability – `let` doesn't nest, whereas a `match` does. /// /// ### Example /// ```rust /// enum Wrapper { /// Data(i32), /// } /// /// let wrapper = Wrapper::Data(42); /// /// let data = match wrapper { /// Wrapper::Data(i) => i, /// }; /// ``` /// /// The correct use would be: /// ```rust /// enum Wrapper { /// Data(i32), /// } /// /// let wrapper = Wrapper::Data(42); /// let Wrapper::Data(data) = wrapper; /// ``` pub INFALLIBLE_DESTRUCTURING_MATCH, style, "a `match` statement with a single infallible arm instead of a `let`" } declare_clippy_lint! { /// ### What it does /// Checks for useless match that binds to only one value. /// /// ### Why is this bad? /// Readability and needless complexity. /// /// ### Known problems /// Suggested replacements may be incorrect when `match` /// is actually binding temporary value, bringing a 'dropped while borrowed' error. /// /// ### Example /// ```rust /// # let a = 1; /// # let b = 2; /// /// // Bad /// match (a, b) { /// (c, d) => { /// // useless match /// } /// } /// /// // Good /// let (c, d) = (a, b); /// ``` pub MATCH_SINGLE_BINDING, complexity, "a match with a single binding instead of using `let` statement" } declare_clippy_lint! { /// ### What it does /// Checks for unnecessary '..' pattern binding on struct when all fields are explicitly matched. /// /// ### Why is this bad? /// Correctness and readability. It's like having a wildcard pattern after /// matching all enum variants explicitly. /// /// ### Example /// ```rust /// # struct A { a: i32 } /// let a = A { a: 5 }; /// /// // Bad /// match a { /// A { a: 5, .. } => {}, /// _ => {}, /// } /// /// // Good /// match a { /// A { a: 5 } => {}, /// _ => {}, /// } /// ``` pub REST_PAT_IN_FULLY_BOUND_STRUCTS, restriction, "a match on a struct that binds all fields but still uses the wildcard pattern" } declare_clippy_lint! { /// ### What it does /// Lint for redundant pattern matching over `Result`, `Option`, /// `std::task::Poll` or `std::net::IpAddr` /// /// ### Why is this bad? /// It's more concise and clear to just use the proper /// utility function /// /// ### Known problems /// This will change the drop order for the matched type. 
Both `if let` and /// `while let` will drop the value at the end of the block, both `if` and `while` will drop the /// value before entering the block. For most types this change will not matter, but for a few /// types this will not be an acceptable change (e.g. locks). See the /// [reference](https://doc.rust-lang.org/reference/destructors.html#drop-scopes) for more about /// drop order. /// /// ### Example /// ```rust /// # use std::task::Poll; /// # use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; /// if let Ok(_) = Ok::<i32, i32>(42) {} /// if let Err(_) = Err::<i32, i32>(42) {} /// if let None = None::<()> {} /// if let Some(_) = Some(42) {} /// if let Poll::Pending = Poll::Pending::<()> {} /// if let Poll::Ready(_) = Poll::Ready(42) {} /// if let IpAddr::V4(_) = IpAddr::V4(Ipv4Addr::LOCALHOST) {} /// if let IpAddr::V6(_) = IpAddr::V6(Ipv6Addr::LOCALHOST) {} /// match Ok::<i32, i32>(42) { /// Ok(_) => true, /// Err(_) => false, /// }; /// ``` /// /// The more idiomatic use would be: /// /// ```rust /// # use std::task::Poll; /// # use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; /// if Ok::<i32, i32>(42).is_ok() {} /// if Err::<i32, i32>(42).is_err() {} /// if None::<()>.is_none() {} /// if Some(42).is_some() {} /// if Poll::Pending::<()>.is_pending() {} /// if Poll::Ready(42).is_ready() {} /// if IpAddr::V4(Ipv4Addr::LOCALHOST).is_ipv4() {} /// if IpAddr::V6(Ipv6Addr::LOCALHOST).is_ipv6() {} /// Ok::<i32, i32>(42).is_ok(); /// ``` pub REDUNDANT_PATTERN_MATCHING, style, "use the proper utility function avoiding an `if let`" } declare_clippy_lint! { /// ### What it does /// Checks for `match` or `if let` expressions producing a /// `bool` that could be written using `matches!` /// /// ### Why is this bad? /// Readability and needless complexity. /// /// ### Known problems /// This lint falsely triggers, if there are arms with /// `cfg` attributes that remove an arm evaluating to `false`. /// /// ### Example /// ```rust /// let x = Some(5); /// /// // Bad /// let a = match x { /// Some(0) => true, /// _ => false, /// }; /// /// let a = if let Some(0) = x { /// true /// } else { /// false /// }; /// /// // Good /// let a = matches!(x, Some(0)); /// ``` pub MATCH_LIKE_MATCHES_MACRO, style, "a match that could be written with the matches! macro" } declare_clippy_lint! { /// ### What it does /// Checks for `match` with identical arm bodies. /// /// ### Why is this bad? /// This is probably a copy & paste error. If arm bodies /// are the same on purpose, you can factor them /// [using `|`](https://doc.rust-lang.org/book/patterns.html#multiple-patterns). /// /// ### Known problems /// False positive possible with order dependent `match` /// (see issue /// [#860](https://github.com/rust-lang/rust-clippy/issues/860)). 
/// /// ### Example /// ```rust,ignore /// match foo { /// Bar => bar(), /// Quz => quz(), /// Baz => bar(), // <= oops /// } /// ``` /// /// This should probably be /// ```rust,ignore /// match foo { /// Bar => bar(), /// Quz => quz(), /// Baz => baz(), // <= fixed /// } /// ``` /// /// or if the original code was not a typo: /// ```rust,ignore /// match foo { /// Bar | Baz => bar(), // <= shows the intent better /// Quz => quz(), /// } /// ``` pub MATCH_SAME_ARMS, pedantic, "`match` with identical arm bodies" } #[derive(Default)] pub struct Matches { msrv: Option<RustcVersion>, infallible_destructuring_match_linted: bool, } impl Matches { #[must_use] pub fn new(msrv: Option<RustcVersion>) -> Self { Self { msrv, ..Matches::default() } } } impl_lint_pass!(Matches => [ SINGLE_MATCH, MATCH_REF_PATS, MATCH_BOOL, SINGLE_MATCH_ELSE, MATCH_OVERLAPPING_ARM, MATCH_WILD_ERR_ARM, MATCH_AS_REF, WILDCARD_ENUM_MATCH_ARM, MATCH_WILDCARD_FOR_SINGLE_VARIANTS, WILDCARD_IN_OR_PATTERNS, MATCH_SINGLE_BINDING, INFALLIBLE_DESTRUCTURING_MATCH, REST_PAT_IN_FULLY_BOUND_STRUCTS, REDUNDANT_PATTERN_MATCHING, MATCH_LIKE_MATCHES_MACRO, MATCH_SAME_ARMS, ]); impl<'tcx> LateLintPass<'tcx> for Matches { fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { if in_external_macro(cx.sess(), expr.span) || in_macro(expr.span) { return; } redundant_pattern_match::check(cx, expr); if meets_msrv(self.msrv.as_ref(), &msrvs::MATCHES_MACRO) { if !check_match_like_matches(cx, expr) { lint_match_arms(cx, expr); } } else { lint_match_arms(cx, expr); } if let ExprKind::Match(ex, arms, MatchSource::Normal) = expr.kind { check_single_match(cx, ex, arms, expr); check_match_bool(cx, ex, arms, expr); check_overlapping_arms(cx, ex, arms); check_wild_err_arm(cx, ex, arms); check_wild_enum_match(cx, ex, arms); check_match_as_ref(cx, ex, arms, expr); check_wild_in_or_pats(cx, arms); if self.infallible_destructuring_match_linted { self.infallible_destructuring_match_linted = false; } else { check_match_single_binding(cx, ex, arms, expr); } } if let ExprKind::Match(ex, arms, _) = expr.kind { check_match_ref_pats(cx, ex, arms.iter().map(|el| el.pat), expr); } if let Some(higher::IfLet { let_pat, let_expr, .. }) = higher::IfLet::hir(cx, expr) { check_match_ref_pats(cx, let_expr, once(let_pat), expr); } } fn check_local(&mut self, cx: &LateContext<'tcx>, local: &'tcx Local<'_>) { if_chain! { if !in_external_macro(cx.sess(), local.span); if !in_macro(local.span); if let Some(expr) = local.init; if let ExprKind::Match(target, arms, MatchSource::Normal) = expr.kind; if arms.len() == 1 && arms[0].guard.is_none(); if let PatKind::TupleStruct( QPath::Resolved(None, variant_name), args, _) = arms[0].pat.kind; if args.len() == 1; if let PatKind::Binding(_, arg, ..) = strip_pat_refs(&args[0]).kind; let body = remove_blocks(arms[0].body); if path_to_local_id(body, arg); then { let mut applicability = Applicability::MachineApplicable; self.infallible_destructuring_match_linted = true; span_lint_and_sugg( cx, INFALLIBLE_DESTRUCTURING_MATCH, local.span, "you seem to be trying to use `match` to destructure a single infallible pattern. \ Consider using `let`", "try this", format!( "let {}({}) = {};", snippet_with_applicability(cx, variant_name.span, "..", &mut applicability), snippet_with_applicability(cx, local.pat.span, "..", &mut applicability), snippet_with_applicability(cx, target.span, "..", &mut applicability), ), applicability, ); } } } fn check_pat(&mut self, cx: &LateContext<'tcx>, pat: &'tcx Pat<'_>) { if_chain! 
{ if !in_external_macro(cx.sess(), pat.span); if !in_macro(pat.span); if let PatKind::Struct(QPath::Resolved(_, path), fields, true) = pat.kind; if let Some(def_id) = path.res.opt_def_id(); let ty = cx.tcx.type_of(def_id); if let ty::Adt(def, _) = ty.kind(); if def.is_struct() || def.is_union(); if fields.len() == def.non_enum_variant().fields.len(); then { span_lint_and_help( cx, REST_PAT_IN_FULLY_BOUND_STRUCTS, pat.span, "unnecessary use of `..` pattern in struct binding. All fields were already bound", None, "consider removing `..` from this binding", ); } } } extract_msrv_attr!(LateContext); } #[rustfmt::skip] fn check_single_match(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>) { if arms.len() == 2 && arms[0].guard.is_none() && arms[1].guard.is_none() { if in_macro(expr.span) { // Don't lint match expressions present in // macro_rules! block return; } if let PatKind::Or(..) = arms[0].pat.kind { // don't lint for or patterns for now, this makes // the lint noisy in unnecessary situations return; } let els = arms[1].body; let els = if is_unit_expr(remove_blocks(els)) { None } else if let ExprKind::Block(Block { stmts, expr: block_expr, .. }, _) = els.kind { if stmts.len() == 1 && block_expr.is_none() || stmts.is_empty() && block_expr.is_some() { // single statement/expr "else" block, don't lint return; } // block with 2+ statements or 1 expr and 1+ statement Some(els) } else { // not a block, don't lint return; }; let ty = cx.typeck_results().expr_ty(ex); if *ty.kind() != ty::Bool || is_lint_allowed(cx, MATCH_BOOL, ex.hir_id) { check_single_match_single_pattern(cx, ex, arms, expr, els); check_single_match_opt_like(cx, ex, arms, expr, ty, els); } } } fn check_single_match_single_pattern( cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>, els: Option<&Expr<'_>>, ) { if is_wild(arms[1].pat) { report_single_match_single_pattern(cx, ex, arms, expr, els); } } fn report_single_match_single_pattern( cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>, els: Option<&Expr<'_>>, ) { let lint = if els.is_some() { SINGLE_MATCH_ELSE } else { SINGLE_MATCH }; let els_str = els.map_or(String::new(), |els| { format!(" else {}", expr_block(cx, els, None, "..", Some(expr.span))) }); let (pat, pat_ref_count) = peel_hir_pat_refs(arms[0].pat); let (msg, sugg) = if_chain! { if let PatKind::Path(_) | PatKind::Lit(_) = pat.kind; let (ty, ty_ref_count) = peel_mid_ty_refs(cx.typeck_results().expr_ty(ex)); if let Some(spe_trait_id) = cx.tcx.lang_items().structural_peq_trait(); if let Some(pe_trait_id) = cx.tcx.lang_items().eq_trait(); if ty.is_integral() || ty.is_char() || ty.is_str() || (implements_trait(cx, ty, spe_trait_id, &[]) && implements_trait(cx, ty, pe_trait_id, &[ty.into()])); then { // scrutinee derives PartialEq and the pattern is a constant. let pat_ref_count = match pat.kind { // string literals are already a reference. PatKind::Lit(Expr { kind: ExprKind::Lit(lit), .. }) if lit.node.is_str() => pat_ref_count + 1, _ => pat_ref_count, }; // References are only implicitly added to the pattern, so no overflow here. // e.g. will work: match &Some(_) { Some(_) => () } // will not: match Some(_) { &Some(_) => () } let ref_count_diff = ty_ref_count - pat_ref_count; // Try to remove address of expressions first. let (ex, removed) = peel_n_hir_expr_refs(ex, ref_count_diff); let ref_count_diff = ref_count_diff - removed; let msg = "you seem to be trying to use `match` for an equality check. 
Consider using `if`"; let sugg = format!( "if {} == {}{} {}{}", snippet(cx, ex.span, ".."), // PartialEq for different reference counts may not exist. "&".repeat(ref_count_diff), snippet(cx, arms[0].pat.span, ".."), expr_block(cx, arms[0].body, None, "..", Some(expr.span)), els_str, ); (msg, sugg) } else { let msg = "you seem to be trying to use `match` for destructuring a single pattern. Consider using `if let`"; let sugg = format!( "if let {} = {} {}{}", snippet(cx, arms[0].pat.span, ".."), snippet(cx, ex.span, ".."), expr_block(cx, arms[0].body, None, "..", Some(expr.span)), els_str, ); (msg, sugg) } }; span_lint_and_sugg( cx, lint, expr.span, msg, "try this", sugg, Applicability::HasPlaceholders, ); } fn check_single_match_opt_like( cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>, ty: Ty<'_>, els: Option<&Expr<'_>>, ) { // list of candidate `Enum`s we know will never get any more members let candidates = &[ (&paths::COW, "Borrowed"), (&paths::COW, "Cow::Borrowed"), (&paths::COW, "Cow::Owned"), (&paths::COW, "Owned"), (&paths::OPTION, "None"), (&paths::RESULT, "Err"), (&paths::RESULT, "Ok"), ]; let path = match arms[1].pat.kind { PatKind::TupleStruct(ref path, inner, _) => { // Contains any non wildcard patterns (e.g., `Err(err)`)? if !inner.iter().all(is_wild) { return; } rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| s.print_qpath(path, false)) }, PatKind::Binding(BindingAnnotation::Unannotated, .., ident, None) => ident.to_string(), PatKind::Path(ref path) => { rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| s.print_qpath(path, false)) }, _ => return, }; for &(ty_path, pat_path) in candidates { if path == *pat_path && match_type(cx, ty, ty_path) { report_single_match_single_pattern(cx, ex, arms, expr, els); } } } fn check_match_bool(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>) { // Type of expression is `bool`. 
if *cx.typeck_results().expr_ty(ex).kind() == ty::Bool { span_lint_and_then( cx, MATCH_BOOL, expr.span, "you seem to be trying to match on a boolean expression", move |diag| { if arms.len() == 2 { // no guards let exprs = if let PatKind::Lit(arm_bool) = arms[0].pat.kind { if let ExprKind::Lit(ref lit) = arm_bool.kind { match lit.node { LitKind::Bool(true) => Some((&*arms[0].body, &*arms[1].body)), LitKind::Bool(false) => Some((&*arms[1].body, &*arms[0].body)), _ => None, } } else { None } } else { None }; if let Some((true_expr, false_expr)) = exprs { let sugg = match (is_unit_expr(true_expr), is_unit_expr(false_expr)) { (false, false) => Some(format!( "if {} {} else {}", snippet(cx, ex.span, "b"), expr_block(cx, true_expr, None, "..", Some(expr.span)), expr_block(cx, false_expr, None, "..", Some(expr.span)) )), (false, true) => Some(format!( "if {} {}", snippet(cx, ex.span, "b"), expr_block(cx, true_expr, None, "..", Some(expr.span)) )), (true, false) => { let test = Sugg::hir(cx, ex, ".."); Some(format!( "if {} {}", !test, expr_block(cx, false_expr, None, "..", Some(expr.span)) )) }, (true, true) => None, }; if let Some(sugg) = sugg { diag.span_suggestion( expr.span, "consider using an `if`/`else` expression", sugg, Applicability::HasPlaceholders, ); } } } }, ); } } fn check_overlapping_arms<'tcx>(cx: &LateContext<'tcx>, ex: &'tcx Expr<'_>, arms: &'tcx [Arm<'_>]) { if arms.len() >= 2 && cx.typeck_results().expr_ty(ex).is_integral() { let ranges = all_ranges(cx, arms, cx.typeck_results().expr_ty(ex)); if !ranges.is_empty() { if let Some((start, end)) = overlapping(&ranges) { span_lint_and_note( cx, MATCH_OVERLAPPING_ARM, start.span, "some ranges overlap", Some(end.span), "overlaps with this", ); } } } } fn check_wild_err_arm<'tcx>(cx: &LateContext<'tcx>, ex: &Expr<'tcx>, arms: &[Arm<'tcx>]) { let ex_ty = cx.typeck_results().expr_ty(ex).peel_refs(); if is_type_diagnostic_item(cx, ex_ty, sym::Result) { for arm in arms { if let PatKind::TupleStruct(ref path, inner, _) = arm.pat.kind { let path_str = rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| s.print_qpath(path, false)); if path_str == "Err" { let mut matching_wild = inner.iter().any(is_wild); let mut ident_bind_name = String::from("_"); if !matching_wild { // Looking for unused bindings (i.e.: `_e`) for pat in inner.iter() { if let PatKind::Binding(_, id, ident, None) = pat.kind { if ident.as_str().starts_with('_') && !is_local_used(cx, arm.body, id) { ident_bind_name = (&ident.name.as_str()).to_string(); matching_wild = true; } } } } if_chain! 
{ if matching_wild; if is_panic_call(arm.body); then { // `Err(_)` or `Err(_e)` arm with `panic!` found span_lint_and_note(cx, MATCH_WILD_ERR_ARM, arm.pat.span, &format!("`Err({})` matches all errors", &ident_bind_name), None, "match each error separately or use the error output, or use `.except(msg)` if the error case is unreachable", ); } } } } } } } enum CommonPrefixSearcher<'a> { None, Path(&'a [PathSegment<'a>]), Mixed, } impl CommonPrefixSearcher<'a> { fn with_path(&mut self, path: &'a [PathSegment<'a>]) { match path { [path @ .., _] => self.with_prefix(path), [] => (), } } fn with_prefix(&mut self, path: &'a [PathSegment<'a>]) { match self { Self::None => *self = Self::Path(path), Self::Path(self_path) if path .iter() .map(|p| p.ident.name) .eq(self_path.iter().map(|p| p.ident.name)) => {}, Self::Path(_) => *self = Self::Mixed, Self::Mixed => (), } } } fn is_hidden(cx: &LateContext<'_>, variant_def: &VariantDef) -> bool { let attrs = cx.tcx.get_attrs(variant_def.def_id); clippy_utils::attrs::is_doc_hidden(attrs) || clippy_utils::attrs::is_unstable(attrs) } #[allow(clippy::too_many_lines)] fn check_wild_enum_match(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) { let ty = cx.typeck_results().expr_ty(ex).peel_refs(); let adt_def = match ty.kind() { ty::Adt(adt_def, _) if adt_def.is_enum() && !(is_type_diagnostic_item(cx, ty, sym::Option) || is_type_diagnostic_item(cx, ty, sym::Result)) => { adt_def }, _ => return, }; // First pass - check for violation, but don't do much book-keeping because this is hopefully // the uncommon case, and the book-keeping is slightly expensive. let mut wildcard_span = None; let mut wildcard_ident = None; let mut has_non_wild = false; for arm in arms { match peel_hir_pat_refs(arm.pat).0.kind { PatKind::Wild => wildcard_span = Some(arm.pat.span), PatKind::Binding(_, _, ident, None) => { wildcard_span = Some(arm.pat.span); wildcard_ident = Some(ident); }, _ => has_non_wild = true, } } let wildcard_span = match wildcard_span { Some(x) if has_non_wild => x, _ => return, }; // Accumulate the variants which should be put in place of the wildcard because they're not // already covered. let has_hidden = adt_def.variants.iter().any(|x| is_hidden(cx, x)); let mut missing_variants: Vec<_> = adt_def.variants.iter().filter(|x| !is_hidden(cx, x)).collect(); let mut path_prefix = CommonPrefixSearcher::None; for arm in arms { // Guards mean that this case probably isn't exhaustively covered. Technically // this is incorrect, as we should really check whether each variant is exhaustively // covered by the set of guards that cover it, but that's really hard to do. recurse_or_patterns(arm.pat, |pat| { let path = match &peel_hir_pat_refs(pat).0.kind { PatKind::Path(path) => { #[allow(clippy::match_same_arms)] let id = match cx.qpath_res(path, pat.hir_id) { Res::Def( DefKind::Const | DefKind::ConstParam | DefKind::AnonConst | DefKind::InlineConst, _, ) => return, Res::Def(_, id) => id, _ => return, }; if arm.guard.is_none() { missing_variants.retain(|e| e.ctor_def_id != Some(id)); } path }, PatKind::TupleStruct(path, patterns, ..) => { if let Some(id) = cx.qpath_res(path, pat.hir_id).opt_def_id() { if arm.guard.is_none() && patterns.iter().all(|p| !is_refutable(cx, p)) { missing_variants.retain(|e| e.ctor_def_id != Some(id)); } } path }, PatKind::Struct(path, patterns, ..) 
=> { if let Some(id) = cx.qpath_res(path, pat.hir_id).opt_def_id() { if arm.guard.is_none() && patterns.iter().all(|p| !is_refutable(cx, p.pat)) { missing_variants.retain(|e| e.def_id != id); } } path }, _ => return, }; match path { QPath::Resolved(_, path) => path_prefix.with_path(path.segments), QPath::TypeRelative( hir::Ty { kind: TyKind::Path(QPath::Resolved(_, path)), .. }, _, ) => path_prefix.with_prefix(path.segments), _ => (), } }); } let format_suggestion = |variant: &VariantDef| { format!( "{}{}{}{}", if let Some(ident) = wildcard_ident { format!("{} @ ", ident.name) } else { String::new() }, if let CommonPrefixSearcher::Path(path_prefix) = path_prefix { let mut s = String::new(); for seg in path_prefix { s.push_str(&seg.ident.as_str()); s.push_str("::"); } s } else { let mut s = cx.tcx.def_path_str(adt_def.did); s.push_str("::"); s }, variant.ident.name, match variant.ctor_kind { CtorKind::Fn if variant.fields.len() == 1 => "(_)", CtorKind::Fn => "(..)", CtorKind::Const => "", CtorKind::Fictive => "{ .. }", } ) }; match missing_variants.as_slice() { [] => (), [x] if !adt_def.is_variant_list_non_exhaustive() && !has_hidden => span_lint_and_sugg( cx, MATCH_WILDCARD_FOR_SINGLE_VARIANTS, wildcard_span, "wildcard matches only a single variant and will also match any future added variants", "try this", format_suggestion(x), Applicability::MaybeIncorrect, ), variants => { let mut suggestions: Vec<_> = variants.iter().copied().map(format_suggestion).collect(); let message = if adt_def.is_variant_list_non_exhaustive() || has_hidden { suggestions.push("_".into()); "wildcard matches known variants and will also match future added variants" } else { "wildcard match will also match any future added variants" }; span_lint_and_sugg( cx, WILDCARD_ENUM_MATCH_ARM, wildcard_span, message, "try this", suggestions.join(" | "), Applicability::MaybeIncorrect, ); }, }; } // If the block contains only a `panic!` macro (as expression or statement) fn is_panic_call(expr: &Expr<'_>) -> bool { // Unwrap any wrapping blocks let span = if let ExprKind::Block(block, _) = expr.kind { match (&block.expr, block.stmts.len(), block.stmts.first()) { (&Some(exp), 0, _) => exp.span, (&None, 1, Some(stmt)) => stmt.span, _ => return false, } } else { expr.span }; is_expn_of(span, "panic").is_some() && is_expn_of(span, "unreachable").is_none() } fn check_match_ref_pats<'a, 'b, I>(cx: &LateContext<'_>, ex: &Expr<'_>, pats: I, expr: &Expr<'_>) where 'b: 'a, I: Clone + Iterator<Item = &'a Pat<'b>>, { if !has_multiple_ref_pats(pats.clone()) { return; } let (first_sugg, msg, title); let span = ex.span.source_callsite(); if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, inner) = ex.kind { first_sugg = once((span, Sugg::hir_with_macro_callsite(cx, inner, "..").to_string())); msg = "try"; title = "you don't need to add `&` to both the expression and the patterns"; } else { first_sugg = once((span, Sugg::hir_with_macro_callsite(cx, ex, "..").deref().to_string())); msg = "instead of prefixing all patterns with `&`, you can dereference the expression"; title = "you don't need to add `&` to all patterns"; } let remaining_suggs = pats.filter_map(|pat| { if let PatKind::Ref(refp, _) = pat.kind { Some((pat.span, snippet(cx, refp.span, "..").to_string())) } else { None } }); span_lint_and_then(cx, MATCH_REF_PATS, expr.span, title, |diag| { if !expr.span.from_expansion() { multispan_sugg(diag, msg, first_sugg.chain(remaining_suggs)); } }); } fn check_match_as_ref(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: 
&Expr<'_>) { if arms.len() == 2 && arms[0].guard.is_none() && arms[1].guard.is_none() { let arm_ref: Option<BindingAnnotation> = if is_none_arm(cx, &arms[0]) { is_ref_some_arm(cx, &arms[1]) } else if is_none_arm(cx, &arms[1]) { is_ref_some_arm(cx, &arms[0]) } else { None }; if let Some(rb) = arm_ref { let suggestion = if rb == BindingAnnotation::Ref { "as_ref" } else { "as_mut" }; let output_ty = cx.typeck_results().expr_ty(expr); let input_ty = cx.typeck_results().expr_ty(ex); let cast = if_chain! { if let ty::Adt(_, substs) = input_ty.kind(); let input_ty = substs.type_at(0); if let ty::Adt(_, substs) = output_ty.kind(); let output_ty = substs.type_at(0); if let ty::Ref(_, output_ty, _) = *output_ty.kind(); if input_ty != output_ty; then { ".map(|x| x as _)" } else { "" } }; let mut applicability = Applicability::MachineApplicable; span_lint_and_sugg( cx, MATCH_AS_REF, expr.span, &format!("use `{}()` instead", suggestion), "try this", format!( "{}.{}(){}", snippet_with_applicability(cx, ex.span, "_", &mut applicability), suggestion, cast, ), applicability, ); } } } fn check_wild_in_or_pats(cx: &LateContext<'_>, arms: &[Arm<'_>]) { for arm in arms { if let PatKind::Or(fields) = arm.pat.kind { // look for multiple fields in this arm that contains at least one Wild pattern if fields.len() > 1 && fields.iter().any(is_wild) { span_lint_and_help( cx, WILDCARD_IN_OR_PATTERNS, arm.pat.span, "wildcard pattern covers any other pattern as it will match anyway", None, "consider handling `_` separately", ); } } } } /// Lint a `match` or `if let .. { .. } else { .. }` expr that could be replaced by `matches!` fn check_match_like_matches<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool { if let Some(higher::IfLet { let_pat, let_expr, if_then, if_else: Some(if_else), }) = higher::IfLet::hir(cx, expr) { return find_matches_sugg( cx, let_expr, array::IntoIter::new([(&[][..], Some(let_pat), if_then, None), (&[][..], None, if_else, None)]), expr, true, ); } if let ExprKind::Match(scrut, arms, MatchSource::Normal) = expr.kind { return find_matches_sugg( cx, scrut, arms.iter().map(|arm| { ( cx.tcx.hir().attrs(arm.hir_id), Some(arm.pat), arm.body, arm.guard.as_ref(), ) }), expr, false, ); } false } /// Lint a `match` or `if let` for replacement by `matches!` fn find_matches_sugg<'a, 'b, I>( cx: &LateContext<'_>, ex: &Expr<'_>, mut iter: I, expr: &Expr<'_>, is_if_let: bool, ) -> bool where 'b: 'a, I: Clone + DoubleEndedIterator + ExactSizeIterator + Iterator< Item = ( &'a [Attribute], Option<&'a Pat<'b>>, &'a Expr<'b>, Option<&'a Guard<'b>>, ), >, { if_chain! { if iter.len() >= 2; if cx.typeck_results().expr_ty(expr).is_bool(); if let Some((_, last_pat_opt, last_expr, _)) = iter.next_back(); let iter_without_last = iter.clone(); if let Some((first_attrs, _, first_expr, first_guard)) = iter.next(); if let Some(b0) = find_bool_lit(&first_expr.kind, is_if_let); if let Some(b1) = find_bool_lit(&last_expr.kind, is_if_let); if b0 != b1; if first_guard.is_none() || iter.len() == 0; if first_attrs.is_empty(); if iter .all(|arm| { find_bool_lit(&arm.2.kind, is_if_let).map_or(false, |b| b == b0) && arm.3.is_none() && arm.0.is_empty() }); then { if let Some(last_pat) = last_pat_opt { if !is_wild(last_pat) { return false; } } // The suggestion may be incorrect, because some arms can have `cfg` attributes // evaluated into `false` and so such arms will be stripped before. 
let mut applicability = Applicability::MaybeIncorrect; let pat = { use itertools::Itertools as _; iter_without_last .filter_map(|arm| { let pat_span = arm.1?.span; Some(snippet_with_applicability(cx, pat_span, "..", &mut applicability)) }) .join(" | ") }; let pat_and_guard = if let Some(Guard::If(g)) = first_guard { format!("{} if {}", pat, snippet_with_applicability(cx, g.span, "..", &mut applicability)) } else { pat }; // strip potential borrows (#6503), but only if the type is a reference let mut ex_new = ex; if let ExprKind::AddrOf(BorrowKind::Ref, .., ex_inner) = ex.kind { if let ty::Ref(..) = cx.typeck_results().expr_ty(ex_inner).kind() { ex_new = ex_inner; } }; span_lint_and_sugg( cx, MATCH_LIKE_MATCHES_MACRO, expr.span, &format!("{} expression looks like `matches!` macro", if is_if_let { "if let .. else" } else { "match" }), "try this", format!( "{}matches!({}, {})", if b0 { "" } else { "!" }, snippet_with_applicability(cx, ex_new.span, "..", &mut applicability), pat_and_guard, ), applicability, ); true } else { false } } } /// Extract a `bool` or `{ bool }` fn find_bool_lit(ex: &ExprKind<'_>, is_if_let: bool) -> Option<bool> { match ex { ExprKind::Lit(Spanned { node: LitKind::Bool(b), .. }) => Some(*b), ExprKind::Block( rustc_hir::Block { stmts: &[], expr: Some(exp), .. }, _, ) if is_if_let => { if let ExprKind::Lit(Spanned { node: LitKind::Bool(b), .. }) = exp.kind { Some(b) } else { None } }, _ => None, } } #[allow(clippy::too_many_lines)] fn check_match_single_binding<'a>(cx: &LateContext<'a>, ex: &Expr<'a>, arms: &[Arm<'_>], expr: &Expr<'_>) { if in_macro(expr.span) || arms.len() != 1 || is_refutable(cx, arms[0].pat) { return; } // HACK: // This is a hack to deal with arms that are excluded by macros like `#[cfg]`. It is only used here // to prevent false positives as there is currently no better way to detect if code was excluded by // a macro. See PR #6435 if_chain! { if let Some(match_snippet) = snippet_opt(cx, expr.span); if let Some(arm_snippet) = snippet_opt(cx, arms[0].span); if let Some(ex_snippet) = snippet_opt(cx, ex.span); let rest_snippet = match_snippet.replace(&arm_snippet, "").replace(&ex_snippet, ""); if rest_snippet.contains("=>"); then { // The code it self contains another thick arrow "=>" // -> Either another arm or a comment return; } } let matched_vars = ex.span; let bind_names = arms[0].pat.span; let match_body = remove_blocks(arms[0].body); let mut snippet_body = if match_body.span.from_expansion() { Sugg::hir_with_macro_callsite(cx, match_body, "..").to_string() } else { snippet_block(cx, match_body.span, "..", Some(expr.span)).to_string() }; // Do we need to add ';' to suggestion ? match match_body.kind { ExprKind::Block(block, _) => { // macro + expr_ty(body) == () if block.span.from_expansion() && cx.typeck_results().expr_ty(match_body).is_unit() { snippet_body.push(';'); } }, _ => { // expr_ty(body) == () if cx.typeck_results().expr_ty(match_body).is_unit() { snippet_body.push(';'); } }, } let mut applicability = Applicability::MaybeIncorrect; match arms[0].pat.kind { PatKind::Binding(..) | PatKind::Tuple(_, _) | PatKind::Struct(..) 
=> { // If this match is in a local (`let`) stmt let (target_span, sugg) = if let Some(parent_let_node) = opt_parent_let(cx, ex) { ( parent_let_node.span, format!( "let {} = {};\n{}let {} = {};", snippet_with_applicability(cx, bind_names, "..", &mut applicability), snippet_with_applicability(cx, matched_vars, "..", &mut applicability), " ".repeat(indent_of(cx, expr.span).unwrap_or(0)), snippet_with_applicability(cx, parent_let_node.pat.span, "..", &mut applicability), snippet_body ), ) } else { // If we are in closure, we need curly braces around suggestion let mut indent = " ".repeat(indent_of(cx, ex.span).unwrap_or(0)); let (mut cbrace_start, mut cbrace_end) = ("".to_string(), "".to_string()); if let Some(parent_expr) = get_parent_expr(cx, expr) { if let ExprKind::Closure(..) = parent_expr.kind { cbrace_end = format!("\n{}}}", indent); // Fix body indent due to the closure indent = " ".repeat(indent_of(cx, bind_names).unwrap_or(0)); cbrace_start = format!("{{\n{}", indent); } } // If the parent is already an arm, and the body is another match statement, // we need curly braces around suggestion let parent_node_id = cx.tcx.hir().get_parent_node(expr.hir_id); if let Node::Arm(arm) = &cx.tcx.hir().get(parent_node_id) { if let ExprKind::Match(..) = arm.body.kind { cbrace_end = format!("\n{}}}", indent); // Fix body indent due to the match indent = " ".repeat(indent_of(cx, bind_names).unwrap_or(0)); cbrace_start = format!("{{\n{}", indent); } } ( expr.span, format!( "{}let {} = {};\n{}{}{}", cbrace_start, snippet_with_applicability(cx, bind_names, "..", &mut applicability), snippet_with_applicability(cx, matched_vars, "..", &mut applicability), indent, snippet_body, cbrace_end ), ) }; span_lint_and_sugg( cx, MATCH_SINGLE_BINDING, target_span, "this match could be written as a `let` statement", "consider using `let` statement", sugg, applicability, ); }, PatKind::Wild => { if ex.can_have_side_effects() { let indent = " ".repeat(indent_of(cx, expr.span).unwrap_or(0)); let sugg = format!( "{};\n{}{}", snippet_with_applicability(cx, ex.span, "..", &mut applicability), indent, snippet_body ); span_lint_and_sugg( cx, MATCH_SINGLE_BINDING, expr.span, "this match could be replaced by its scrutinee and body", "consider using the scrutinee and body instead", sugg, applicability, ); } else { span_lint_and_sugg( cx, MATCH_SINGLE_BINDING, expr.span, "this match could be replaced by its body itself", "consider using the match body instead", snippet_body, Applicability::MachineApplicable, ); } }, _ => (), } } /// Returns true if the `ex` match expression is in a local (`let`) statement fn opt_parent_let<'a>(cx: &LateContext<'a>, ex: &Expr<'a>) -> Option<&'a Local<'a>> { let map = &cx.tcx.hir(); if_chain! { if let Some(Node::Expr(parent_arm_expr)) = map.find(map.get_parent_node(ex.hir_id)); if let Some(Node::Local(parent_let_expr)) = map.find(map.get_parent_node(parent_arm_expr.hir_id)); then { return Some(parent_let_expr); } } None } /// Gets all arms that are unbounded `PatRange`s. fn all_ranges<'tcx>(cx: &LateContext<'tcx>, arms: &'tcx [Arm<'_>], ty: Ty<'tcx>) -> Vec<SpannedRange<FullInt>> { arms.iter() .filter_map(|arm| { if let Arm { pat, guard: None, .. 
} = *arm { if let PatKind::Range(ref lhs, ref rhs, range_end) = pat.kind { let lhs = match lhs { Some(lhs) => constant(cx, cx.typeck_results(), lhs)?.0, None => miri_to_const(ty.numeric_min_val(cx.tcx)?)?, }; let rhs = match rhs { Some(rhs) => constant(cx, cx.typeck_results(), rhs)?.0, None => miri_to_const(ty.numeric_max_val(cx.tcx)?)?, }; let lhs_val = lhs.int_value(cx, ty)?; let rhs_val = rhs.int_value(cx, ty)?; let rhs_bound = match range_end { RangeEnd::Included => Bound::Included(rhs_val), RangeEnd::Excluded => Bound::Excluded(rhs_val), }; return Some(SpannedRange { span: pat.span, node: (lhs_val, rhs_bound), }); } if let PatKind::Lit(value) = pat.kind { let value = constant_full_int(cx, cx.typeck_results(), value)?; return Some(SpannedRange { span: pat.span, node: (value, Bound::Included(value)), }); } } None }) .collect() } #[derive(Debug, Eq, PartialEq)] pub struct SpannedRange<T> { pub span: Span, pub node: (T, Bound<T>), } // Checks if arm has the form `None => None` fn is_none_arm(cx: &LateContext<'_>, arm: &Arm<'_>) -> bool { matches!(arm.pat.kind, PatKind::Path(ref qpath) if is_lang_ctor(cx, qpath, OptionNone)) } // Checks if arm has the form `Some(ref v) => Some(v)` (checks for `ref` and `ref mut`) fn is_ref_some_arm(cx: &LateContext<'_>, arm: &Arm<'_>) -> Option<BindingAnnotation> { if_chain! { if let PatKind::TupleStruct(ref qpath, [first_pat, ..], _) = arm.pat.kind; if is_lang_ctor(cx, qpath, OptionSome); if let PatKind::Binding(rb, .., ident, _) = first_pat.kind; if rb == BindingAnnotation::Ref || rb == BindingAnnotation::RefMut; if let ExprKind::Call(e, args) = remove_blocks(arm.body).kind; if let ExprKind::Path(ref some_path) = e.kind; if is_lang_ctor(cx, some_path, OptionSome) && args.len() == 1; if let ExprKind::Path(QPath::Resolved(_, path2)) = args[0].kind; if path2.segments.len() == 1 && ident.name == path2.segments[0].ident.name; then { return Some(rb) } } None } fn has_multiple_ref_pats<'a, 'b, I>(pats: I) -> bool where 'b: 'a, I: Iterator<Item = &'a Pat<'b>>, { let mut ref_count = 0; for opt in pats.map(|pat| match pat.kind { PatKind::Ref(..) 
=> Some(true), // &-patterns PatKind::Wild => Some(false), // an "anything" wildcard is also fine _ => None, // any other pattern is not fine }) { if let Some(inner) = opt { if inner { ref_count += 1; } } else { return false; } } ref_count > 1 } pub fn overlapping<T>(ranges: &[SpannedRange<T>]) -> Option<(&SpannedRange<T>, &SpannedRange<T>)> where T: Copy + Ord, { #[derive(Copy, Clone, Debug, Eq, PartialEq)] enum Kind<'a, T> { Start(T, &'a SpannedRange<T>), End(Bound<T>, &'a SpannedRange<T>), } impl<'a, T: Copy> Kind<'a, T> { fn range(&self) -> &'a SpannedRange<T> { match *self { Kind::Start(_, r) | Kind::End(_, r) => r, } } fn value(self) -> Bound<T> { match self { Kind::Start(t, _) => Bound::Included(t), Kind::End(t, _) => t, } } } impl<'a, T: Copy + Ord> PartialOrd for Kind<'a, T> { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl<'a, T: Copy + Ord> Ord for Kind<'a, T> { fn cmp(&self, other: &Self) -> Ordering { match (self.value(), other.value()) { (Bound::Included(a), Bound::Included(b)) | (Bound::Excluded(a), Bound::Excluded(b)) => a.cmp(&b), // Range patterns cannot be unbounded (yet) (Bound::Unbounded, _) | (_, Bound::Unbounded) => unimplemented!(), (Bound::Included(a), Bound::Excluded(b)) => match a.cmp(&b) { Ordering::Equal => Ordering::Greater, other => other, }, (Bound::Excluded(a), Bound::Included(b)) => match a.cmp(&b) { Ordering::Equal => Ordering::Less, other => other, }, } } } let mut values = Vec::with_capacity(2 * ranges.len()); for r in ranges { values.push(Kind::Start(r.node.0, r)); values.push(Kind::End(r.node.1, r)); } values.sort(); for (a, b) in iter::zip(&values, values.iter().skip(1)) { match (a, b) { (&Kind::Start(_, ra), &Kind::End(_, rb)) => { if ra.node != rb.node { return Some((ra, rb)); } }, (&Kind::End(a, _), &Kind::Start(b, _)) if a != Bound::Included(b) => (), _ => { // skip if the range `a` is completely included into the range `b` if let Ordering::Equal | Ordering::Less = a.cmp(b) { let kind_a = Kind::End(a.range().node.1, a.range()); let kind_b = Kind::End(b.range().node.1, b.range()); if let Ordering::Equal | Ordering::Greater = kind_a.cmp(&kind_b) { return None; } } return Some((a.range(), b.range())); }, } } None } mod redundant_pattern_match { use super::REDUNDANT_PATTERN_MATCHING; use clippy_utils::diagnostics::span_lint_and_then; use clippy_utils::higher; use clippy_utils::source::{snippet, snippet_with_applicability}; use clippy_utils::ty::{implements_trait, is_type_diagnostic_item, is_type_lang_item, match_type}; use clippy_utils::{is_lang_ctor, is_qpath_def_path, is_trait_method, paths}; use if_chain::if_chain; use rustc_ast::ast::LitKind; use rustc_data_structures::fx::FxHashSet; use rustc_errors::Applicability; use rustc_hir::LangItem::{OptionNone, OptionSome, PollPending, PollReady, ResultErr, ResultOk}; use rustc_hir::{ intravisit::{walk_expr, ErasedMap, NestedVisitorMap, Visitor}, Arm, Block, Expr, ExprKind, LangItem, MatchSource, Node, Pat, PatKind, QPath, }; use rustc_lint::LateContext; use rustc_middle::ty::{self, subst::GenericArgKind, Ty}; use rustc_span::sym; pub fn check<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) { if let Some(higher::IfLet { if_else, let_pat, let_expr, .. }) = higher::IfLet::hir(cx, expr) { find_sugg_for_if_let(cx, expr, let_pat, let_expr, "if", if_else.is_some()); } if let ExprKind::Match(op, arms, MatchSource::Normal) = &expr.kind { find_sugg_for_match(cx, expr, op, arms); } if let Some(higher::WhileLet { let_pat, let_expr, .. 
}) = higher::WhileLet::hir(expr) { find_sugg_for_if_let(cx, expr, let_pat, let_expr, "while", false); } } /// Checks if the drop order for a type matters. Some std types implement drop solely to /// deallocate memory. For these types, and composites containing them, changing the drop order /// won't result in any observable side effects. fn type_needs_ordered_drop(cx: &LateContext<'tcx>, ty: Ty<'tcx>) -> bool { type_needs_ordered_drop_inner(cx, ty, &mut FxHashSet::default()) } fn type_needs_ordered_drop_inner(cx: &LateContext<'tcx>, ty: Ty<'tcx>, seen: &mut FxHashSet<Ty<'tcx>>) -> bool { if !seen.insert(ty) { return false; } if !ty.needs_drop(cx.tcx, cx.param_env) { false } else if !cx .tcx .lang_items() .drop_trait() .map_or(false, |id| implements_trait(cx, ty, id, &[])) { // This type doesn't implement drop, so no side effects here. // Check if any component type has any. match ty.kind() { ty::Tuple(_) => ty.tuple_fields().any(|ty| type_needs_ordered_drop_inner(cx, ty, seen)), ty::Array(ty, _) => type_needs_ordered_drop_inner(cx, ty, seen), ty::Adt(adt, subs) => adt .all_fields() .map(|f| f.ty(cx.tcx, subs)) .any(|ty| type_needs_ordered_drop_inner(cx, ty, seen)), _ => true, } } // Check for std types which implement drop, but only for memory allocation. else if is_type_diagnostic_item(cx, ty, sym::Vec) || is_type_lang_item(cx, ty, LangItem::OwnedBox) || is_type_diagnostic_item(cx, ty, sym::Rc) || is_type_diagnostic_item(cx, ty, sym::Arc) || is_type_diagnostic_item(cx, ty, sym::cstring_type) || is_type_diagnostic_item(cx, ty, sym::BTreeMap) || is_type_diagnostic_item(cx, ty, sym::LinkedList) || match_type(cx, ty, &paths::WEAK_RC) || match_type(cx, ty, &paths::WEAK_ARC) { // Check all of the generic arguments. if let ty::Adt(_, subs) = ty.kind() { subs.types().any(|ty| type_needs_ordered_drop_inner(cx, ty, seen)) } else { true } } else { true } } // Extract the generic arguments out of a type fn try_get_generic_ty(ty: Ty<'_>, index: usize) -> Option<Ty<'_>> { if_chain! { if let ty::Adt(_, subs) = ty.kind(); if let Some(sub) = subs.get(index); if let GenericArgKind::Type(sub_ty) = sub.unpack(); then { Some(sub_ty) } else { None } } } // Checks if there are any temporaries created in the given expression for which drop order // matters. fn temporaries_need_ordered_drop(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool { struct V<'a, 'tcx> { cx: &'a LateContext<'tcx>, res: bool, } impl<'a, 'tcx> Visitor<'tcx> for V<'a, 'tcx> { type Map = ErasedMap<'tcx>; fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> { NestedVisitorMap::None } fn visit_expr(&mut self, expr: &'tcx Expr<'tcx>) { match expr.kind { // Taking the reference of a value leaves a temporary // e.g. In `&String::new()` the string is a temporary value. // Remaining fields are temporary values // e.g. In `(String::new(), 0).1` the string is a temporary value. ExprKind::AddrOf(_, _, expr) | ExprKind::Field(expr, _) => { if !matches!(expr.kind, ExprKind::Path(_)) { if type_needs_ordered_drop(self.cx, self.cx.typeck_results().expr_ty(expr)) { self.res = true; } else { self.visit_expr(expr); } } }, // the base type is alway taken by reference. // e.g. In `(vec![0])[0]` the vector is a temporary value. ExprKind::Index(base, index) => { if !matches!(base.kind, ExprKind::Path(_)) { if type_needs_ordered_drop(self.cx, self.cx.typeck_results().expr_ty(base)) { self.res = true; } else { self.visit_expr(base); } } self.visit_expr(index); }, // Method calls can take self by reference. // e.g. 
In `String::new().len()` the string is a temporary value. ExprKind::MethodCall(_, _, [self_arg, args @ ..], _) => { if !matches!(self_arg.kind, ExprKind::Path(_)) { let self_by_ref = self .cx .typeck_results() .type_dependent_def_id(expr.hir_id) .map_or(false, |id| self.cx.tcx.fn_sig(id).skip_binder().inputs()[0].is_ref()); if self_by_ref && type_needs_ordered_drop(self.cx, self.cx.typeck_results().expr_ty(self_arg)) { self.res = true; } else { self.visit_expr(self_arg); } } args.iter().for_each(|arg| self.visit_expr(arg)); }, // Either explicitly drops values, or changes control flow. ExprKind::DropTemps(_) | ExprKind::Ret(_) | ExprKind::Break(..) | ExprKind::Yield(..) | ExprKind::Block(Block { expr: None, .. }, _) | ExprKind::Loop(..) => (), // Only consider the final expression. ExprKind::Block(Block { expr: Some(expr), .. }, _) => self.visit_expr(expr), _ => walk_expr(self, expr), } } } let mut v = V { cx, res: false }; v.visit_expr(expr); v.res } fn find_sugg_for_if_let<'tcx>( cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, let_pat: &Pat<'_>, let_expr: &'tcx Expr<'_>, keyword: &'static str, has_else: bool, ) { // also look inside refs let mut kind = &let_pat.kind; // if we have &None for example, peel it so we can detect "if let None = x" if let PatKind::Ref(inner, _mutability) = kind { kind = &inner.kind; } let op_ty = cx.typeck_results().expr_ty(let_expr); // Determine which function should be used, and the type contained by the corresponding // variant. let (good_method, inner_ty) = match kind { PatKind::TupleStruct(ref path, [sub_pat], _) => { if let PatKind::Wild = sub_pat.kind { if is_lang_ctor(cx, path, ResultOk) { ("is_ok()", try_get_generic_ty(op_ty, 0).unwrap_or(op_ty)) } else if is_lang_ctor(cx, path, ResultErr) { ("is_err()", try_get_generic_ty(op_ty, 1).unwrap_or(op_ty)) } else if is_lang_ctor(cx, path, OptionSome) { ("is_some()", op_ty) } else if is_lang_ctor(cx, path, PollReady) { ("is_ready()", op_ty) } else if is_qpath_def_path(cx, path, sub_pat.hir_id, &paths::IPADDR_V4) { ("is_ipv4()", op_ty) } else if is_qpath_def_path(cx, path, sub_pat.hir_id, &paths::IPADDR_V6) { ("is_ipv6()", op_ty) } else { return; } } else { return; } }, PatKind::Path(ref path) => { let method = if is_lang_ctor(cx, path, OptionNone) { "is_none()" } else if is_lang_ctor(cx, path, PollPending) { "is_pending()" } else { return; }; // `None` and `Pending` don't have an inner type. (method, cx.tcx.types.unit) },<|fim▁hole|> // type needs to be considered, not just the inner type of the branch being matched on. // Note the last expression in a block is dropped after all local bindings. let check_ty = if has_else || (keyword == "if" && matches!(cx.tcx.hir().parent_iter(expr.hir_id).next(), Some((_, Node::Block(..))))) { op_ty } else { inner_ty }; // All temporaries created in the scrutinee expression are dropped at the same time as the // scrutinee would be, so they have to be considered as well. // e.g. in `if let Some(x) = foo.lock().unwrap().baz.as_ref() { .. }` the lock will be held // for the duration if body. let needs_drop = type_needs_ordered_drop(cx, check_ty) || temporaries_need_ordered_drop(cx, let_expr); // check that `while_let_on_iterator` lint does not trigger if_chain! 
{ if keyword == "while"; if let ExprKind::MethodCall(method_path, _, _, _) = let_expr.kind; if method_path.ident.name == sym::next; if is_trait_method(cx, let_expr, sym::Iterator); then { return; } } let result_expr = match &let_expr.kind { ExprKind::AddrOf(_, _, borrowed) => borrowed, _ => let_expr, }; span_lint_and_then( cx, REDUNDANT_PATTERN_MATCHING, let_pat.span, &format!("redundant pattern matching, consider using `{}`", good_method), |diag| { // if/while let ... = ... { ... } // ^^^^^^^^^^^^^^^^^^^^^^^^^^^ let expr_span = expr.span; // if/while let ... = ... { ... } // ^^^ let op_span = result_expr.span.source_callsite(); // if/while let ... = ... { ... } // ^^^^^^^^^^^^^^^^^^^ let span = expr_span.until(op_span.shrink_to_hi()); let mut app = if needs_drop { Applicability::MaybeIncorrect } else { Applicability::MachineApplicable }; let sugg = snippet_with_applicability(cx, op_span, "_", &mut app); diag.span_suggestion(span, "try this", format!("{} {}.{}", keyword, sugg, good_method), app); if needs_drop { diag.note("this will change drop order of the result, as well as all temporaries"); diag.note("add `#[allow(clippy::redundant_pattern_matching)]` if this is important"); } }, ); } fn find_sugg_for_match<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, op: &Expr<'_>, arms: &[Arm<'_>]) { if arms.len() == 2 { let node_pair = (&arms[0].pat.kind, &arms[1].pat.kind); let found_good_method = match node_pair { ( PatKind::TupleStruct(ref path_left, patterns_left, _), PatKind::TupleStruct(ref path_right, patterns_right, _), ) if patterns_left.len() == 1 && patterns_right.len() == 1 => { if let (PatKind::Wild, PatKind::Wild) = (&patterns_left[0].kind, &patterns_right[0].kind) { find_good_method_for_match( cx, arms, path_left, path_right, &paths::RESULT_OK, &paths::RESULT_ERR, "is_ok()", "is_err()", ) .or_else(|| { find_good_method_for_match( cx, arms, path_left, path_right, &paths::IPADDR_V4, &paths::IPADDR_V6, "is_ipv4()", "is_ipv6()", ) }) } else { None } }, (PatKind::TupleStruct(ref path_left, patterns, _), PatKind::Path(ref path_right)) | (PatKind::Path(ref path_left), PatKind::TupleStruct(ref path_right, patterns, _)) if patterns.len() == 1 => { if let PatKind::Wild = patterns[0].kind { find_good_method_for_match( cx, arms, path_left, path_right, &paths::OPTION_SOME, &paths::OPTION_NONE, "is_some()", "is_none()", ) .or_else(|| { find_good_method_for_match( cx, arms, path_left, path_right, &paths::POLL_READY, &paths::POLL_PENDING, "is_ready()", "is_pending()", ) }) } else { None } }, _ => None, }; if let Some(good_method) = found_good_method { let span = expr.span.to(op.span); let result_expr = match &op.kind { ExprKind::AddrOf(_, _, borrowed) => borrowed, _ => op, }; span_lint_and_then( cx, REDUNDANT_PATTERN_MATCHING, expr.span, &format!("redundant pattern matching, consider using `{}`", good_method), |diag| { diag.span_suggestion( span, "try this", format!("{}.{}", snippet(cx, result_expr.span, "_"), good_method), Applicability::MaybeIncorrect, // snippet ); }, ); } } } #[allow(clippy::too_many_arguments)] fn find_good_method_for_match<'a>( cx: &LateContext<'_>, arms: &[Arm<'_>], path_left: &QPath<'_>, path_right: &QPath<'_>, expected_left: &[&str], expected_right: &[&str], should_be_left: &'a str, should_be_right: &'a str, ) -> Option<&'a str> { let body_node_pair = if is_qpath_def_path(cx, path_left, arms[0].pat.hir_id, expected_left) && is_qpath_def_path(cx, path_right, arms[1].pat.hir_id, expected_right) { (&(*arms[0].body).kind, &(*arms[1].body).kind) } else if 
is_qpath_def_path(cx, path_right, arms[1].pat.hir_id, expected_left) && is_qpath_def_path(cx, path_left, arms[0].pat.hir_id, expected_right) { (&(*arms[1].body).kind, &(*arms[0].body).kind) } else { return None; }; match body_node_pair { (ExprKind::Lit(ref lit_left), ExprKind::Lit(ref lit_right)) => match (&lit_left.node, &lit_right.node) { (LitKind::Bool(true), LitKind::Bool(false)) => Some(should_be_left), (LitKind::Bool(false), LitKind::Bool(true)) => Some(should_be_right), _ => None, }, _ => None, } } } #[test] fn test_overlapping() { use rustc_span::source_map::DUMMY_SP; let sp = |s, e| SpannedRange { span: DUMMY_SP, node: (s, e), }; assert_eq!(None, overlapping::<u8>(&[])); assert_eq!(None, overlapping(&[sp(1, Bound::Included(4))])); assert_eq!( None, overlapping(&[sp(1, Bound::Included(4)), sp(5, Bound::Included(6))]) ); assert_eq!( None, overlapping(&[ sp(1, Bound::Included(4)), sp(5, Bound::Included(6)), sp(10, Bound::Included(11)) ],) ); assert_eq!( Some((&sp(1, Bound::Included(4)), &sp(3, Bound::Included(6)))), overlapping(&[sp(1, Bound::Included(4)), sp(3, Bound::Included(6))]) ); assert_eq!( Some((&sp(5, Bound::Included(6)), &sp(6, Bound::Included(11)))), overlapping(&[ sp(1, Bound::Included(4)), sp(5, Bound::Included(6)), sp(6, Bound::Included(11)) ],) ); } /// Implementation of `MATCH_SAME_ARMS`. fn lint_match_arms<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) { if let ExprKind::Match(_, arms, MatchSource::Normal) = expr.kind { let hash = |&(_, arm): &(usize, &Arm<'_>)| -> u64 { let mut h = SpanlessHash::new(cx); h.hash_expr(arm.body); h.finish() }; let eq = |&(lindex, lhs): &(usize, &Arm<'_>), &(rindex, rhs): &(usize, &Arm<'_>)| -> bool { let min_index = usize::min(lindex, rindex); let max_index = usize::max(lindex, rindex); let mut local_map: HirIdMap<HirId> = HirIdMap::default(); let eq_fallback = |a: &Expr<'_>, b: &Expr<'_>| { if_chain! { if let Some(a_id) = path_to_local(a); if let Some(b_id) = path_to_local(b); let entry = match local_map.entry(a_id) { Entry::Vacant(entry) => entry, // check if using the same bindings as before Entry::Occupied(entry) => return *entry.get() == b_id, }; // the names technically don't have to match; this makes the lint more conservative if cx.tcx.hir().name(a_id) == cx.tcx.hir().name(b_id); if TyS::same_type(cx.typeck_results().expr_ty(a), cx.typeck_results().expr_ty(b)); if pat_contains_local(lhs.pat, a_id); if pat_contains_local(rhs.pat, b_id); then { entry.insert(b_id); true } else { false } } }; // Arms with a guard are ignored, those can’t always be merged together // This is also the case for arms in-between each there is an arm with a guard (min_index..=max_index).all(|index| arms[index].guard.is_none()) && SpanlessEq::new(cx) .expr_fallback(eq_fallback) .eq_expr(lhs.body, rhs.body) // these checks could be removed to allow unused bindings && bindings_eq(lhs.pat, local_map.keys().copied().collect()) && bindings_eq(rhs.pat, local_map.values().copied().collect()) }; let indexed_arms: Vec<(usize, &Arm<'_>)> = arms.iter().enumerate().collect(); for (&(_, i), &(_, j)) in search_same(&indexed_arms, hash, eq) { span_lint_and_then( cx, MATCH_SAME_ARMS, j.body.span, "this `match` has identical arm bodies", |diag| { diag.span_note(i.body.span, "same as this"); // Note: this does not use `span_suggestion` on purpose: // there is no clean way // to remove the other arm. Building a span and suggest to replace it to "" // makes an even more confusing error message. 
Also in order not to make up a // span for the whole pattern, the suggestion is only shown when there is only // one pattern. The user should know about `|` if they are already using it… let lhs = snippet(cx, i.pat.span, "<pat1>"); let rhs = snippet(cx, j.pat.span, "<pat2>"); if let PatKind::Wild = j.pat.kind { // if the last arm is _, then i could be integrated into _ // note that i.pat cannot be _, because that would mean that we're // hiding all the subsequent arms, and rust won't compile diag.span_note( i.body.span, &format!( "`{}` has the same arm body as the `_` wildcard, consider removing it", lhs ), ); } else { diag.span_help(i.pat.span, &format!("consider refactoring into `{} | {}`", lhs, rhs,)) .help("...or consider changing the match arm bodies"); } }, ); } } } fn pat_contains_local(pat: &Pat<'_>, id: HirId) -> bool { let mut result = false; pat.walk_short(|p| { result |= matches!(p.kind, PatKind::Binding(_, binding_id, ..) if binding_id == id); !result }); result } /// Returns true if all the bindings in the `Pat` are in `ids` and vice versa fn bindings_eq(pat: &Pat<'_>, mut ids: HirIdSet) -> bool { let mut result = true; pat.each_binding_or_first(&mut |_, id, _, _| result &= ids.remove(&id)); result && ids.is_empty() }<|fim▁end|>
_ => return, }; // If this is the last expression in a block or there is an else clause then the whole
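A minimal, hand-written sketch of the kind of code the match lints in the row above fire on; the lint names and suggested rewrites in the comments are taken from the messages emitted by that pass, while the variable names and values here are illustrative only:

// Examples of patterns flagged by the clippy match lints shown above,
// with each lint's suggested rewrite noted as a comment.
fn main() {
    let opt: Option<String> = Some(String::from("x"));

    // `match_like_matches_macro`: suggests `matches!(opt, Some(_))`
    let _has_value = match opt {
        Some(_) => true,
        None => false,
    };

    // `match_as_ref`: suggests `opt.as_ref()`
    let _borrowed = match opt {
        Some(ref v) => Some(v),
        None => None,
    };

    // `redundant_pattern_matching`: suggests `opt.is_some()`
    if let Some(_) = opt {}

    // `match_single_binding`: suggests `let x = 5; println!("{}", x);`
    match 5 {
        x => println!("{}", x),
    }
}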
<|file_name|>rfc-1014.rs<|end_file_name|><|fim▁begin|>// Copyright 2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // run-pass #![allow(dead_code)] // ignore-cloudabi stdout does not map to file descriptor 1 by default // ignore-wasm32-bare no libc #![feature(rustc_private)] extern crate libc; type DWORD = u32; type HANDLE = *mut u8; #[cfg(windows)]<|fim▁hole|> fn GetStdHandle(which: DWORD) -> HANDLE; fn CloseHandle(handle: HANDLE) -> i32; } #[cfg(windows)] fn close_stdout() { const STD_OUTPUT_HANDLE: DWORD = -11i32 as DWORD; unsafe { CloseHandle(GetStdHandle(STD_OUTPUT_HANDLE)); } } #[cfg(not(windows))] fn close_stdout() { unsafe { libc::close(1); } } fn main() { close_stdout(); println!("hello world"); }<|fim▁end|>
extern "system" {
<|file_name|>endpoints.py<|end_file_name|><|fim▁begin|># coding=utf-8 from re import match from mapi.exceptions import ( MapiNetworkException, MapiNotFoundException, MapiProviderException, ) from mapi.utils import clean_dict, request_json __all__ = [ "omdb_search", "omdb_title", "tmdb_find", "tmdb_movies", "tmdb_search_movies", "tvdb_episodes_id", "tvdb_login", "tvdb_refresh_token", "tvdb_search_series", "tvdb_series_id", "tvdb_series_id_episodes", "tvdb_series_id_episodes_query", ] OMDB_MEDIA_TYPES = {"episode", "movie", "series"} OMDB_PLOT_TYPES = {"short", "long"} TVDB_LANGUAGE_CODES = [ "cs", "da", "de", "el", "en", "es", "fi", "fr", "he", "hr", "hu", "it", "ja", "ko", "nl", "no", "pl", "pt", "ru", "sl", "sv", "tr", "zh", ] def omdb_title( api_key, id_imdb=None, media_type=None, title=None, season=None, episode=None, year=None, plot=None, cache=True, ): """ Lookup media using the Open Movie Database. Online docs: http://www.omdbapi.com/#parameters """ if (not title and not id_imdb) or (title and id_imdb): raise MapiProviderException("either id_imdb or title must be specified") elif media_type and media_type not in OMDB_MEDIA_TYPES: raise MapiProviderException( "media_type must be one of %s" % ",".join(OMDB_MEDIA_TYPES) ) elif plot and plot not in OMDB_PLOT_TYPES: raise MapiProviderException( "plot must be one of %s" % ",".join(OMDB_PLOT_TYPES) ) url = "http://www.omdbapi.com" parameters = { "apikey": api_key, "i": id_imdb, "t": title, "y": year, "season": season, "episode": episode, "type": media_type, "plot": plot, } parameters = clean_dict(parameters) status, content = request_json(url, parameters, cache=cache) error = content.get("Error") if isinstance(content, dict) else None if status == 401: raise MapiProviderException("invalid API key") elif status != 200 or not isinstance(content, dict): raise MapiNetworkException("OMDb down or unavailable?") elif error: raise MapiNotFoundException(error) return content def omdb_search(api_key, query, year=None, media_type=None, page=1, cache=True): """ Search for media using the Open Movie Database. Online docs: http://www.omdbapi.com/#parameters. """ if media_type and media_type not in OMDB_MEDIA_TYPES: raise MapiProviderException( "media_type must be one of %s" % ",".join(OMDB_MEDIA_TYPES) ) if 1 > page > 100: raise MapiProviderException("page must be between 1 and 100") url = "http://www.omdbapi.com" parameters = { "apikey": api_key, "s": query, "y": year, "type": media_type, "page": page, } parameters = clean_dict(parameters) status, content = request_json(url, parameters, cache=cache) if status == 401: raise MapiProviderException("invalid API key") elif content and not content.get("totalResults"): raise MapiNotFoundException() elif not content or status != 200: # pragma: no cover raise MapiNetworkException("OMDb down or unavailable?") return content def tmdb_find( api_key, external_source, external_id, language="en-US", cache=True ): """ Search for The Movie Database objects using another DB's foreign key. Note: language codes aren't checked on this end or by TMDb, so if you enter an invalid language code your search itself will succeed, but certain fields like synopsis will just be empty. Online docs: developers.themoviedb.org/3/find. 
""" sources = ["imdb_id", "freebase_mid", "freebase_id", "tvdb_id", "tvrage_id"] if external_source not in sources: raise MapiProviderException("external_source must be in %s" % sources) if external_source == "imdb_id" and not match(r"tt\d+", external_id): raise MapiProviderException("invalid imdb tt-const value") url = "https://api.themoviedb.org/3/find/" + external_id or "" parameters = { "api_key": api_key, "external_source": external_source, "language": language, } keys = [ "movie_results", "person_results", "tv_episode_results", "tv_results", "tv_season_results", ] status, content = request_json(url, parameters, cache=cache) if status == 401: raise MapiProviderException("invalid API key") elif status != 200 or not any(content.keys()): # pragma: no cover raise MapiNetworkException("TMDb down or unavailable?") elif status == 404 or not any(content.get(k, {}) for k in keys): raise MapiNotFoundException return content def tmdb_movies(api_key, id_tmdb, language="en-US", cache=True): """ Lookup a movie item using The Movie Database. Online docs: developers.themoviedb.org/3/movies. """ try: url = "https://api.themoviedb.org/3/movie/%d" % int(id_tmdb) except ValueError: raise MapiProviderException("id_tmdb must be numeric") parameters = {"api_key": api_key, "language": language} status, content = request_json(url, parameters, cache=cache) if status == 401: raise MapiProviderException("invalid API key") elif status == 404: raise MapiNotFoundException elif status != 200 or not any(content.keys()): # pragma: no cover raise MapiNetworkException("TMDb down or unavailable?") return content def tmdb_search_movies( api_key, title, year=None, adult=False, region=None, page=1, cache=True ): """ Search for movies using The Movie Database. Online docs: developers.themoviedb.org/3/search/search-movies. """ url = "https://api.themoviedb.org/3/search/movie" try: if year: year = int(year) except ValueError: raise MapiProviderException("year must be numeric") parameters = { "api_key": api_key, "query": title, "page": page, "include_adult": adult, "region": region, "year": year, } status, content = request_json(url, parameters, cache=cache) if status == 401: raise MapiProviderException("invalid API key") elif status != 200 or not any(content.keys()): # pragma: no cover raise MapiNetworkException("TMDb down or unavailable?") elif status == 404 or status == 422 or not content.get("total_results"): raise MapiNotFoundException return content def tvdb_login(api_key): """ Logs into TVDb using the provided api key. Note: You can register for a free TVDb key at thetvdb.com/?tab=apiregister Online docs: api.thetvdb.com/swagger#!/Authentication/post_login. """ url = "https://api.thetvdb.com/login" body = {"apikey": api_key} status, content = request_json(url, body=body, cache=False) if status == 401: raise MapiProviderException("invalid api key") elif status != 200 or not content.get("token"): # pragma: no cover raise MapiNetworkException("TVDb down or unavailable?") return content["token"] def tvdb_refresh_token(token): """ Refreshes JWT token. Online docs: api.thetvdb.com/swagger#!/Authentication/get_refresh_token. 
""" url = "https://api.thetvdb.com/refresh_token" headers = {"Authorization": "Bearer %s" % token} status, content = request_json(url, headers=headers, cache=False) if status == 401: raise MapiProviderException("invalid token") elif status != 200 or not content.get("token"): # pragma: no cover raise MapiNetworkException("TVDb down or unavailable?") return content["token"] def tvdb_episodes_id(token, id_tvdb, lang="en", cache=True): """ Returns the full information for a given episode id. Online docs: https://api.thetvdb.com/swagger#!/Episodes. """ if lang not in TVDB_LANGUAGE_CODES: raise MapiProviderException( "'lang' must be one of %s" % ",".join(TVDB_LANGUAGE_CODES) ) try: url = "https://api.thetvdb.com/episodes/%d" % int(id_tvdb) except ValueError: raise MapiProviderException("id_tvdb must be numeric") headers = {"Accept-Language": lang, "Authorization": "Bearer %s" % token} status, content = request_json(url, headers=headers, cache=cache) if status == 401: raise MapiProviderException("invalid token") elif status == 404: raise MapiNotFoundException elif status == 200 and "invalidLanguage" in content.get("errors", {}): raise MapiNotFoundException elif status != 200 or not content.get("data"): # pragma: no cover raise MapiNetworkException("TVDb down or unavailable?") return content def tvdb_series_id(token, id_tvdb, lang="en", cache=True): """ Returns a series records that contains all information known about a particular series id. Online docs: api.thetvdb.com/swagger#!/Series/get_series_id. """ if lang not in TVDB_LANGUAGE_CODES: raise MapiProviderException( "'lang' must be one of %s" % ",".join(TVDB_LANGUAGE_CODES) ) try: url = "https://api.thetvdb.com/series/%d" % int(id_tvdb) except ValueError: raise MapiProviderException("id_tvdb must be numeric") headers = {"Accept-Language": lang, "Authorization": "Bearer %s" % token} status, content = request_json(url, headers=headers, cache=cache) if status == 401: raise MapiProviderException("invalid token") elif status == 404: raise MapiNotFoundException elif status != 200 or not content.get("data"): # pragma: no cover raise MapiNetworkException("TVDb down or unavailable?") return content def tvdb_series_id_episodes(token, id_tvdb, page=1, lang="en", cache=True): """ All episodes for a given series. Note: Paginated with 100 results per page. Online docs: api.thetvdb.com/swagger#!/Series/get_series_id_episodes. """ if lang not in TVDB_LANGUAGE_CODES: raise MapiProviderException( "'lang' must be one of %s" % ",".join(TVDB_LANGUAGE_CODES) ) try: url = "https://api.thetvdb.com/series/%d/episodes" % int(id_tvdb)<|fim▁hole|> headers = {"Accept-Language": lang, "Authorization": "Bearer %s" % token} parameters = {"page": page} status, content = request_json( url, parameters, headers=headers, cache=cache ) if status == 401: raise MapiProviderException("invalid token") elif status == 404: raise MapiNotFoundException elif status != 200 or not content.get("data"): # pragma: no cover raise MapiNetworkException("TVDb down or unavailable?") return content def tvdb_series_id_episodes_query( token, id_tvdb, episode=None, season=None, page=1, lang="en", cache=True ): """ Allows the user to query against episodes for the given series. Note: Paginated with 100 results per page; omitted imdbId-- when would you ever need to query against both tvdb and imdb series ids? Online docs: api.thetvdb.com/swagger#!/Series/get_series_id_episodes_query. 
""" if lang not in TVDB_LANGUAGE_CODES: raise MapiProviderException( "'lang' must be one of %s" % ",".join(TVDB_LANGUAGE_CODES) ) try: url = "https://api.thetvdb.com/series/%d/episodes/query" % int(id_tvdb) except ValueError: raise MapiProviderException("id_tvdb must be numeric") headers = {"Accept-Language": lang, "Authorization": "Bearer %s" % token} parameters = {"airedSeason": season, "airedEpisode": episode, "page": page} status, content = request_json( url, parameters, headers=headers, cache=cache ) if status == 401: raise MapiProviderException("invalid token") elif status == 404: raise MapiNotFoundException elif status != 200 or not content.get("data"): # pragma: no cover raise MapiNetworkException("TVDb down or unavailable?") return content def tvdb_search_series( token, series=None, id_imdb=None, id_zap2it=None, lang="en", cache=True ): """ Allows the user to search for a series based on the following parameters. Online docs: https://api.thetvdb.com/swagger#!/Search/get_search_series Note: results a maximum of 100 entries per page, no option for pagination. """ if lang not in TVDB_LANGUAGE_CODES: raise MapiProviderException( "'lang' must be one of %s" % ",".join(TVDB_LANGUAGE_CODES) ) url = "https://api.thetvdb.com/search/series" parameters = {"name": series, "imdbId": id_imdb, "zap2itId": id_zap2it} headers = {"Accept-Language": lang, "Authorization": "Bearer %s" % token} status, content = request_json( url, parameters, headers=headers, cache=cache ) if status == 401: raise MapiProviderException("invalid token") elif status == 405: raise MapiProviderException( "series, id_imdb, id_zap2it parameters are mutually exclusive" ) elif status == 404: # pragma: no cover raise MapiNotFoundException elif status != 200 or not content.get("data"): # pragma: no cover raise MapiNetworkException("TVDb down or unavailable?") return content<|fim▁end|>
except ValueError: raise MapiProviderException("id_tvdb must be numeric")
<|file_name|>stability_cfg2.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT.<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // compile-flags:--cfg foo #![cfg_attr(foo, unstable)] #![cfg_attr(not(foo), stable)] #![staged_api]<|fim▁end|>
// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
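A small stand-alone sketch of the `cfg_attr` toggle exercised by the test above: with `--cfg foo` the first attribute is applied, otherwise the second one is. Ordinary lint-level attributes stand in for the `stable`/`unstable` stability attributes here, purely so the sketch compiles without `#![feature(staged_api)]`; the file name is a placeholder.

// Build with `rustc --cfg foo demo.rs` to apply the first attribute,
// or without `--cfg foo` to apply the second one instead.
#![cfg_attr(foo, allow(dead_code))]
#![cfg_attr(not(foo), warn(dead_code))]

fn unused() {}

fn main() {}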
<|file_name|>WindowTools.java<|end_file_name|><|fim▁begin|>/* * #%L * Bio-Formats Plugins for ImageJ: a collection of ImageJ plugins including the * Bio-Formats Importer, Bio-Formats Exporter, Bio-Formats Macro Extensions, * Data Browser and Stack Slicer. * %% * Copyright (C) 2006 - 2015 Open Microscopy Environment: * - Board of Regents of the University of Wisconsin-Madison * - Glencoe Software, Inc. * - University of Dundee * %% * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as * published by the Free Software Foundation, either version 2 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public * License along with this program. If not, see * <http://www.gnu.org/licenses/gpl-2.0.html>. * #L% */ package loci.plugins.util; import ij.IJ; import ij.ImageJ; import ij.gui.GenericDialog; import java.awt.BorderLayout; import java.awt.Checkbox; import java.awt.Choice; import java.awt.Component; import java.awt.Container; import java.awt.Dimension; import java.awt.Frame; import java.awt.GridBagConstraints; import java.awt.GridBagLayout; import java.awt.Panel; import java.awt.Point; import java.awt.Rectangle; import java.awt.ScrollPane; import java.awt.TextField; import java.awt.Toolkit; import java.awt.Window; import java.util.List; import java.util.StringTokenizer; import loci.common.DebugTools; import loci.plugins.BF; /** * Utility methods for managing ImageJ dialogs and windows. */ public final class WindowTools { // -- Constructor -- private WindowTools() { } // -- Utility methods -- /** Adds AWT scroll bars to the given container. 
*/ public static void addScrollBars(Container pane) { GridBagLayout layout = (GridBagLayout) pane.getLayout(); // extract components int count = pane.getComponentCount(); Component[] c = new Component[count]; GridBagConstraints[] gbc = new GridBagConstraints[count]; for (int i=0; i<count; i++) { c[i] = pane.getComponent(i); gbc[i] = layout.getConstraints(c[i]); } // clear components pane.removeAll(); layout.invalidateLayout(pane); // create new container panel Panel newPane = new Panel(); GridBagLayout newLayout = new GridBagLayout(); newPane.setLayout(newLayout); for (int i=0; i<count; i++) { newLayout.setConstraints(c[i], gbc[i]); newPane.add(c[i]); } // HACK - get preferred size for container panel // NB: don't know a better way: // - newPane.getPreferredSize() doesn't work // - newLayout.preferredLayoutSize(newPane) doesn't work Frame f = new Frame(); f.setLayout(new BorderLayout()); f.add(newPane, BorderLayout.CENTER); f.pack(); final Dimension size = newPane.getSize(); f.remove(newPane); f.dispose(); // compute best size for scrollable viewport size.width += 25; size.height += 15; Dimension screen = Toolkit.getDefaultToolkit().getScreenSize(); int maxWidth = 7 * screen.width / 8; int maxHeight = 3 * screen.height / 4; if (size.width > maxWidth) size.width = maxWidth; if (size.height > maxHeight) size.height = maxHeight; // create scroll pane ScrollPane scroll = new ScrollPane() { @Override public Dimension getPreferredSize() { return size; } }; scroll.add(newPane); // add scroll pane to original container GridBagConstraints constraints = new GridBagConstraints(); constraints.gridwidth = GridBagConstraints.REMAINDER; constraints.fill = GridBagConstraints.BOTH; constraints.weightx = 1.0; constraints.weighty = 1.0; layout.setConstraints(scroll, constraints); pane.add(scroll); } /** * Places the given window at a nice location on screen, either centered * below the ImageJ window if there is one, or else centered on screen. */ public static void placeWindow(Window w) { Dimension size = w.getSize(); Dimension screen = Toolkit.getDefaultToolkit().getScreenSize();<|fim▁hole|> if (ij == null) { // center config window on screen p.x = (screen.width - size.width) / 2; p.y = (screen.height - size.height) / 2; } else { // place config window below ImageJ window Rectangle ijBounds = ij.getBounds(); p.x = ijBounds.x + (ijBounds.width - size.width) / 2; p.y = ijBounds.y + ijBounds.height + 5; } // nudge config window away from screen edges final int pad = 10; if (p.x < pad) p.x = pad; else if (p.x + size.width + pad > screen.width) { p.x = screen.width - size.width - pad; } if (p.y < pad) p.y = pad; else if (p.y + size.height + pad > screen.height) { p.y = screen.height - size.height - pad; } w.setLocation(p); } /** Reports the given exception with stack trace in an ImageJ error dialog. */ public static void reportException(Throwable t) { reportException(t, false, null); } /** Reports the given exception with stack trace in an ImageJ error dialog. */ public static void reportException(Throwable t, boolean quiet) { reportException(t, quiet, null); } /** Reports the given exception with stack trace in an ImageJ error dialog. 
*/ public static void reportException(Throwable t, boolean quiet, String msg) { if (quiet) return; BF.status(quiet, ""); if (t != null) { String s = DebugTools.getStackTrace(t); StringTokenizer st = new StringTokenizer(s, "\n\r"); while (st.hasMoreTokens()) IJ.log(st.nextToken()); } if (msg != null) IJ.error("Bio-Formats Importer", msg); } @SuppressWarnings("unchecked") public static List<TextField> getNumericFields(GenericDialog gd) { return gd.getNumericFields(); } @SuppressWarnings("unchecked") public static List<Checkbox> getCheckboxes(GenericDialog gd) { return gd.getCheckboxes(); } @SuppressWarnings("unchecked") public static List<Choice> getChoices(GenericDialog gd) { return gd.getChoices(); } }<|fim▁end|>
ImageJ ij = IJ.getInstance(); Point p = new Point();