Dataset columns:
  prompt (large_string): lengths 70 to 991k
  completion (large_string): lengths 0 to 1.02k
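The rows below are raw fill-in-the-middle (FIM) pairs: each prompt embeds a source file name and a <|fim▁hole|> marker, and the completion supplies the span that was cut out. As a rough illustration only — the marker strings are copied from the rows exactly as shown, while the helper name and row access are hypothetical and not part of the dataset — one pair could be recombined into a full source file like this:

```python
import re

# Minimal sketch, assuming the exact marker strings visible in the rows below.
_NAME = re.compile(r"<\|file_name\|>(.*?)<\|end_file_name\|>", re.S)

def reconstruct(prompt, completion):
    """Return (file_name, full_source) for one prompt/completion pair."""
    m = _NAME.search(prompt)
    file_name = m.group(1) if m else ""
    body = prompt[m.end():] if m else prompt
    # Drop the begin/end markers, then splice the completion into the hole.
    body = body.replace("<|fim▁begin|>", "").replace("<|fim▁end|>", "")
    return file_name, body.replace("<|fim▁hole|>", completion)

# Hypothetical usage on one row of the dataset:
# name, source = reconstruct(row["prompt"], row["completion"])
```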
<|file_name|>sales_order.py<|end_file_name|><|fim▁begin|># Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors # License: GNU General Public License v3. See license.txt from __future__ import unicode_literals import frappe import json import frappe.utils from frappe.utils import cstr, flt, getdate, comma_and, cint from frappe import _ from frappe.model.utils import get_fetch_values from frappe.model.mapper import get_mapped_doc from erpnext.stock.stock_balance import update_bin_qty, get_reserved_qty from frappe.desk.notifications import clear_doctype_notifications from frappe.contacts.doctype.address.address import get_company_address from erpnext.controllers.selling_controller import SellingController from erpnext.subscription.doctype.subscription.subscription import month_map, get_next_date form_grid_templates = { "items": "templates/form_grid/item_grid.html" } class WarehouseRequired(frappe.ValidationError): pass class SalesOrder(SellingController): def __init__(self, arg1, arg2=None): super(SalesOrder, self).__init__(arg1, arg2) def validate(self): super(SalesOrder, self).validate() self.validate_order_type() self.validate_delivery_date() self.validate_proj_cust() self.validate_po() self.validate_uom_is_integer("stock_uom", "stock_qty") self.validate_uom_is_integer("uom", "qty") self.validate_for_items() self.validate_warehouse() self.validate_drop_ship() from erpnext.stock.doctype.packed_item.packed_item import make_packing_list make_packing_list(self) self.validate_with_previous_doc() self.set_status() if not self.billing_status: self.billing_status = 'Not Billed' if not self.delivery_status: self.delivery_status = 'Not Delivered' def validate_po(self): # validate p.o date v/s delivery date if self.po_date: for d in self.get("items"): if d.delivery_date and getdate(self.po_date) > getdate(d.delivery_date): frappe.throw(_("Row #{0}: Expected Delivery Date cannot be before Purchase Order Date") .format(d.idx)) if self.po_no and self.customer: so = frappe.db.sql("select name from `tabSales Order` \ where ifnull(po_no, '') = %s and name != %s and docstatus < 2\ and customer = %s", (self.po_no, self.name, self.customer)) if so and so[0][0] and not cint(frappe.db.get_single_value("Selling Settings", "allow_against_multiple_purchase_orders")): frappe.msgprint(_("Warning: Sales Order {0} already exists against Customer's Purchase Order {1}").format(so[0][0], self.po_no)) def validate_for_items(self): check_list = [] for d in self.get('items'): check_list.append(cstr(d.item_code)) # used for production plan d.transaction_date = self.transaction_date tot_avail_qty = frappe.db.sql("select projected_qty from `tabBin` \ where item_code = %s and warehouse = %s", (d.item_code, d.warehouse)) d.projected_qty = tot_avail_qty and flt(tot_avail_qty[0][0]) or 0 # check for same entry multiple times unique_chk_list = set(check_list) if len(unique_chk_list) != len(check_list) and \ not cint(frappe.db.get_single_value("Selling Settings", "allow_multiple_items")): frappe.msgprint(_("Same item has been entered multiple times"), title=_("Warning"), indicator='orange') def product_bundle_has_stock_item(self, product_bundle): """Returns true if product bundle has stock item""" ret = len(frappe.db.sql("""select i.name from tabItem i, `tabProduct Bundle Item` pbi where pbi.parent = %s and pbi.item_code = i.name and i.is_stock_item = 1""", product_bundle)) return ret def validate_sales_mntc_quotation(self): for d in self.get('items'): if d.prevdoc_docname: res = frappe.db.sql("select name from 
`tabQuotation` where name=%s and order_type = %s", (d.prevdoc_docname, self.order_type)) if not res: frappe.msgprint(_("Quotation {0} not of type {1}") .format(d.prevdoc_docname, self.order_type)) def validate_order_type(self): super(SalesOrder, self).validate_order_type() def validate_delivery_date(self): if self.order_type == 'Sales': if not self.delivery_date: self.delivery_date = max([d.delivery_date for d in self.get("items")]) if self.delivery_date: for d in self.get("items"): if not d.delivery_date: d.delivery_date = self.delivery_date if getdate(self.transaction_date) > getdate(d.delivery_date): frappe.msgprint(_("Expected Delivery Date should be after Sales Order Date"), indicator='orange', title=_('Warning')) else: frappe.throw(_("Please enter Delivery Date")) self.validate_sales_mntc_quotation() def validate_proj_cust(self): if self.project and self.customer_name: res = frappe.db.sql("""select name from `tabProject` where name = %s and (customer = %s or ifnull(customer,'')='')""", (self.project, self.customer)) if not res: frappe.throw(_("Customer {0} does not belong to project {1}").format(self.customer, self.project)) def validate_warehouse(self): super(SalesOrder, self).validate_warehouse() for d in self.get("items"): if (frappe.db.get_value("Item", d.item_code, "is_stock_item") == 1 or (self.has_product_bundle(d.item_code) and self.product_bundle_has_stock_item(d.item_code))) \ and not d.warehouse and not cint(d.delivered_by_supplier): frappe.throw(_("Delivery warehouse required for stock item {0}").format(d.item_code), WarehouseRequired) def validate_with_previous_doc(self): super(SalesOrder, self).validate_with_previous_doc({ "Quotation": { "ref_dn_field": "prevdoc_docname", "compare_fields": [["company", "="], ["currency", "="]] } }) def update_enquiry_status(self, prevdoc, flag): enq = frappe.db.sql("select t2.prevdoc_docname from `tabQuotation` t1, `tabQuotation Item` t2 where t2.parent = t1.name and t1.name=%s", prevdoc) if enq: frappe.db.sql("update `tabOpportunity` set status = %s where name=%s",(flag,enq[0][0])) def update_prevdoc_status(self, flag): for quotation in list(set([d.prevdoc_docname for d in self.get("items")])): if quotation: doc = frappe.get_doc("Quotation", quotation) if doc.docstatus==2: frappe.throw(_("Quotation {0} is cancelled").format(quotation)) doc.set_status(update=True) doc.update_opportunity() def validate_drop_ship(self): for d in self.get('items'): if d.delivered_by_supplier and not d.supplier: frappe.throw(_("Row #{0}: Set Supplier for item {1}").format(d.idx, d.item_code)) def on_submit(self): self.check_credit_limit() self.update_reserved_qty() frappe.get_doc('Authorization Control').validate_approving_authority(self.doctype, self.company, self.base_grand_total, self) self.update_project() self.update_prevdoc_status('submit') def on_cancel(self): # Cannot cancel closed SO if self.status == 'Closed': frappe.throw(_("Closed order cannot be cancelled. 
Unclose to cancel.")) self.check_nextdoc_docstatus() self.update_reserved_qty() self.update_project() self.update_prevdoc_status('cancel') frappe.db.set(self, 'status', 'Cancelled') def update_project(self): project_list = [] if self.project: project = frappe.get_doc("Project", self.project) project.flags.dont_sync_tasks = True project.update_sales_costing() project.save() project_list.append(self.project) def check_credit_limit(self): from erpnext.selling.doctype.customer.customer import check_credit_limit check_credit_limit(self.customer, self.company) def check_nextdoc_docstatus(self): # Checks Delivery Note submit_dn = frappe.db.sql_list("""select t1.name from `tabDelivery Note` t1,`tabDelivery Note Item` t2 where t1.name = t2.parent and t2.against_sales_order = %s and t1.docstatus = 1""", self.name) if submit_dn: frappe.throw(_("Delivery Notes {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_dn))) # Checks Sales Invoice submit_rv = frappe.db.sql_list("""select t1.name from `tabSales Invoice` t1,`tabSales Invoice Item` t2 where t1.name = t2.parent and t2.sales_order = %s and t1.docstatus = 1""", self.name) if submit_rv: frappe.throw(_("Sales Invoice {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_rv))) #check maintenance schedule submit_ms = frappe.db.sql_list("""select t1.name from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2 where t2.parent=t1.name and t2.sales_order = %s and t1.docstatus = 1""", self.name) if submit_ms: frappe.throw(_("Maintenance Schedule {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_ms))) # check maintenance visit submit_mv = frappe.db.sql_list("""select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2 where t2.parent=t1.name and t2.prevdoc_docname = %s and t1.docstatus = 1""",self.name) if submit_mv: frappe.throw(_("Maintenance Visit {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_mv))) # check production order pro_order = frappe.db.sql_list("""select name from `tabProduction Order` where sales_order = %s and docstatus = 1""", self.name) if pro_order: frappe.throw(_("Production Order {0} must be cancelled before cancelling this Sales Order").format(comma_and(pro_order))) def check_modified_date(self): mod_db = frappe.db.get_value("Sales Order", self.name, "modified") date_diff = frappe.db.sql("select TIMEDIFF('%s', '%s')" % ( mod_db, cstr(self.modified))) if date_diff and date_diff[0][0]: frappe.throw(_("{0} {1} has been modified. 
Please refresh.").format(self.doctype, self.name)) def update_status(self, status): self.check_modified_date() self.set_status(update=True, status=status) self.update_reserved_qty() self.notify_update() clear_doctype_notifications(self) def update_reserved_qty(self, so_item_rows=None): """update requested qty (before ordered_qty is updated)""" item_wh_list = [] def _valid_for_reserve(item_code, warehouse): if item_code and warehouse and [item_code, warehouse] not in item_wh_list \ and frappe.db.get_value("Item", item_code, "is_stock_item"): item_wh_list.append([item_code, warehouse]) for d in self.get("items"): if (not so_item_rows or d.name in so_item_rows) and not d.delivered_by_supplier: if self.has_product_bundle(d.item_code): for p in self.get("packed_items"): if p.parent_detail_docname == d.name and p.parent_item == d.item_code: _valid_for_reserve(p.item_code, p.warehouse) else: _valid_for_reserve(d.item_code, d.warehouse) for item_code, warehouse in item_wh_list: update_bin_qty(item_code, warehouse, { "reserved_qty": get_reserved_qty(item_code, warehouse) }) def on_update(self): pass def before_update_after_submit(self): self.validate_po() self.validate_drop_ship() self.validate_supplier_after_submit() def validate_supplier_after_submit(self): """Check that supplier is the same after submit if PO is already made""" exc_list = [] for item in self.items: if item.supplier: supplier = frappe.db.get_value("Sales Order Item", {"parent": self.name, "item_code": item.item_code}, "supplier") if item.ordered_qty > 0.0 and item.supplier != supplier: exc_list.append(_("Row #{0}: Not allowed to change Supplier as Purchase Order already exists").format(item.idx)) if exc_list: frappe.throw('\n'.join(exc_list)) def update_delivery_status(self): """Update delivery status from Purchase Order for drop shipping""" tot_qty, delivered_qty = 0.0, 0.0 for item in self.items: if item.delivered_by_supplier: item_delivered_qty = frappe.db.sql("""select sum(qty)<|fim▁hole|> where poi.sales_order_item = %s and poi.item_code = %s and poi.parent = po.name and po.docstatus = 1 and po.status = 'Delivered'""", (item.name, item.item_code)) item_delivered_qty = item_delivered_qty[0][0] if item_delivered_qty else 0 item.db_set("delivered_qty", flt(item_delivered_qty), update_modified=False) delivered_qty += item.delivered_qty tot_qty += item.qty self.db_set("per_delivered", flt(delivered_qty/tot_qty) * 100, update_modified=False) def set_indicator(self): """Set indicator for portal""" if self.per_billed < 100 and self.per_delivered < 100: self.indicator_color = "orange" self.indicator_title = _("Not Paid and Not Delivered") elif self.per_billed == 100 and self.per_delivered < 100: self.indicator_color = "orange" self.indicator_title = _("Paid and Not Delivered") else: self.indicator_color = "green" self.indicator_title = _("Paid") def get_production_order_items(self): '''Returns items with BOM that already do not have a linked production order''' items = [] for table in [self.items, self.packed_items]: for i in table: bom = get_default_bom_item(i.item_code) if bom: stock_qty = i.qty if i.doctype == 'Packed Item' else i.stock_qty items.append(dict( item_code= i.item_code, bom = bom, warehouse = i.warehouse, pending_qty= stock_qty - flt(frappe.db.sql('''select sum(qty) from `tabProduction Order` where production_item=%s and sales_order=%s''', (i.item_code, self.name))[0][0]) )) return items def on_recurring(self, reference_doc, subscription_doc): mcount = month_map[subscription_doc.frequency] self.set("delivery_date", 
get_next_date(reference_doc.delivery_date, mcount, cint(subscription_doc.repeat_on_day))) for d in self.get("items"): reference_delivery_date = frappe.db.get_value("Sales Order Item", {"parent": reference_doc.name, "item_code": d.item_code, "idx": d.idx}, "delivery_date") d.set("delivery_date", get_next_date(reference_delivery_date, mcount, cint(subscription_doc.repeat_on_day))) def get_list_context(context=None): from erpnext.controllers.website_list_for_contact import get_list_context list_context = get_list_context(context) list_context.update({ 'show_sidebar': True, 'show_search': True, 'no_breadcrumbs': True, 'title': _('Orders'), }) return list_context @frappe.whitelist() def close_or_unclose_sales_orders(names, status): if not frappe.has_permission("Sales Order", "write"): frappe.throw(_("Not permitted"), frappe.PermissionError) names = json.loads(names) for name in names: so = frappe.get_doc("Sales Order", name) if so.docstatus == 1: if status == "Closed": if so.status not in ("Cancelled", "Closed") and (so.per_delivered < 100 or so.per_billed < 100): so.update_status(status) else: if so.status == "Closed": so.update_status('Draft') frappe.local.message_log = [] @frappe.whitelist() def make_material_request(source_name, target_doc=None): def postprocess(source, doc): doc.material_request_type = "Purchase" def update_item(source, target, source_parent): target.project = source_parent.project doc = get_mapped_doc("Sales Order", source_name, { "Sales Order": { "doctype": "Material Request", "validation": { "docstatus": ["=", 1] } }, "Packed Item": { "doctype": "Material Request Item", "field_map": { "parent": "sales_order", "stock_uom": "uom" }, "postprocess": update_item }, "Sales Order Item": { "doctype": "Material Request Item", "field_map": { "parent": "sales_order", "stock_uom": "uom", "stock_qty": "qty" }, "condition": lambda doc: not frappe.db.exists('Product Bundle', doc.item_code), "postprocess": update_item } }, target_doc, postprocess) return doc @frappe.whitelist() def make_project(source_name, target_doc=None): def postprocess(source, doc): doc.project_type = "External" doc.project_name = source.name doc = get_mapped_doc("Sales Order", source_name, { "Sales Order": { "doctype": "Project", "validation": { "docstatus": ["=", 1] }, "field_map":{ "name" : "sales_order", "base_grand_total" : "estimated_costing", } }, "Sales Order Item": { "doctype": "Project Task", "field_map": { "description": "title", }, } }, target_doc, postprocess) return doc @frappe.whitelist() def make_delivery_note(source_name, target_doc=None): def set_missing_values(source, target): if source.po_no: if target.po_no: target_po_no = target.po_no.split(", ") target_po_no.append(source.po_no) target.po_no = ", ".join(list(set(target_po_no))) if len(target_po_no) > 1 else target_po_no[0] else: target.po_no = source.po_no target.ignore_pricing_rule = 1 target.run_method("set_missing_values") target.run_method("calculate_taxes_and_totals") # set company address target.update(get_company_address(target.company)) if target.company_address: target.update(get_fetch_values("Delivery Note", 'company_address', target.company_address)) def update_item(source, target, source_parent): target.base_amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.base_rate) target.amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.rate) target.qty = flt(source.qty) - flt(source.delivered_qty) item = frappe.db.get_value("Item", target.item_code, ["item_group", "selling_cost_center"], as_dict=1) 
target.cost_center = frappe.db.get_value("Project", source_parent.project, "cost_center") \ or item.selling_cost_center \ or frappe.db.get_value("Item Group", item.item_group, "default_cost_center") target_doc = get_mapped_doc("Sales Order", source_name, { "Sales Order": { "doctype": "Delivery Note", "validation": { "docstatus": ["=", 1] } }, "Sales Order Item": { "doctype": "Delivery Note Item", "field_map": { "rate": "rate", "name": "so_detail", "parent": "against_sales_order", }, "postprocess": update_item, "condition": lambda doc: abs(doc.delivered_qty) < abs(doc.qty) and doc.delivered_by_supplier!=1 }, "Sales Taxes and Charges": { "doctype": "Sales Taxes and Charges", "add_if_empty": True }, "Sales Team": { "doctype": "Sales Team", "add_if_empty": True } }, target_doc, set_missing_values) return target_doc @frappe.whitelist() def make_sales_invoice(source_name, target_doc=None, ignore_permissions=False): def postprocess(source, target): set_missing_values(source, target) #Get the advance paid Journal Entries in Sales Invoice Advance target.set_advances() def set_missing_values(source, target): target.is_pos = 0 target.ignore_pricing_rule = 1 target.flags.ignore_permissions = True target.run_method("set_missing_values") target.run_method("calculate_taxes_and_totals") # set company address target.update(get_company_address(target.company)) if target.company_address: target.update(get_fetch_values("Sales Invoice", 'company_address', target.company_address)) def update_item(source, target, source_parent): target.amount = flt(source.amount) - flt(source.billed_amt) target.base_amount = target.amount * flt(source_parent.conversion_rate) target.qty = target.amount / flt(source.rate) if (source.rate and source.billed_amt) else source.qty item = frappe.db.get_value("Item", target.item_code, ["item_group", "selling_cost_center"], as_dict=1) target.cost_center = frappe.db.get_value("Project", source_parent.project, "cost_center") \ or item.selling_cost_center \ or frappe.db.get_value("Item Group", item.item_group, "default_cost_center") doclist = get_mapped_doc("Sales Order", source_name, { "Sales Order": { "doctype": "Sales Invoice", "field_map": { "party_account_currency": "party_account_currency" }, "validation": { "docstatus": ["=", 1] } }, "Sales Order Item": { "doctype": "Sales Invoice Item", "field_map": { "name": "so_detail", "parent": "sales_order", }, "postprocess": update_item, "condition": lambda doc: doc.qty and (doc.base_amount==0 or abs(doc.billed_amt) < abs(doc.amount)) }, "Sales Taxes and Charges": { "doctype": "Sales Taxes and Charges", "add_if_empty": True }, "Sales Team": { "doctype": "Sales Team", "add_if_empty": True } }, target_doc, postprocess, ignore_permissions=ignore_permissions) return doclist @frappe.whitelist() def make_maintenance_schedule(source_name, target_doc=None): maint_schedule = frappe.db.sql("""select t1.name from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2 where t2.parent=t1.name and t2.sales_order=%s and t1.docstatus=1""", source_name) if not maint_schedule: doclist = get_mapped_doc("Sales Order", source_name, { "Sales Order": { "doctype": "Maintenance Schedule", "validation": { "docstatus": ["=", 1] } }, "Sales Order Item": { "doctype": "Maintenance Schedule Item", "field_map": { "parent": "sales_order" }, "add_if_empty": True } }, target_doc) return doclist @frappe.whitelist() def make_maintenance_visit(source_name, target_doc=None): visit = frappe.db.sql("""select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit 
Purpose` t2 where t2.parent=t1.name and t2.prevdoc_docname=%s and t1.docstatus=1 and t1.completion_status='Fully Completed'""", source_name) if not visit: doclist = get_mapped_doc("Sales Order", source_name, { "Sales Order": { "doctype": "Maintenance Visit", "validation": { "docstatus": ["=", 1] } }, "Sales Order Item": { "doctype": "Maintenance Visit Purpose", "field_map": { "parent": "prevdoc_docname", "parenttype": "prevdoc_doctype" }, "add_if_empty": True } }, target_doc) return doclist @frappe.whitelist() def get_events(start, end, filters=None): """Returns events for Gantt / Calendar view rendering. :param start: Start date-time. :param end: End date-time. :param filters: Filters (JSON). """ from frappe.desk.calendar import get_event_conditions conditions = get_event_conditions("Sales Order", filters) data = frappe.db.sql(""" select `tabSales Order`.name, `tabSales Order`.customer_name, `tabSales Order`.status, `tabSales Order`.delivery_status, `tabSales Order`.billing_status, `tabSales Order Item`.delivery_date from `tabSales Order`, `tabSales Order Item` where `tabSales Order`.name = `tabSales Order Item`.parent and (ifnull(`tabSales Order Item`.delivery_date, '0000-00-00')!= '0000-00-00') \ and (`tabSales Order Item`.delivery_date between %(start)s and %(end)s) and `tabSales Order`.docstatus < 2 {conditions} """.format(conditions=conditions), { "start": start, "end": end }, as_dict=True, update={"allDay": 0}) return data @frappe.whitelist() def make_purchase_order_for_drop_shipment(source_name, for_supplier, target_doc=None): def set_missing_values(source, target): target.supplier = for_supplier target.apply_discount_on = "" target.additional_discount_percentage = 0.0 target.discount_amount = 0.0 default_price_list = frappe.get_value("Supplier", for_supplier, "default_price_list") if default_price_list: target.buying_price_list = default_price_list if any( item.delivered_by_supplier==1 for item in source.items): if source.shipping_address_name: target.shipping_address = source.shipping_address_name target.shipping_address_display = source.shipping_address else: target.shipping_address = source.customer_address target.shipping_address_display = source.address_display target.customer_contact_person = source.contact_person target.customer_contact_display = source.contact_display target.customer_contact_mobile = source.contact_mobile target.customer_contact_email = source.contact_email else: target.customer = "" target.customer_name = "" target.run_method("set_missing_values") target.run_method("calculate_taxes_and_totals") def update_item(source, target, source_parent): target.schedule_date = source.delivery_date target.qty = flt(source.qty) - flt(source.ordered_qty) target.stock_qty = (flt(source.qty) - flt(source.ordered_qty)) * flt(source.conversion_factor) doclist = get_mapped_doc("Sales Order", source_name, { "Sales Order": { "doctype": "Purchase Order", "field_no_map": [ "address_display", "contact_display", "contact_mobile", "contact_email", "contact_person" ], "validation": { "docstatus": ["=", 1] } }, "Sales Order Item": { "doctype": "Purchase Order Item", "field_map": [ ["name", "sales_order_item"], ["parent", "sales_order"], ["stock_uom", "stock_uom"], ["uom", "uom"], ["conversion_factor", "conversion_factor"], ["delivery_date", "schedule_date"] ], "field_no_map": [ "rate", "price_list_rate" ], "postprocess": update_item, "condition": lambda doc: doc.ordered_qty < doc.qty and doc.supplier == for_supplier } }, target_doc, set_missing_values) return doclist 
@frappe.whitelist() def get_supplier(doctype, txt, searchfield, start, page_len, filters): supp_master_name = frappe.defaults.get_user_default("supp_master_name") if supp_master_name == "Supplier Name": fields = ["name", "supplier_type"] else: fields = ["name", "supplier_name", "supplier_type"] fields = ", ".join(fields) return frappe.db.sql("""select {field} from `tabSupplier` where docstatus < 2 and ({key} like %(txt)s or supplier_name like %(txt)s) and name in (select supplier from `tabSales Order Item` where parent = %(parent)s) order by if(locate(%(_txt)s, name), locate(%(_txt)s, name), 99999), if(locate(%(_txt)s, supplier_name), locate(%(_txt)s, supplier_name), 99999), name, supplier_name limit %(start)s, %(page_len)s """.format(**{ 'field': fields, 'key': frappe.db.escape(searchfield) }), { 'txt': "%%%s%%" % txt, '_txt': txt.replace("%", ""), 'start': start, 'page_len': page_len, 'parent': filters.get('parent') }) @frappe.whitelist() def make_production_orders(items, sales_order, company, project=None): '''Make Production Orders against the given Sales Order for the given `items`''' items = json.loads(items).get('items') out = [] for i in items: production_order = frappe.get_doc(dict( doctype='Production Order', production_item=i['item_code'], bom_no=i['bom'], qty=i['pending_qty'], company=company, sales_order=sales_order, project=project, fg_warehouse=i['warehouse'] )).insert() production_order.set_production_order_operations() production_order.save() out.append(production_order) return [p.name for p in out] @frappe.whitelist() def update_status(status, name): so = frappe.get_doc("Sales Order", name) so.update_status(status) def get_default_bom_item(item_code): bom = frappe.get_all('BOM', dict(item=item_code, is_active=True), order_by='is_default desc') bom = bom[0].name if bom else None return bom<|fim▁end|>
from `tabPurchase Order Item` poi, `tabPurchase Order` po
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>""" SleekXMPP: The Sleek XMPP Library Copyright (C) 2010 Nathanael C. Fritz This file is part of SleekXMPP. See the file LICENSE for copying permission. """ from sleekxmpp.plugins.base import PluginManager, PluginNotFound, BasePlugin from sleekxmpp.plugins.base import register_plugin, load_plugin __all__ = [ # XEPS 'xep_0004', # Data Forms 'xep_0009', # Jabber-RPC 'xep_0012', # Last Activity 'xep_0013', # Flexible Offline Message Retrieval 'xep_0016', # Privacy Lists 'xep_0020', # Feature Negotiation 'xep_0027', # Current Jabber OpenPGP Usage 'xep_0030', # Service Discovery 'xep_0033', # Extended Stanza Addresses 'xep_0045', # Multi-User Chat (Client) 'xep_0047', # In-Band Bytestreams 'xep_0048', # Bookmarks 'xep_0049', # Private XML Storage 'xep_0050', # Ad-hoc Commands 'xep_0054', # vcard-temp 'xep_0059', # Result Set Management 'xep_0060', # Pubsub (Client) 'xep_0065', # SOCKS5 Bytestreams 'xep_0066', # Out of Band Data 'xep_0071', # XHTML-IM 'xep_0077', # In-Band Registration # 'xep_0078', # Non-SASL auth. Don't automatically load 'xep_0079', # Advanced Message Processing 'xep_0080', # User Location 'xep_0082', # XMPP Date and Time Profiles 'xep_0084', # User Avatar 'xep_0085', # Chat State Notifications 'xep_0086', # Legacy Error Codes 'xep_0091', # Legacy Delayed Delivery 'xep_0092', # Software Version 'xep_0106', # JID Escaping 'xep_0107', # User Mood 'xep_0108', # User Activity 'xep_0115', # Entity Capabilities 'xep_0118', # User Tune 'xep_0122', # Data Forms Validation 'xep_0128', # Extended Service Discovery 'xep_0131', # Standard Headers and Internet Metadata 'xep_0133', # Service Administration 'xep_0152', # Reachability Addresses 'xep_0153', # vCard-Based Avatars 'xep_0163', # Personal Eventing Protocol 'xep_0172', # User Nickname 'xep_0184', # Message Receipts 'xep_0186', # Invisible Command 'xep_0191', # Blocking Command 'xep_0196', # User Gaming 'xep_0198', # Stream Management 'xep_0199', # Ping 'xep_0202', # Entity Time 'xep_0203', # Delayed Delivery 'xep_0221', # Data Forms Media Element 'xep_0222', # Persistent Storage of Public Data via Pubsub 'xep_0223', # Persistent Storage of Private Data via Pubsub 'xep_0224', # Attention<|fim▁hole|> 'xep_0242', # XMPP Client Compliance 2009 'xep_0249', # Direct MUC Invitations 'xep_0256', # Last Activity in Presence 'xep_0257', # Client Certificate Management for SASL EXTERNAL 'xep_0258', # Security Labels in XMPP 'xep_0270', # XMPP Compliance Suites 2010 'xep_0279', # Server IP Check 'xep_0280', # Message Carbons 'xep_0297', # Stanza Forwarding 'xep_0302', # XMPP Compliance Suites 2012 'xep_0308', # Last Message Correction 'xep_0313', # Message Archive Management 'xep_0319', # Last User Interaction in Presence 'xep_0323', # IoT Systems Sensor Data 'xep_0325', # IoT Systems Control 'xep_0332', # HTTP Over XMPP Transport ]<|fim▁end|>
'xep_0231', # Bits of Binary 'xep_0235', # OAuth Over XMPP
<|file_name|>raytrace.rs<|end_file_name|><|fim▁begin|>use rand; use std; use prelude::*; use scene; pub struct Output { data : Vec<RGB>, w : u32, h : u32, } impl Output { pub fn new(w: u32, h: u32) -> Self { Output { data : std::iter::repeat(RGB { r: 0.0, g: 0.0, b: 0.0 }).take((w * h) as usize).collect(), w : w, h : h, } } pub fn pixel_mut(&mut self, x: u32, y: u32) -> &mut RGB { self.data.get_mut((y * self.w + x) as usize).unwrap() } pub fn to_vec(self) -> Vec<RGB> { self.data } } /// A unit of work to be done. /// Consists of a ray to trace, an attenuation, and a pixel location to draw to. struct Work { pub ray : Ray, pub pixel_x : u32, pub pixel_y : u32, pub attenuation : RGB, } fn cast<'a>(s: &'a scene::T, ray: &Ray) -> Option<scene::Collision<'a>> { let mut first_collision: Option<scene::Collision<'a>> = None; for object in &s.objects { if let Some(collision) = object.intersect_ray(ray) { if let Some(first_collision) = first_collision.as_ref() { if first_collision.toi < collision.toi { continue } } first_collision = Some(collision); } } first_collision } fn perturb<Rng: rand::Rng>(unperturbed: Vector, normal: Vector, shininess: f32, rng: &mut Rng) -> Vector { let rotation = { let y = unperturbed; let x = if unperturbed.x <= 0.5 { // specialized cross-product for crossing with x axis Vector::new(0.0, unperturbed.z, -unperturbed.y) } else { // specialized cross-product for crossing with y axis Vector::new(-unperturbed.z, 0.0, unperturbed.x) }; let x = normalize(x); let z = cross(y, x); Matrix::from_cols(x, y, z) }; for _ in 0..4 { let altitude = rng.next_f32().asin(); let altitude = std::f32::consts::FRAC_PI_2 * (altitude / std::f32::consts::FRAC_PI_2).powf(shininess.exp()); let altitude = std::f32::consts::FRAC_PI_2 - altitude; let azimuth = rng.next_f32() * 2.0 * std::f32::consts::PI; let xz = altitude.cos(); let direction = rotation * Vector::new(azimuth.cos() * xz, altitude.sin(), azimuth.sin() * xz); if dot(direction, normal) >= 0.0 { return direction } } // If we failed this many times, we're probably hitting some corner case (e.g. divide-by-zero). 
unperturbed } fn do_work<Rng: rand::Rng, AddWork: FnMut(Work)> ( s: &scene::T, work: &Work, rng: &mut Rng, add_work: &mut AddWork, output: &mut Output, ) { let min_attenuation = 0.01; if work.attenuation.r < min_attenuation && work.attenuation.g < min_attenuation && work.attenuation.b < min_attenuation { return } let collision = match cast(s, &work.ray) { None => return, Some(c) => c, }; let color = match collision.object.texture { scene::Texture::SolidColor(color) => color, }; let color = work.attenuation * color; *output.pixel_mut(work.pixel_x, work.pixel_y) += color * collision.object.emittance; let make_ray = { let location = collision.location; move |direction| { Ray { direction : direction, origin : location + 0.01 * direction, } } }; let make_work = { let pixel_x = work.pixel_x; let pixel_y = work.pixel_y; move |ray, attenuation| { Work { ray : ray, attenuation : attenuation, pixel_x : pixel_x, pixel_y : pixel_y, } } }; let reflected = work.ray.direction - 2.0 * dot(work.ray.direction, collision.normal) * collision.normal; let reflected = perturb(reflected, collision.normal, collision.object.shininess, rng); add_work(make_work(make_ray(reflected), color * collision.object.reflectance)); let transmitted = work.ray.direction; let transmitted = perturb(transmitted, -collision.normal, collision.object.shininess, rng); add_work(make_work(make_ray(transmitted), color * collision.object.transmittance)); } pub fn scene<Rng: rand::Rng>(s: &scene::T, width: u32, height: u32, rng: &mut Rng) -> Output { let mut output = Output::new(width, height); let mut work_items = std::collections::VecDeque::new(); let aspect = width as f32 / height as f32; let max_y = (s.fovy / 2.0).tan(); let scale = 2.0 * max_y / height as f32; let shift = -max_y; let view_to_world = Matrix::from_cols(s.x(), s.y(), s.z()); for y in 0 .. height { for x in 0 .. width { // in view coordinates let ray =<|fim▁hole|> scale * x as f32 + shift * aspect, scale * y as f32 + shift, 1.0, ); work_items.push_back( Work { ray : Ray { origin : s.eye, direction : normalize(view_to_world * ray), }, pixel_x : x, pixel_y : y, attenuation : RGB { r: 1.0, g: 1.0, b: 1.0 }, } ); } } while let Some(work) = work_items.pop_front() { let mut add_work = |work| work_items.push_back(work); do_work(s, &work, rng, &mut add_work, &mut output); } output }<|fim▁end|>
Vector::new(
<|file_name|>layer_utils.py<|end_file_name|><|fim▁begin|>from __future__ import print_function import inspect import numpy as np import theano from ..layers.advanced_activations import LeakyReLU, PReLU from ..layers.core import Dense, Merge, Dropout, Activation, Reshape, Flatten, RepeatVector, Layer from ..layers.core import ActivityRegularization, TimeDistributedDense, AutoEncoder, MaxoutDense from ..layers.embeddings import Embedding, WordContextProduct from ..layers.noise import GaussianNoise, GaussianDropout from ..layers.normalization import BatchNormalization from ..layers.recurrent import SimpleRNN, SimpleDeepRNN, GRU, LSTM, JZS1, JZS2, JZS3 from ..layers import containers from .. import regularizers from .. import constraints def container_from_config(layer_dict): name = layer_dict.get('name') hasParams = False if name == 'Merge': mode = layer_dict.get('mode') layers = layer_dict.get('layers') layer_list = [] for layer in layers: init_layer = container_from_config(layer) layer_list.append(init_layer) merge_layer = Merge(layer_list, mode) return merge_layer elif name == 'Sequential': layers = layer_dict.get('layers') layer_list = [] for layer in layers: init_layer = container_from_config(layer) layer_list.append(init_layer) seq_layer = containers.Sequential(layer_list) return seq_layer elif name == 'Graph': graph_layer = containers.Graph() inputs = layer_dict.get('input_config') for input in inputs: graph_layer.add_input(**input) nodes = layer_dict.get('node_config') for node in nodes: layer = container_from_config(layer_dict['nodes'].get(node['name'])) node['layer'] = layer graph_layer.add_node(**node) outputs = layer_dict.get('output_config') for output in outputs: graph_layer.add_output(**output) return graph_layer else: # The case in which layer_dict represents an "atomic" layer layer_dict.pop('name') if 'parameters' in layer_dict: params = layer_dict.get('parameters') layer_dict.pop('parameters') hasParams = True for k, v in layer_dict.items(): # For now, this can only happen for regularizers and constraints if isinstance(v, dict): vname = v.get('name') v.pop('name') if vname in [x for x, y in inspect.getmembers(constraints, predicate=inspect.isclass)]: layer_dict[k] = constraints.get(vname, v) if vname in [x for x, y in inspect.getmembers(regularizers, predicate=inspect.isclass)]: layer_dict[k] = regularizers.get(vname, v) base_layer = get_layer(name, layer_dict) if hasParams: shaped_params = [] for param in params: data = np.asarray(param.get('data')) shape = tuple(param.get('shape')) shaped_params.append(data.reshape(shape)) base_layer.set_weights(shaped_params) return base_layer def print_layer_shapes(model, input_shapes): """ Utility function to print the shape of the output at each layer of a Model Arguments: model: instance of Model / Merge<|fim▁hole|> if model.__class__.__name__ in ['Sequential', 'Merge']: # in this case input_shapes is a tuple, or a list [shape1, shape2] if not isinstance(input_shapes[0], tuple): input_shapes = [input_shapes] inputs = model.get_input(train=False) if not isinstance(inputs, list): inputs = [inputs] input_dummy = [np.zeros(shape, dtype=np.float32) for shape in input_shapes] layers = model.layers elif model.__class__.__name__ == 'Graph': # in this case input_shapes is a dictionary inputs = [model.inputs[name].input for name in model.input_order] input_dummy = [np.zeros(input_shapes[name], dtype=np.float32) for name in model.input_order] layers = [model.nodes[c['name']] for c in model.node_config] print("input shapes : ", input_shapes) for 
l in layers: shape_f = theano.function(inputs, l.get_output(train=False).shape, on_unused_input='ignore') out_shape = tuple(shape_f(*input_dummy)) config = l.get_config() print('shape after %s: %s' % (config['name'], out_shape)) from .generic_utils import get_from_module def get_layer(identifier, kwargs=None): return get_from_module(identifier, globals(), 'layer', instantiate=True, kwargs=kwargs)<|fim▁end|>
input_shapes: dict (Graph), list of tuples (Merge) or tuple (Sequential) """
<|file_name|>shippingInvoice.js<|end_file_name|><|fim▁begin|>import accounting from "accounting-js"; import _ from "lodash"; import { Meteor } from "meteor/meteor"; import $ from "jquery"; import { Template } from "meteor/templating"; import { ReactiveVar } from "meteor/reactive-var"; import { i18next, Logger, formatNumber, Reaction } from "/client/api"; import { NumericInput } from "/imports/plugins/core/ui/client/components"; import { Orders, Shops, Packages } from "/lib/collections"; import { ButtonSelect } from "../../../../ui/client/components/button"; import DiscountList from "/imports/plugins/core/discounts/client/components/list"; import InvoiceContainer from "../../containers/invoiceContainer.js"; import LineItemsContainer from "../../containers/lineItemsContainer.js"; import TotalActionsContainer from "../../containers/totalActionsContainer.js"; // helper to return the order payment object // the first credit paymentMethod on the order // returns entire payment method function orderCreditMethod(order) { return order.billing.filter(value => value.paymentMethod.method === "credit")[0]; } // // core order shipping invoice templates // Template.coreOrderShippingInvoice.onCreated(function () { this.state = new ReactiveDict(); this.refunds = new ReactiveVar([]); this.refundAmount = new ReactiveVar(0.00); this.state.setDefault({ isCapturing: false, isRefunding: false, isFetching: true }); this.autorun(() => { const currentData = Template.currentData(); const order = Orders.findOne(currentData.orderId); const shop = Shops.findOne({}); this.state.set("order", order); this.state.set("currency", shop.currencies[shop.currency]); if (order) { Meteor.call("orders/refunds/list", order, (error, result) => { if (error) Logger.warn(error); this.refunds.set(result); this.state.set("isFetching", false); }); } }); }); Template.coreOrderShippingInvoice.helpers({ isCapturing() { const instance = Template.instance(); if (instance.state.get("isCapturing")) { instance.$(":input").attr("disabled", true); instance.$("#btn-capture-payment").text("Capturing"); return true; } return false; }, isRefunding() { const instance = Template.instance(); if (instance.state.get("isRefunding")) { instance.$("#btn-refund-payment").text(i18next.t("order.refunding")); return true; } return false; }, isFetching() { const instance = Template.instance(); if (instance.state.get("isFetching")) { return true; } return false; }, DiscountList() { return DiscountList; }, InvoiceContainer() { return InvoiceContainer; }, buttonSelectComponent() { return { component: ButtonSelect, buttons: [ { name: "Approve", i18nKeyLabel: "order.approveInvoice", active: true, status: "info", eventAction: "approveInvoice", bgColor: "bg-info", buttonType: "submit" }, { name: "Cancel", i18nKeyLabel: "order.cancelInvoice", active: false, status: "danger", eventAction: "cancelOrder", bgColor: "bg-danger", buttonType: "button" } ] }; }, LineItemsContainer() { return LineItemsContainer; }, TotalActionsContainer() { return TotalActionsContainer; }, orderId() { const instance = Template.instance(); const state = instance.state; const order = state.get("order"); return order._id; } }); /** * coreOrderAdjustments events */ Template.coreOrderShippingInvoice.events({ /** * Click Start Cancel Order * @param {Event} event - Event Object * @param {Template} instance - Blaze Template * @return {void} */ "click [data-event-action=cancelOrder]": (event, instance) => { event.preventDefault(); const order = instance.state.get("order"); const invoiceTotal = 
order.billing[0].invoice.total; const currencySymbol = instance.state.get("currency").symbol; Meteor.subscribe("Packages"); const packageId = order.billing[0].paymentMethod.paymentPackageId; const settingsKey = order.billing[0].paymentMethod.paymentSettingsKey; // check if payment provider supports de-authorize const checkSupportedMethods = Packages.findOne({ _id: packageId, shopId: Reaction.getShopId() }).settings[settingsKey].support; const orderStatus = order.billing[0].paymentMethod.status; const orderMode = order.billing[0].paymentMethod.mode; let alertText; if (_.includes(checkSupportedMethods, "de-authorize") || (orderStatus === "completed" && orderMode === "capture")) { alertText = i18next.t("order.applyRefundDuringCancelOrder", { currencySymbol, invoiceTotal }); } Alerts.alert({ title: i18next.t("order.cancelOrder"), text: alertText, type: "warning", showCancelButton: true, showCloseButton: true, confirmButtonColor: "#98afbc", cancelButtonColor: "#98afbc", confirmButtonText: i18next.t("order.cancelOrderNoRestock"), cancelButtonText: i18next.t("order.cancelOrderThenRestock") }, (isConfirm, cancel)=> { let returnToStock; if (isConfirm) { returnToStock = false; return Meteor.call("orders/cancelOrder", order, returnToStock, err => { if (err) { $(".alert").removeClass("hidden").text(err.message); } }); } if (cancel === "cancel") { returnToStock = true; return Meteor.call("orders/cancelOrder", order, returnToStock, err => { if (err) { $(".alert").removeClass("hidden").text(err.message); } }); } }); }, /** * Submit form * @param {Event} event - Event object * @param {Template} instance - Blaze Template * @return {void} */ "submit form[name=capture]": (event, instance) => { event.preventDefault(); const state = instance.state; const order = state.get("order"); const paymentMethod = orderCreditMethod(order); const orderTotal = accounting.toFixed( paymentMethod.invoice.subtotal + paymentMethod.invoice.shipping + paymentMethod.invoice.taxes , 2); const discount = state.get("field-discount") || order.discount; // TODO: review Discount cannot be greater than original total price // logic is probably not valid any more. Discounts aren't valid below 0 order. 
if (discount > orderTotal) { Alerts.inline("Discount cannot be greater than original total price", "error", { placement: "coreOrderShippingInvoice", i18nKey: "order.invalidDiscount", autoHide: 10000 }); } else if (orderTotal === accounting.toFixed(discount, 2)) { Alerts.alert({ title: i18next.t("order.fullDiscountWarning"), showCancelButton: true, confirmButtonText: i18next.t("order.applyDiscount") }, (isConfirm) => { if (isConfirm) { Meteor.call("orders/approvePayment", order, (error) => { if (error) { Logger.warn(error); } }); } }); } else { Meteor.call("orders/approvePayment", order, (error) => { if (error) { Logger.warn(error); if (error.error === "orders/approvePayment.discount-amount") { Alerts.inline("Discount cannot be greater than original total price", "error", { placement: "coreOrderShippingInvoice", i18nKey: "order.invalidDiscount", autoHide: 10000 }); } } }); } }, /** * Submit form * @param {Event} event - Event object * @param {Template} instance - Blaze Template * @return {void} */ "click [data-event-action=applyRefund]": (event, instance) => { event.preventDefault(); const { state } = Template.instance(); const currencySymbol = state.get("currency").symbol; const order = instance.state.get("order"); const paymentMethod = orderCreditMethod(order).paymentMethod; const orderTotal = paymentMethod.amount; const discounts = paymentMethod.discounts; const refund = state.get("field-refund") || 0; const refunds = Template.instance().refunds.get(); let refundTotal = 0; _.each(refunds, function (item) { refundTotal += parseFloat(item.amount); }); let adjustedTotal; // TODO extract Stripe specific fullfilment payment handling out of core. // Stripe counts discounts as refunds, so we need to re-add the discount to not "double discount" in the adjustedTotal if (paymentMethod.processor === "Stripe") { adjustedTotal = accounting.toFixed(orderTotal + discounts - refundTotal, 2); } else { adjustedTotal = accounting.toFixed(orderTotal - refundTotal, 2); } if (refund > adjustedTotal) { Alerts.inline("Refund(s) total cannot be greater than adjusted total", "error", { placement: "coreOrderRefund", i18nKey: "order.invalidRefund", autoHide: 10000 }); } else { Alerts.alert({ title: i18next.t("order.applyRefundToThisOrder", { refund: refund, currencySymbol: currencySymbol }), showCancelButton: true, confirmButtonText: i18next.t("order.applyRefund") }, (isConfirm) => { if (isConfirm) { state.set("isRefunding", true); Meteor.call("orders/refunds/create", order._id, paymentMethod, refund, (error, result) => { if (error) { Alerts.alert(error.reason); } if (result) { Alerts.toast(i18next.t("mail.alerts.emailSent"), "success"); } $("#btn-refund-payment").text(i18next.t("order.applyRefund")); state.set("field-refund", 0); state.set("isRefunding", false); }); } }); } }, "click [data-event-action=makeAdjustments]": (event, instance) => { event.preventDefault(); Meteor.call("orders/makeAdjustmentsToInvoice", instance.state.get("order")); }, "click [data-event-action=capturePayment]": (event, instance) => { event.preventDefault(); instance.state.set("isCapturing", true); const order = instance.state.get("order"); Meteor.call("orders/capturePayments", order._id); if (order.workflow.status === "new") { Meteor.call("workflow/pushOrderWorkflow", "coreOrderWorkflow", "processing", order); Reaction.Router.setQueryParams({ filter: "processing", _id: order._id }); } }, "change input[name=refund_amount], keyup input[name=refund_amount]": (event, instance) => { 
instance.refundAmount.set(accounting.unformat(event.target.value)); } }); /** * coreOrderShippingInvoice helpers */ Template.coreOrderShippingInvoice.helpers({ NumericInput() { return NumericInput; }, numericInputProps(fieldName, value = 0, enabled = true) { const { state } = Template.instance(); const order = state.get("order"); const paymentMethod = orderCreditMethod(order); const status = paymentMethod.status; const isApprovedAmount = (status === "approved" || status === "completed"); return { component: NumericInput, numericType: "currency", value: value, disabled: !enabled, isEditing: !isApprovedAmount, // Dont allow editing if its approved format: state.get("currency"), classNames: { input: { amount: true }, text: { "text-success": status === "completed" } }, onChange(event, data) { state.set(`field-${fieldName}`, data.numberValue); } }; }, refundInputProps() { const { state } = Template.instance(); const order = state.get("order"); const paymentMethod = orderCreditMethod(order).paymentMethod; const refunds = Template.instance().refunds.get(); let refundTotal = 0; _.each(refunds, function (item) { refundTotal += parseFloat(item.amount); }); const adjustedTotal = paymentMethod.amount - refundTotal; return { component: NumericInput, numericType: "currency", value: state.get("field-refund") || 0, maxValue: adjustedTotal, format: state.get("currency"), classNames: { input: { amount: true } }, onChange(event, data) { state.set("field-refund", data.numberValue); } }; },<|fim▁hole|> }, invoice() { const instance = Template.instance(); const order = instance.state.get("order"); const invoice = Object.assign({}, order.billing[0].invoice, { totalItems: _.sumBy(order.items, (o) => o.quantity) }); return invoice; }, money(amount) { return formatNumber(amount); }, disabled() { const instance = Template.instance(); const order = instance.state.get("order"); const status = orderCreditMethod(order).paymentMethod.status; if (status === "approved" || status === "completed") { return "disabled"; } return ""; }, paymentPendingApproval() { const instance = Template.instance(); const order = instance.state.get("order"); const status = orderCreditMethod(order).paymentMethod.status; return status === "created" || status === "adjustments" || status === "error"; }, canMakeAdjustments() { const instance = Template.instance(); const order = instance.state.get("order"); const status = orderCreditMethod(order).paymentMethod.status; if (status === "approved" || status === "completed") { return false; } return true; }, showAfterPaymentCaptured() { const instance = Template.instance(); const order = instance.state.get("order"); const orderStatus = orderCreditMethod(order).paymentMethod.status; return orderStatus === "completed"; }, paymentApproved() { const instance = Template.instance(); const order = instance.state.get("order"); return order.billing[0].paymentMethod.status === "approved"; }, paymentCaptured() { const instance = Template.instance(); const order = instance.state.get("order"); const orderStatus = orderCreditMethod(order).paymentMethod.status; const orderMode = orderCreditMethod(order).paymentMethod.mode; return orderStatus === "completed" || (orderStatus === "refunded" && orderMode === "capture"); }, refundTransactions() { const instance = Template.instance(); const order = instance.state.get("order"); const transactions = orderCreditMethod(order).paymentMethod.transactions; return _.filter(transactions, (transaction) => { return transaction.type === "refund"; }); }, refunds() { const refunds = 
Template.instance().refunds.get(); if (_.isArray(refunds)) { return refunds.reverse(); } return refunds; }, /** * Get the total after all refunds * @return {Number} the amount after all refunds */ adjustedTotal() { const instance = Template.instance(); const order = instance.state.get("order"); const paymentMethod = orderCreditMethod(order).paymentMethod; const discounts = orderCreditMethod(order).invoice.discounts; const refunds = Template.instance().refunds.get(); let refundTotal = 0; _.each(refunds, function (item) { refundTotal += parseFloat(item.amount); }); if (paymentMethod.processor === "Stripe") { return Math.abs(paymentMethod.amount + discounts - refundTotal); } return Math.abs(paymentMethod.amount - refundTotal); }, capturedDisabled() { const isLoading = Template.instance().state.get("isCapturing"); if (isLoading) { return "disabled"; } return null; }, refundSubmitDisabled() { const amount = Template.instance().state.get("field-refund") || 0; const isLoading = Template.instance().state.get("isRefunding"); if (amount === 0 || isLoading) { return "disabled"; } return null; }, /** * Order * @summary find a single order using the order id spplied with the template * data context * @return {Object} A single order */ order() { const instance = Template.instance(); const order = instance.state.get("order"); return order; }, shipment() { const instance = Template.instance(); const order = instance.state.get("order"); const shipment = _.filter(order.shipping, { _id: currentData.fulfillment._id })[0]; return shipment; }, discounts() { const enabledPaymentsArr = []; const apps = Reaction.Apps({ provides: "paymentMethod", enabled: true }); for (app of apps) { if (app.enabled === true) enabledPaymentsArr.push(app); } let discount = false; for (enabled of enabledPaymentsArr) { if (enabled.packageName === "discount-codes") { discount = true; break; } } return discount; }, items() { const instance = Template.instance(); const order = instance.state.get("order"); const currentData = Template.currentData(); const shipment = currentData.fulfillment; // returns order items with shipping detail const returnItems = _.map(order.items, (item) => { const shipping = shipment.shipmentMethod; return _.extend(item, { shipping }); }); let items; // if avalara tax has been enabled it adds a "taxDetail" field for every item if (order.taxes !== undefined) { const taxes = order.taxes.slice(0, -1); items = _.map(returnItems, (item) => { const taxDetail = _.find(taxes, { lineNumber: item._id }); return _.extend(item, { taxDetail }); }); } else { items = returnItems; } return items; } });<|fim▁end|>
refundAmount() { return Template.instance().refundAmount;
<|file_name|>ar.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- #This is generated code - do not edit encoding = 'utf-8' dict = { '&About...': '&\xd8\xb9\xd9\x86...', '&Delete Window': '&\xd8\xa7\xd8\xad\xd8\xb0\xd9\x81 \xd8\xa7\xd9\x84\xd9\x86\xd8\xa7\xd9\x81\xd8\xb0\xd8\xa9', '&Describe Action': '&\xd8\xa3\xd9\x88\xd8\xb5\xd9\x81 \xd8\xa7\xd9\x84\xd8\xb9\xd9\x85\xd9\x84\xd9\x8a\xd8\xa9', '&Execute Action': '&\xd9\x86\xd9\x81\xd8\xb0 \xd8\xa7\xd9\x84\xd8\xb9\xd9\x85\xd9\x84\xd9\x8a\xd8\xa9', '&Folding': '&\xd8\xa7\xd9\x84\xd8\xb7\xd9\x8a',<|fim▁hole|>'&Revert': '&\xd8\xa5\xd8\xb3\xd8\xaa\xd8\xb1\xd8\xac\xd8\xb9', '&Save...': '&\xd8\xad\xd9\x81\xd8\xb8...', '&Show Toolbars': '&\xd8\xb9\xd8\xb1\xd8\xb6 \xd8\xb4\xd8\xb1\xd9\x8a\xd8\xb7 \xd8\xa7\xd9\x84\xd8\xa3\xd8\xaf\xd9\x88\xd8\xa7\xd8\xa9', '&Word Count': '&\xd8\xb9\xd8\xaf \xd8\xa7\xd9\x84\xd9\x83\xd9\x84\xd9\x85\xd8\xa7\xd8\xaa', 'About this program': '\xd8\xad\xd9\x88\xd9\x92\xd9\x84 \xd9\x87\xd8\xb0\xd8\xa7 \xd8\xa7\xd9\x84\xd8\xa8\xd8\xb1\xd9\x86\xd8\xa7\xd9\x85\xd8\xac', 'Actions': '\xd8\xa5\xd8\xac\xd8\xb1\xd8\xa7\xd8\xa1\xd8\xa7\xd8\xaa', 'Attributes': '\xd8\xa7\xd9\x84\xd8\xb5\xd9\x91\xd9\x81\xd8\xa7\xd8\xaa', 'Background': '\xd8\xa7\xd9\x84\xd8\xae\xd9\x84\xd9\x81\xd9\x8a\xd9\x91\xd8\xa9', 'Cancel': '\xd8\xa5\xd9\x84\xd8\xba\xd8\xa7\xef\xba\x80', 'Case': '\xd8\xa7\xd9\x84\xd8\xad\xd8\xa7\xd9\x84\xd8\xa9', 'Clear Playlist': '\xd9\x85\xd8\xb3\xd8\xad \xd9\x82\xd8\xa7\xd8\xa6\xd9\x85\xd8\xa9 \xd8\xa7\xd9\x84\xd8\xaa\xd8\xb4\xd8\xba\xd9\x8a\xd9\x84', 'Close Tab': '\xd8\xa3\xd8\xba\xd9\x84\xd9\x82 \xd8\xa7\xd9\x84\xd9\x84\xd8\xb3\xd8\xa7\xd9\x86', 'Close the current tab': '\xd8\xa3\xd8\xba\xd9\x84\xd9\x82 \xd8\xa7\xd9\x84\xd9\x84\xd8\xb3\xd8\xa7\xd9\x86 \xd8\xa7\xd9\x84\xd8\xad\xd8\xa7\xd9\x84\xd9\x8a', 'Color': '\xd8\xa7\xd9\x84\xd9\x84\xd9\x88\xd9\x86', 'Contrast': '\xd8\xa7\xd9\x84\xd8\xaa\xd8\xa8\xd8\xa7\xd9\x8a\xd9\x86', 'Copy': '\xd9\x86\xd8\xb3\xd8\xae', 'Cut': '\xd9\x82\xd8\xb5', 'Debug': '\xd8\xaa\xd9\x86\xd9\x82\xd9\x8a\xd8\xad', 'Documents': '\xd8\xa7\xd9\x84\xd9\x85\xd8\xb3\xd8\xaa\xd9\x86\xd8\xaf\xd8\xa7\xd8\xaa', 'E&xit': '&\xd8\xae\xd8\xb1\xd9\x88\xd8\xac', }<|fim▁end|>
'&Help': '&\xd9\x85\xd8\xb3\xd8\xa7\xd8\xb9\xd8\xaf\xd8\xa9', '&Line Numbers': '&\xd8\xb9\xd8\xaf\xd8\xaf \xd8\xa7\xd9\x84\xd8\xb3\xd8\xb7\xd9\x88\xd8\xb1', '&New Window': '&\xd9\x86\xd8\xa7\xd9\x81\xd8\xb0\xd8\xa9 \xd8\xac\xd8\xaf\xd9\x8a\xd8\xaf\xd8\xa9', '&Preferences...': '&\xd8\xa7\xd9\x84\xd8\xaa\xd9\x81\xd8\xb6\xd9\x8a\xd9\x84\xd8\xa7\xd8\xaa...',
<|file_name|>problem_019.py<|end_file_name|><|fim▁begin|># coding=utf-8 """ This module, problem_019.py, solves the nineteenth project euler problem. """ from project_euler_problems.problem import Problem from datetime import date ''' You are given the following information, but you may prefer to do some research for yourself. 1 Jan 1900 was a Monday. Thirty days has September, April, June and November. All the rest have thirty-one, Saving February alone, Which has twenty-eight, rain or shine.<|fim▁hole|>''' # Solution from captainsafia, thanks! Link : https://gist.github.com/captainsafia/3390092 class ImplementedProblem(Problem): """This specific problem's implementation. """ def get_solution(self): """Solves the solution for problem 019. :return: The solution for problem 019. """ number_of_sundays = 0 for year in range(1901, 2001): for month in range(1, 13): # date(...) will create a Date() instance. # weekday() gets the current day as an integer between 0-6. if date(year, month, 1).weekday() == 6: number_of_sundays += 1 return number_of_sundays<|fim▁end|>
And on leap years, twenty-nine. A leap year occurs on any year evenly divisible by 4, but not on a century unless it is divisible by 400. How many Sundays fell on the first of the month during the twentieth century (1 Jan 1901 to 31 Dec 2000)?
<|file_name|>NotificationController.java<|end_file_name|><|fim▁begin|>package net.therap.controller; import net.therap.domain.Notification; import net.therap.domain.User; import net.therap.service.NotificationService; import net.therap.service.UserService; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Controller; import org.springframework.web.bind.annotation.*; import org.springframework.web.servlet.ModelAndView; import javax.servlet.http.HttpSession; import java.util.List; /** * @author rifatul.islam * @since 8/12/14. */ @Controller public class NotificationController { @Autowired private NotificationService notificationService; @Autowired private UserService userService; @RequestMapping(value = "/notification/{receiverId}", method = RequestMethod.GET) public ModelAndView showAllNotification(@PathVariable int receiverId) { List<Notification> notificationList = notificationService.getAllNotification(receiverId); ModelAndView modelAndView = new ModelAndView("user/notification"); modelAndView.addObject("notificationList", notificationList); return modelAndView; } @RequestMapping("/getSenderImage/{userId}") @ResponseBody public byte[] getProfilePicture(@PathVariable int userId) { User user = userService.getUserById(userId); return user.getProfilePicture(); } @RequestMapping(value = "/sendNotification", method = RequestMethod.POST) @ResponseBody public void sendNotification(@RequestParam("receiverId") int receiverId, @RequestParam("bookId") int bookId , @RequestParam("type") int type, @RequestParam("isSeen") boolean isSeen, HttpSession session) { User user = (User) session.getAttribute("user"); notificationService.addNewNotification(user.getUserId(), receiverId, bookId, type, isSeen); } @RequestMapping(value = "/updateNotification", method = RequestMethod.POST) @ResponseBody public void updateNotification(@RequestParam("id") int notificationId, @RequestParam("receiverId") int receiverId, @RequestParam("bookId") int bookId, @RequestParam("type") int type, @RequestParam("isSeen") boolean isSeen, HttpSession session) { <|fim▁hole|> user.getUserId(), receiverId, bookId, type, isSeen); } @RequestMapping(value = "/notification/getNotificationCounter", method = RequestMethod.GET) @ResponseBody public long getNotificationCounter(HttpSession session) { User user = (User) session.getAttribute("user"); long total = notificationService.getUnseenNotification(user.getUserId()); System.out.println("total = " + total); return total; } }<|fim▁end|>
User user = (User) session.getAttribute("user"); notificationService.updateAndInsertNotification(notificationId,
<|file_name|>0002_auto_20150211_2011.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('zeltlager_registration', '0001_initial'), ] operations = [ migrations.RemoveField( model_name='jugendgruppe', name='address', ), migrations.DeleteModel( name='Jugendgruppe', ), migrations.RemoveField( model_name='zeltlagerdurchgang', name='address', ), migrations.RemoveField( model_name='zeltlagerdurchgang',<|fim▁hole|> ), ]<|fim▁end|>
name='description',
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2017 Onestein (<http://www.onestein.eu>)<|fim▁hole|> from . import test_employee_display_own_info<|fim▁end|>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
<|file_name|>EstimationOfPi.py<|end_file_name|><|fim▁begin|>import random import math def estimatePi(error): itt=1000 #itterations previousPI=0 while True: hits=0 # number of hits for i in range(0,itt): x=random.uniform(0,1) y=random.uniform(0,1) z=x*x+y*y #Pythagorean Theorum if math.sqrt(z)<=1: #if point(x,y)lies within the triangle hits=hits+1 currentPI=(hits*4)/itt #print(currentPI) if previousPI==0: previousPI=currentPI continue if (math.fabs(previousPI-currentPI)<error) is True: return currentPI#return The estimation of pi is 4*hits/shots previousPI=(currentPI+previousPI)/2 #previousPI=currentPI error=float(input("Enter the error value :")) pi = estimatePi(error)<|fim▁hole|><|fim▁end|>
print("Pi : ",pi)
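The script above estimates π by sampling points in the unit square and counting how many satisfy x² + y² ≤ 1, i.e. fall inside the quarter circle (the inline comment says "triangle", but the test is the quarter-circle condition). A vectorized sketch of the same idea follows; it assumes NumPy is available, which the original script does not use.

```python
# Vectorized Monte Carlo sketch: the fraction of uniform points in the unit
# square with x^2 + y^2 <= 1 approximates the quarter-circle area, pi/4.
import numpy as np

def estimate_pi(samples: int = 1_000_000, seed: int = 0) -> float:
    rng = np.random.default_rng(seed)
    x = rng.random(samples)
    y = rng.random(samples)
    hits = np.count_nonzero(x * x + y * y <= 1.0)
    return 4.0 * hits / samples

if __name__ == "__main__":
    print(estimate_pi())  # close to 3.1416 for a million samples
```

Note that the original's stopping rule compares two consecutive noisy estimates against a user-supplied tolerance, which bounds their difference rather than the error relative to π itself.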
<|file_name|>slidercc.core.js<|end_file_name|><|fim▁begin|>/*! * CC Slider by Chop-chop * http://shop.chop-chop.org */ (function($){ 'use strict'; var defaults = { // basic mode: 'fade', autoPlay: 3000, pauseOnHover: true, animationSpeed: 1000, pagination: true, arrows: true, loop: false, keyboard: true, mousewheel: true, touch: true, lockDuringAnimation: false, synchronize: null, remoteControl: null, // advanced startAt: 0, // zero-based changePaginations: 'before', // before or after animationMethod: 'css', // css, velocity or js slidesListSelector: 'ul.slides', slidesSelector: 'ul.slides > li', // technical namespace: 'scc',<|fim▁hole|> syncNamespace: 'scc', remoteNamespace: 'scc', arrowTexts: { prev: 'Previous slide', next: 'Next slide' }, limitWidthToParent: true }; if(!console){ var consoleFix = function() {}; window.console = {log:consoleFix, info:consoleFix, warn:consoleFix, debug:consoleFix, error:consoleFix}; } if(!$) { console.error('[CC Slider] No jQuery detected.'); return false; } var Slider = function(elem, opts){ this.$wrap = $(elem); if(!this.$wrap.data('slidercc')){ if($.isEmptyObject(this.modes)) { console.error('[CC Slider] No modules found.'); return false; } this.mode = this.modes[opts.mode || defaults.mode]; if(!this.mode) { for(var firstMod in this.modes) { break; } this.mode = this.modes[firstMod]; } this.mode = new this.mode(); this.mode.slider = this; this.opts = $.extend({}, $.fn.slidercc.defaults, this.mode.defaults, opts); this.$wrap.data('slidercc', this.opts); this.namespace = this.opts.namespace; this.$wrap.addClass(this.namespace+'-wrapper'); this.init(); } else { this.$wrap.trigger(this.$wrap.data('slidercc').namespace+'-reset', opts); } }; Slider.prototype.modes = {}; Slider.prototype.init = function(){ var _this = this; this.loadSlides(); if(this.testSlideLength()){ this.makeViewport(); this.$wrap.css('opacity', 0); if(this.opts.arrows) this.makeArrows(); if(this.opts.pagination) this.makePagination(); this.current = Math.min(this.opts.startAt, this.slideCount-1); this.next = this.current; this.wrapWidth = this.$wrap.width(); this.setActiveClasses(); this.bind(); this.checkAnimationMethod(); this.setAnimations(); if(this.opts.synchronize!=null){ this.$sync = $(this.opts.synchronize); if(!this.$sync.length) this.opts.synchronize = null; else { this.synchronize(); } } if(this.opts.remoteControl!=null){ this.$remote = $(this.opts.remoteControl); if(!this.$remote.length) this.opts.remoteControl = null; else { this.remoteControl(); } } this.mode.init(); this.$slides.eq(this.current).imagesLoaded(function(){ _this.animate.fadeIn(_this.$wrap); _this.ready = true; // console.log('Slider.init imagesLoaded',_this.$wrap.attr('id')); _this.$wrap.trigger(_this.namespace+'-ready'); }); } }; Slider.prototype.loadSlides = function(){ this.$slideList = $(this.$wrap).find(this.opts.slidesListSelector); this.$slides = $(this.$wrap).find(this.opts.slidesSelector); this.slideCount = this.$slides.length; this.opts.$slides = this.$slides; }; Slider.prototype.testSlideLength = function(){ if(!this.slideCount){ console.warn('[CC Slider] No slides found.'); return false; } return true; }; Slider.prototype.bind = function(){ var _this = this; $(window).on('resize', $.proxy(this.resize, this)); this.$wrap.on(this.namespace+'-reset', $.proxy(this.reset, this)); this.$wrap.on(this.namespace+'-'+this.opts.changePaginations+' '+this.namespace+'-ready', $.proxy(this.setActiveClasses, this)); if(this.opts.arrows){ this.arrows.$prev.on('click.'+this.namespace, function(e){ 
e.preventDefault(); _this.$wrap.trigger(_this.namespace+'-prev'); }); this.arrows.$next.on('click.'+this.namespace, function(e){ e.preventDefault(); _this.$wrap.trigger(_this.namespace+'-next'); }); } if(this.opts.pagination){ this.$pagination.find('a').on('click.'+this.namespace, function(e){ e.preventDefault(); _this.$wrap.trigger(_this.namespace+'-slideTo', $(this).attr('href').replace('#', '')); }); } if(this.opts.keyboard && !$('body').data('slidercc-keyboard')) this.bindKeyboard(); if(this.opts.mousewheel && $.event.special.mousewheel) this.bindMousewheel(); if(this.opts.touch && $.event.special.swipeleft && $.event.special.swiperight) this.bindTouch(); if(this.opts.autoPlay){ _this.startAutoplay(); if(this.opts.pauseOnHover){ this.$wrap.on('mouseenter.'+this.namespace, function(e){ // console.log('mouseenter', _this.$wrap.attr('id')); e.preventDefault(); _this.stopAutoplay(); }); this.$wrap.on('mouseleave.'+this.namespace, function(e){ e.preventDefault(); _this.startAutoplay(); }); } } }; Slider.prototype.bindKeyboard = function(){ var _this = this; $('body').data('slidercc-keyboard', this.$wrap); $(window).on('keyup', function(e){ var key = e.keyCode; if(key==37) _this.$wrap.trigger(_this.namespace+'-prev'); else if(key==39) _this.$wrap.trigger(_this.namespace+'-next'); }); }; Slider.prototype.bindMousewheel = function(){ var _this = this; this.$wrap.on('mousewheel', function(e){ e.preventDefault(); if(e.deltaY<0 || e.deltaY==0 && e.deltaX>0){ _this.$wrap.trigger(_this.namespace+'-next'); } else { _this.$wrap.trigger(_this.namespace+'-prev'); } }); }; Slider.prototype.bindTouch = function(){ var _this = this; this.$wrap.on('swiperight', function(e){ e.preventDefault(); _this.$wrap.trigger(_this.namespace+'-prev'); _this.opts.autoPlay = false; _this.stopAutoplay(); }); this.$wrap.on('swipeleft', function(e){ console.log(e); e.preventDefault(); _this.$wrap.trigger(_this.namespace+'-next'); _this.opts.autoPlay = false; _this.stopAutoplay(); }); }; Slider.prototype.synchronize = function(){ this.$wrap.on(this.namespace+'-slideTo', $.proxy(function(e, d, force, squash){ if(squash) return; var target = e instanceof jQuery.Event ? 
d : e; this.$sync.trigger(this.opts.syncNamespace+'-slideTo', [target, force, true]); }, this)); }; Slider.prototype.remoteControl = function(){ var _this = this; this.$slides.on('click.'+this.namespace, $.proxy(function(e){ e.preventDefault(); var target = this.$slides.index(e.currentTarget); this.$remote.add(this.$wrap).trigger(this.opts.remoteNamespace+'-slideTo', [target, null, true]); }, this)); this.$wrap.on('mouseenter.'+this.namespace, function(e){ _this.$remote.trigger('mouseenter'); }); this.$wrap.on('mouseleave.'+this.namespace, function(e){ _this.$remote.trigger('mouseleave'); }); }; Slider.prototype.unbind = function(){ this.$wrap.off(this.namespace+'-reset'); }; Slider.prototype.destroy = function(){ this.unbind(); this.$arrows.remove(); this.$pagination.remove(); this.$viewport.after(this.$viewport.children()).remove(); this.removeData('slidercc'); }; Slider.prototype.reset = function(){ $.extend(this.opts, arguments[arguments.length-1]); this.mode.reset(); }; Slider.prototype.checkAnimationMethod = function(){ // var _this = this; if(this.opts.animationMethod=='css' && window.Modernizr && (Modernizr.csstransforms==false || Modernizr.csstransitions==false)) { // if($.fn.velocity) { // this.opts.animationMethod = 'velocity'; // } // else { this.opts.animationMethod = 'js'; // } } }; Slider.prototype.setAnimations = function(){ var _this = this; if(this.opts.animationMethod=='css'){ this.animate = { fadeOut: function(elem){ elem.css('opacity', 0); }, fadeIn: function(elem){ elem.css('opacity', 1); }, slide: function(elem, val, force){ if(force) { elem.addClass(_this.namespace+'-no-trans'); setTimeout(function(){ elem.removeClass(_this.namespace+'-no-trans'); }, 10); } elem.css('transform', 'translate3d('+val+'px,0,0)'); } }; this.$wrap.css('transition', 'opacity '+this.opts.animationSpeed+'ms'); if(this.opts.animateHeight){ this.$viewport.css('transition', 'height '+this.opts.animationSpeed+'ms'); } } else if(this.opts.animationMethod=='velocity') { this.animate = { fadeOut: function(elem){ elem.velocity('stop').velocity('fadeOut', {duration: _this.opts.animationSpeed, display: 'block'}); }, fadeIn: function(elem){ elem.velocity('stop').velocity('fadeIn', {duration: _this.opts.animationSpeed}); }, slide: function(elem, val, force){ elem.velocity('stop').velocity({translateX: val}, {duration: force ? 0 : _this.opts.animationSpeed}); } }; } else { this.animate = { fadeOut: function(elem){ elem.stop(1,0).fadeTo(_this.opts.animationSpeed, 0); }, fadeIn: function(elem){ elem.stop(1,0).fadeTo(_this.opts.animationSpeed, 1); }, slide: function(elem, val, force){ elem.stop(1,0).animate({marginLeft: val}, force ? 
0 : _this.opts.animationSpeed); } }; } }; Slider.prototype.makeViewport = function(){ this.$viewport = $('<div class="'+this.namespace+'-viewport"></div>'); this.$wrap.append(this.$viewport); this.$viewport.append(this.$wrap.children()); }; Slider.prototype.makeArrows = function(){ this.$arrows = $('<div class="'+this.namespace+'-arrows"></div>'); this.arrows = { $prev: $('<a href="#" class="'+this.namespace+'-prev">'+this.opts.arrowTexts.prev+'</a>'), $next: $('<a href="#" class="'+this.namespace+'-next">'+this.opts.arrowTexts.next+'</a>') }; this.$arrows.append(this.arrows.$prev, this.arrows.$next); this.$wrap.append(this.$arrows); }; Slider.prototype.makePagination = function(){ var html = '<div class="'+this.namespace+'-pagination">'; for(var i=0; i<this.slideCount; i++){ html += '<a href="#'+i+'">'+(i+1)+'</a>'; } html += '</div>'; this.$pagination = $(html); this.$wrap.append(this.$pagination); }; Slider.prototype.setActiveClasses = function(){ var current = this.opts.changePaginations=='after' ? this.current : this.next; this.$slides.removeClass(this.namespace+'-active'); this.$slides.eq(current).addClass(this.namespace+'-active'); if(this.opts.arrows && !this.opts.loop){ if(current==0){ this.arrows.$prev.addClass(this.namespace+'-disabled'); } else { this.arrows.$prev.removeClass(this.namespace+'-disabled'); } if(current==this.slideCount-1){ this.arrows.$next.addClass(this.namespace+'-disabled'); } else { this.arrows.$next.removeClass(this.namespace+'-disabled'); } } if(this.opts.pagination){ this.$pagination.find('a[href=#'+current+']').addClass(this.namespace+'-active').siblings().removeClass(this.namespace+'-active'); } }; Slider.prototype.startAutoplay = function(){ var _this = this; if(this.autoTimer) clearInterval(this.autoTimer); this.autoTimer = setInterval(function(){ _this.$wrap.trigger(_this.namespace+'-next', {auto: true}); }, this.opts.autoPlay); }; Slider.prototype.stopAutoplay = function(){ if(this.autoTimer) clearInterval(this.autoTimer); }; Slider.prototype.resize = function(){ if(this.opts.limitWidthToParent){ this.$wrap.width(''); if(this.$wrap.width() > this.$wrap.parent().width()){ this.$wrap.width( this.$wrap.parent().width() ); } } this.wrapWidth = this.$wrap.width(); this.mode.resize(); }; $.fn.slidercc = function(args){ return this.each(function(){ new Slider(this, args); }); }; $.fn.slidercc.defaults = defaults; $.fn.slidercc.modeDefaults = {}; $.fn.slidercc.insertMode = function(name, mode){ var newMode = {}; newMode[name] = mode; // $.extend(Slider.prototype.modes, newMode); Slider.prototype.modes[name] = mode; $.fn.slidercc.modeDefaults[name] = mode.prototype.defaults; }; })(jQuery);<|fim▁end|>
<|file_name|>swap_network_trotter_hubbard.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """A variational ansatz based on a linear swap network Trotter step.""" from typing import Iterable, Optional, Sequence, Tuple, cast import numpy import sympy import cirq from openfermioncirq import swap_network from openfermioncirq.variational.ansatz import VariationalAnsatz from openfermioncirq.variational.letter_with_subscripts import ( LetterWithSubscripts) class SwapNetworkTrotterHubbardAnsatz(VariationalAnsatz): """A Hubbard model ansatz based on the fermionic swap network Trotter step. Each Trotter step includes 3 parameters: one for the horizontal hopping terms, one for the vertical hopping terms, and one for the on-site interaction. This ansatz is similar to the one used in arXiv:1507.08969, but corresponds to a different ordering for simulating the Hamiltonian terms. """ def __init__(self, x_dim: float, y_dim: float, tunneling: float, coulomb: float, periodic: bool=True, iterations: int=1, adiabatic_evolution_time: Optional[float]=None, qubits: Optional[Sequence[cirq.Qid]]=None ) -> None: """ Args: iterations: The number of iterations of the basic template to include in the circuit. The number of parameters grows linearly with this value. adiabatic_evolution_time: The time scale for Hamiltonian evolution used to determine the default initial parameters of the ansatz. This is the value A from the docstring of this class. If not specified, defaults to the sum of the absolute values of the entries of the two-body tensor of the Hamiltonian. qubits: Qubits to be used by the ansatz circuit. 
If not specified, then qubits will automatically be generated by the `_generate_qubits` method.<|fim▁hole|> self.coulomb = coulomb self.periodic = periodic self.iterations = iterations if adiabatic_evolution_time is None: adiabatic_evolution_time = 0.1*abs(coulomb)*iterations self.adiabatic_evolution_time = cast(float, adiabatic_evolution_time) super().__init__(qubits) def params(self) -> Iterable[sympy.Symbol]: """The parameters of the ansatz.""" for i in range(self.iterations): if self.x_dim > 1: yield LetterWithSubscripts('Th', i) if self.y_dim > 1: yield LetterWithSubscripts('Tv', i) yield LetterWithSubscripts('V', i) def param_bounds(self) -> Optional[Sequence[Tuple[float, float]]]: """Bounds on the parameters.""" bounds = [] for param in self.params(): s = 1.0 if param.letter == 'V' else 2.0 bounds.append((-s, s)) return bounds def _generate_qubits(self) -> Sequence[cirq.Qid]: """Produce qubits that can be used by the ansatz circuit.""" n_qubits = 2*self.x_dim*self.y_dim return cirq.LineQubit.range(n_qubits) def operations(self, qubits: Sequence[cirq.Qid]) -> cirq.OP_TREE: """Produce the operations of the ansatz circuit.""" for i in range(self.iterations): # Apply one- and two-body interactions with a swap network that # reverses the order of the modes def one_and_two_body_interaction(p, q, a, b) -> cirq.OP_TREE: th_symbol = LetterWithSubscripts('Th', i) tv_symbol = LetterWithSubscripts('Tv', i) v_symbol = LetterWithSubscripts('V', i) if _is_horizontal_edge( p, q, self.x_dim, self.y_dim, self.periodic): yield cirq.ISwapPowGate(exponent=-th_symbol).on(a, b) if _is_vertical_edge( p, q, self.x_dim, self.y_dim, self.periodic): yield cirq.ISwapPowGate(exponent=-tv_symbol).on(a, b) if _are_same_site_opposite_spin(p, q, self.x_dim*self.y_dim): yield cirq.CZPowGate(exponent=v_symbol).on(a, b) yield swap_network( qubits, one_and_two_body_interaction, fermionic=True) qubits = qubits[::-1] # Apply one- and two-body interactions again. This time, reorder # them so that the entire iteration is symmetric def one_and_two_body_interaction_reversed_order(p, q, a, b ) -> cirq.OP_TREE: th_symbol = LetterWithSubscripts('Th', i) tv_symbol = LetterWithSubscripts('Tv', i) v_symbol = LetterWithSubscripts('V', i) if _are_same_site_opposite_spin(p, q, self.x_dim*self.y_dim): yield cirq.CZPowGate(exponent=v_symbol).on(a, b) if _is_vertical_edge( p, q, self.x_dim, self.y_dim, self.periodic): yield cirq.ISwapPowGate(exponent=-tv_symbol).on(a, b) if _is_horizontal_edge( p, q, self.x_dim, self.y_dim, self.periodic): yield cirq.ISwapPowGate(exponent=-th_symbol).on(a, b) yield swap_network( qubits, one_and_two_body_interaction_reversed_order, fermionic=True, offset=True) qubits = qubits[::-1] def default_initial_params(self) -> numpy.ndarray: """Approximate evolution by H(t) = T + (t/A)V. Sets the parameters so that the ansatz circuit consists of a sequence of second-order Trotter steps approximating the dynamics of the time-dependent Hamiltonian H(t) = T + (t/A)V, where T is the one-body term and V is the two-body term of the Hamiltonian used to generate the ansatz circuit, and t ranges from 0 to A, where A is equal to `self.adibatic_evolution_time`. The number of Trotter steps is equal to the number of iterations in the ansatz. This choice is motivated by the idea of state preparation via adiabatic evolution. The dynamics of H(t) are approximated as follows. First, the total evolution time of A is split into segments of length A / r, where r is the number of Trotter steps. 
Then, each Trotter step simulates H(t) for a time length of A / r, where t is the midpoint of the corresponding time segment. As an example, suppose A is 100 and the ansatz has two iterations. Then the approximation is achieved with two Trotter steps. The first Trotter step simulates H(25) for a time length of 50, and the second Trotter step simulates H(75) for a time length of 50. """ total_time = self.adiabatic_evolution_time step_time = total_time / self.iterations params = [] for param, scale_factor in zip(self.params(), self.param_scale_factors()): if param.letter == 'Th' or param.letter == 'Tv': params.append(_canonicalize_exponent( -self.tunneling * step_time / numpy.pi, 4) / scale_factor) elif param.letter == 'V': i, = param.subscripts # Use the midpoint of the time segment interpolation_progress = 0.5 * (2 * i + 1) / self.iterations params.append(_canonicalize_exponent( -0.5 * self.coulomb * interpolation_progress * step_time / numpy.pi, 2) / scale_factor) return numpy.array(params) def _is_horizontal_edge(p, q, x_dim, y_dim, periodic): n_sites = x_dim*y_dim if p < n_sites and q >= n_sites or q < n_sites and p >= n_sites: return False if p >= n_sites and q >= n_sites: p -= n_sites q -= n_sites return (q == _right_neighbor(p, x_dim, y_dim, periodic) or p == _right_neighbor(q, x_dim, y_dim, periodic)) def _is_vertical_edge(p, q, x_dim, y_dim, periodic): n_sites = x_dim*y_dim if p < n_sites and q >= n_sites or q < n_sites and p >= n_sites: return False if p >= n_sites and q >= n_sites: p -= n_sites q -= n_sites return (q == _bottom_neighbor(p, x_dim, y_dim, periodic) or p == _bottom_neighbor(q, x_dim, y_dim, periodic)) def _are_same_site_opposite_spin(p, q, n_sites): return abs(p-q) == n_sites def _right_neighbor(site, x_dimension, y_dimension, periodic): if x_dimension == 1: return None if (site + 1) % x_dimension == 0: if periodic: return site + 1 - x_dimension else: return None return site + 1 def _bottom_neighbor(site, x_dimension, y_dimension, periodic): if y_dimension == 1: return None if site + x_dimension + 1 > x_dimension*y_dimension: if periodic: return site + x_dimension - x_dimension*y_dimension else: return None return site + x_dimension def _canonicalize_exponent(exponent: float, period: int) -> float: # Shift into [-p/2, +p/2). exponent += period / 2 exponent %= period exponent -= period / 2 # Prefer (-p/2, +p/2] over [-p/2, +p/2). if exponent <= -period / 2: exponent += period # coverage: ignore return exponent<|fim▁end|>
""" self.x_dim = x_dim self.y_dim = y_dim self.tunneling = tunneling
<|file_name|>shootout-k-nucleotide.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // ignore-android see #10393 #13206 use std::string::String; use std::slice; use std::sync::{Arc, Future}; static TABLE: [u8, ..4] = [ 'A' as u8, 'C' as u8, 'G' as u8, 'T' as u8 ]; static TABLE_SIZE: uint = 2 << 16; <|fim▁hole|> "GGTA", "GGTATT", "GGTATTTTAATT", "GGTATTTTAATTTATAGT", ]; // Code implementation #[deriving(PartialEq, PartialOrd, Ord, Eq)] struct Code(u64); impl Code { fn hash(&self) -> u64 { let Code(ret) = *self; return ret; } fn push_char(&self, c: u8) -> Code { Code((self.hash() << 2) + (pack_symbol(c) as u64)) } fn rotate(&self, c: u8, frame: uint) -> Code { Code(self.push_char(c).hash() & ((1u64 << (2 * frame)) - 1)) } fn pack(string: &str) -> Code { string.bytes().fold(Code(0u64), |a, b| a.push_char(b)) } fn unpack(&self, frame: uint) -> String { let mut key = self.hash(); let mut result = Vec::new(); for _ in range(0, frame) { result.push(unpack_symbol((key as u8) & 3)); key >>= 2; } result.reverse(); String::from_utf8(result).unwrap() } } // Hash table implementation trait TableCallback { fn f(&self, entry: &mut Entry); } struct BumpCallback; impl TableCallback for BumpCallback { fn f(&self, entry: &mut Entry) { entry.count += 1; } } struct PrintCallback(&'static str); impl TableCallback for PrintCallback { fn f(&self, entry: &mut Entry) { let PrintCallback(s) = *self; println!("{}\t{}", entry.count as int, s); } } struct Entry { code: Code, count: uint, next: Option<Box<Entry>>, } struct Table { count: uint, items: Vec<Option<Box<Entry>>> } struct Items<'a> { cur: Option<&'a Entry>, items: slice::Items<'a, Option<Box<Entry>>>, } impl Table { fn new() -> Table { Table { count: 0, items: Vec::from_fn(TABLE_SIZE, |_| None), } } fn search_remainder<C:TableCallback>(item: &mut Entry, key: Code, c: C) { match item.next { None => { let mut entry = box Entry { code: key, count: 0, next: None, }; c.f(&mut *entry); item.next = Some(entry); } Some(ref mut entry) => { if entry.code == key { c.f(&mut **entry); return; } Table::search_remainder(&mut **entry, key, c) } } } fn lookup<C:TableCallback>(&mut self, key: Code, c: C) { let index = key.hash() % (TABLE_SIZE as u64); { if self.items.get(index as uint).is_none() { let mut entry = box Entry { code: key, count: 0, next: None, }; c.f(&mut *entry); *self.items.get_mut(index as uint) = Some(entry); return; } } { let entry = &mut *self.items.get_mut(index as uint).get_mut_ref(); if entry.code == key { c.f(&mut **entry); return; } Table::search_remainder(&mut **entry, key, c) } } fn iter<'a>(&'a self) -> Items<'a> { Items { cur: None, items: self.items.iter() } } } impl<'a> Iterator<&'a Entry> for Items<'a> { fn next(&mut self) -> Option<&'a Entry> { let ret = match self.cur { None => { let i; loop { match self.items.next() { None => return None, Some(&None) => {} Some(&Some(ref a)) => { i = &**a; break } } } self.cur = Some(&*i); &*i } Some(c) => c }; match ret.next { None => { self.cur = None; } Some(ref next) => { self.cur = Some(&**next); } } return Some(ret); } } // Main program fn pack_symbol(c: u8) -> 
u8 { match c as char { 'A' => 0, 'C' => 1, 'G' => 2, 'T' => 3, _ => fail!("{}", c as char), } } fn unpack_symbol(c: u8) -> u8 { TABLE[c as uint] } fn generate_frequencies(mut input: &[u8], frame: uint) -> Table { let mut frequencies = Table::new(); if input.len() < frame { return frequencies; } let mut code = Code(0); // Pull first frame. for _ in range(0, frame) { code = code.push_char(input[0]); input = input.slice_from(1); } frequencies.lookup(code, BumpCallback); while input.len() != 0 && input[0] != ('>' as u8) { code = code.rotate(input[0], frame); frequencies.lookup(code, BumpCallback); input = input.slice_from(1); } frequencies } fn print_frequencies(frequencies: &Table, frame: uint) { let mut vector = Vec::new(); for entry in frequencies.iter() { vector.push((entry.count, entry.code)); } vector.as_mut_slice().sort(); let mut total_count = 0; for &(count, _) in vector.iter() { total_count += count; } for &(count, key) in vector.iter().rev() { println!("{} {:.3f}", key.unpack(frame).as_slice(), (count as f32 * 100.0) / (total_count as f32)); } println!(""); } fn print_occurrences(frequencies: &mut Table, occurrence: &'static str) { frequencies.lookup(Code::pack(occurrence), PrintCallback(occurrence)) } fn get_sequence<R: Buffer>(r: &mut R, key: &str) -> Vec<u8> { let mut res = Vec::new(); for l in r.lines().map(|l| l.ok().unwrap()) .skip_while(|l| key != l.as_slice().slice_to(key.len())).skip(1) { res.push_all(l.as_slice().trim().as_bytes()); } for b in res.mut_iter() { *b = b.to_ascii().to_upper().to_byte(); } res } fn main() { let input = if std::os::getenv("RUST_BENCH").is_some() { let fd = std::io::File::open(&Path::new("shootout-k-nucleotide.data")); get_sequence(&mut std::io::BufferedReader::new(fd), ">THREE") } else { get_sequence(&mut std::io::stdin(), ">THREE") }; let input = Arc::new(input); let nb_freqs: Vec<(uint, Future<Table>)> = range(1u, 3).map(|i| { let input = input.clone(); (i, Future::spawn(proc() generate_frequencies(input.as_slice(), i))) }).collect(); let occ_freqs: Vec<Future<Table>> = OCCURRENCES.iter().map(|&occ| { let input = input.clone(); Future::spawn(proc() generate_frequencies(input.as_slice(), occ.len())) }).collect(); for (i, freq) in nb_freqs.move_iter() { print_frequencies(&freq.unwrap(), i); } for (&occ, freq) in OCCURRENCES.iter().zip(occ_freqs.move_iter()) { print_occurrences(&mut freq.unwrap(), occ); } }<|fim▁end|>
static OCCURRENCES: [&'static str, ..5] = [ "GGT",
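The Rust prompt above packs each nucleotide into two bits (A=0, C=1, G=2, T=3) and rolls a fixed-width code as it slides a frame across the sequence. The short Python sketch below illustrates that packing, unpacking and rotation; it is an illustration only, not part of the benchmark program.

```python
# 2-bit nucleotide packing, mirroring Code::pack, Code::unpack and Code::rotate
# from the Rust prompt above.
PACK = {"A": 0, "C": 1, "G": 2, "T": 3}
UNPACK = "ACGT"

def pack(seq: str) -> int:
    code = 0
    for ch in seq:
        code = (code << 2) | PACK[ch]
    return code

def unpack(code: int, frame: int) -> str:
    out = []
    for _ in range(frame):
        out.append(UNPACK[code & 3])
        code >>= 2
    return "".join(reversed(out))

def rotate(code: int, ch: str, frame: int) -> int:
    # Slide the frame one symbol to the right, keeping only `frame` symbols.
    return ((code << 2) | PACK[ch]) & ((1 << (2 * frame)) - 1)

assert pack("GGTA") == 0b10_10_11_00 == 172
assert unpack(172, 4) == "GGTA"
assert unpack(rotate(pack("GGTA"), "T", 4), 4) == "GTAT"
```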
<|file_name|>index.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package persistentvolume import ( "fmt" "sort" "k8s.io/api/core/v1" "k8s.io/apimachinery/pkg/api/resource" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" "k8s.io/apimachinery/pkg/labels" utilfeature "k8s.io/apiserver/pkg/util/feature" "github.com/hyperhq/client-go/tools/cache" v1helper "k8s.io/kubernetes/pkg/apis/core/v1/helper" "k8s.io/kubernetes/pkg/features" "k8s.io/kubernetes/pkg/volume" volumeutil "k8s.io/kubernetes/pkg/volume/util" ) // persistentVolumeOrderedIndex is a cache.Store that keeps persistent volumes // indexed by AccessModes and ordered by storage capacity. type persistentVolumeOrderedIndex struct { store cache.Indexer } func newPersistentVolumeOrderedIndex() persistentVolumeOrderedIndex { return persistentVolumeOrderedIndex{cache.NewIndexer(cache.MetaNamespaceKeyFunc, cache.Indexers{"accessmodes": accessModesIndexFunc})} } // accessModesIndexFunc is an indexing function that returns a persistent // volume's AccessModes as a string func accessModesIndexFunc(obj interface{}) ([]string, error) { if pv, ok := obj.(*v1.PersistentVolume); ok { modes := v1helper.GetAccessModesAsString(pv.Spec.AccessModes) return []string{modes}, nil } return []string{""}, fmt.Errorf("object is not a persistent volume: %v", obj) } <|fim▁hole|> Spec: v1.PersistentVolumeSpec{ AccessModes: modes, }, } objs, err := pvIndex.store.Index("accessmodes", pv) if err != nil { return nil, err } volumes := make([]*v1.PersistentVolume, len(objs)) for i, obj := range objs { volumes[i] = obj.(*v1.PersistentVolume) } return volumes, nil } // find returns the nearest PV from the ordered list or nil if a match is not found func (pvIndex *persistentVolumeOrderedIndex) findByClaim(claim *v1.PersistentVolumeClaim, delayBinding bool) (*v1.PersistentVolume, error) { // PVs are indexed by their access modes to allow easier searching. Each // index is the string representation of a set of access modes. There is a // finite number of possible sets and PVs will only be indexed in one of // them (whichever index matches the PV's modes). // // A request for resources will always specify its desired access modes. // Any matching PV must have at least that number of access modes, but it // can have more. For example, a user asks for ReadWriteOnce but a GCEPD // is available, which is ReadWriteOnce+ReadOnlyMany. // // Searches are performed against a set of access modes, so we can attempt // not only the exact matching modes but also potential matches (the GCEPD // example above). 
allPossibleModes := pvIndex.allPossibleMatchingAccessModes(claim.Spec.AccessModes) for _, modes := range allPossibleModes { volumes, err := pvIndex.listByAccessModes(modes) if err != nil { return nil, err } bestVol, err := findMatchingVolume(claim, volumes, nil /* node for topology binding*/, nil /* exclusion map */, delayBinding) if err != nil { return nil, err } if bestVol != nil { return bestVol, nil } } return nil, nil } // findMatchingVolume goes through the list of volumes to find the best matching volume // for the claim. // // This function is used by both the PV controller and scheduler. // // delayBinding is true only in the PV controller path. When set, prebound PVs are still returned // as a match for the claim, but unbound PVs are skipped. // // node is set only in the scheduler path. When set, the PV node affinity is checked against // the node's labels. // // excludedVolumes is only used in the scheduler path, and is needed for evaluating multiple // unbound PVCs for a single Pod at one time. As each PVC finds a matching PV, the chosen // PV needs to be excluded from future matching. func findMatchingVolume( claim *v1.PersistentVolumeClaim, volumes []*v1.PersistentVolume, node *v1.Node, excludedVolumes map[string]*v1.PersistentVolume, delayBinding bool) (*v1.PersistentVolume, error) { var smallestVolume *v1.PersistentVolume var smallestVolumeQty resource.Quantity requestedQty := claim.Spec.Resources.Requests[v1.ResourceName(v1.ResourceStorage)] requestedClass := v1helper.GetPersistentVolumeClaimClass(claim) var selector labels.Selector if claim.Spec.Selector != nil { internalSelector, err := metav1.LabelSelectorAsSelector(claim.Spec.Selector) if err != nil { // should be unreachable code due to validation return nil, fmt.Errorf("error creating internal label selector for claim: %v: %v", claimToClaimKey(claim), err) } selector = internalSelector } // Go through all available volumes with two goals: // - find a volume that is either pre-bound by user or dynamically // provisioned for this claim. Because of this we need to loop through // all volumes. // - find the smallest matching one if there is no volume pre-bound to // the claim. for _, volume := range volumes { if _, ok := excludedVolumes[volume.Name]; ok { // Skip volumes in the excluded list continue } volumeQty := volume.Spec.Capacity[v1.ResourceStorage] // check if volumeModes do not match (Alpha and feature gate protected) isMisMatch, err := checkVolumeModeMisMatches(&claim.Spec, &volume.Spec) if err != nil { return nil, fmt.Errorf("error checking if volumeMode was a mismatch: %v", err) } // filter out mismatching volumeModes if isMisMatch { continue } nodeAffinityValid := true if node != nil { // Scheduler path, check that the PV NodeAffinity // is satisfied by the node err := volumeutil.CheckNodeAffinity(volume, node.Labels) if err != nil { nodeAffinityValid = false } } if isVolumeBoundToClaim(volume, claim) { // this claim and volume are pre-bound; return // the volume if the size request is satisfied, // otherwise continue searching for a match if volumeQty.Cmp(requestedQty) < 0 { continue } // If PV node affinity is invalid, return no match. // This means the prebound PV (and therefore PVC) // is not suitable for this node. if !nodeAffinityValid { return nil, nil } return volume, nil } if node == nil && delayBinding { // PV controller does not bind this claim. 
// Scheduler will handle binding unbound volumes // Scheduler path will have node != nil continue } // filter out: // - volumes bound to another claim // - volumes whose labels don't match the claim's selector, if specified // - volumes in Class that is not requested // - volumes whose NodeAffinity does not match the node if volume.Spec.ClaimRef != nil { continue } else if selector != nil && !selector.Matches(labels.Set(volume.Labels)) { continue } if v1helper.GetPersistentVolumeClass(volume) != requestedClass { continue } if !nodeAffinityValid { continue } if node != nil { // Scheduler path // Check that the access modes match if !checkAccessModes(claim, volume) { continue } } if volumeQty.Cmp(requestedQty) >= 0 { if smallestVolume == nil || smallestVolumeQty.Cmp(volumeQty) > 0 { smallestVolume = volume smallestVolumeQty = volumeQty } } } if smallestVolume != nil { // Found a matching volume return smallestVolume, nil } return nil, nil } // checkVolumeModeMatches is a convenience method that checks volumeMode for PersistentVolume // and PersistentVolumeClaims along with making sure that the Alpha feature gate BlockVolume is // enabled. // This is Alpha and could change in the future. func checkVolumeModeMisMatches(pvcSpec *v1.PersistentVolumeClaimSpec, pvSpec *v1.PersistentVolumeSpec) (bool, error) { if utilfeature.DefaultFeatureGate.Enabled(features.BlockVolume) { if pvSpec.VolumeMode != nil && pvcSpec.VolumeMode != nil { requestedVolumeMode := *pvcSpec.VolumeMode pvVolumeMode := *pvSpec.VolumeMode return requestedVolumeMode != pvVolumeMode, nil } else { // This also should retrun an error, this means that // the defaulting has failed. return true, fmt.Errorf("api defaulting for volumeMode failed") } } else { // feature gate is disabled return false, nil } } // findBestMatchForClaim is a convenience method that finds a volume by the claim's AccessModes and requests for Storage func (pvIndex *persistentVolumeOrderedIndex) findBestMatchForClaim(claim *v1.PersistentVolumeClaim, delayBinding bool) (*v1.PersistentVolume, error) { return pvIndex.findByClaim(claim, delayBinding) } // allPossibleMatchingAccessModes returns an array of AccessMode arrays that // can satisfy a user's requested modes. // // see comments in the Find func above regarding indexing. // // allPossibleMatchingAccessModes gets all stringified accessmodes from the // index and returns all those that contain at least all of the requested // mode. // // For example, assume the index contains 2 types of PVs where the stringified // accessmodes are: // // "RWO,ROX" -- some number of GCEPDs // "RWO,ROX,RWX" -- some number of NFS volumes // // A request for RWO could be satisfied by both sets of indexed volumes, so // allPossibleMatchingAccessModes returns: // // [][]v1.PersistentVolumeAccessMode { // []v1.PersistentVolumeAccessMode { // v1.ReadWriteOnce, v1.ReadOnlyMany, // }, // []v1.PersistentVolumeAccessMode { // v1.ReadWriteOnce, v1.ReadOnlyMany, v1.ReadWriteMany, // }, // } // // A request for RWX can be satisfied by only one set of indexed volumes, so // the return is: // // [][]v1.PersistentVolumeAccessMode { // []v1.PersistentVolumeAccessMode { // v1.ReadWriteOnce, v1.ReadOnlyMany, v1.ReadWriteMany, // }, // } // // This func returns modes with ascending levels of modes to give the user // what is closest to what they actually asked for. 
func (pvIndex *persistentVolumeOrderedIndex) allPossibleMatchingAccessModes(requestedModes []v1.PersistentVolumeAccessMode) [][]v1.PersistentVolumeAccessMode { matchedModes := [][]v1.PersistentVolumeAccessMode{} keys := pvIndex.store.ListIndexFuncValues("accessmodes") for _, key := range keys { indexedModes := v1helper.GetAccessModesFromString(key) if volume.AccessModesContainedInAll(indexedModes, requestedModes) { matchedModes = append(matchedModes, indexedModes) } } // sort by the number of modes in each array with the fewest number of // modes coming first. this allows searching for volumes by the minimum // number of modes required of the possible matches. sort.Sort(byAccessModes{matchedModes}) return matchedModes } // byAccessModes is used to order access modes by size, with the fewest modes first type byAccessModes struct { modes [][]v1.PersistentVolumeAccessMode } func (c byAccessModes) Less(i, j int) bool { return len(c.modes[i]) < len(c.modes[j]) } func (c byAccessModes) Swap(i, j int) { c.modes[i], c.modes[j] = c.modes[j], c.modes[i] } func (c byAccessModes) Len() int { return len(c.modes) } func claimToClaimKey(claim *v1.PersistentVolumeClaim) string { return fmt.Sprintf("%s/%s", claim.Namespace, claim.Name) } func claimrefToClaimKey(claimref *v1.ObjectReference) string { return fmt.Sprintf("%s/%s", claimref.Namespace, claimref.Name) } // Returns true if PV satisfies all the PVC's requested AccessModes func checkAccessModes(claim *v1.PersistentVolumeClaim, volume *v1.PersistentVolume) bool { pvModesMap := map[v1.PersistentVolumeAccessMode]bool{} for _, mode := range volume.Spec.AccessModes { pvModesMap[mode] = true } for _, mode := range claim.Spec.AccessModes { _, ok := pvModesMap[mode] if !ok { return false } } return true }<|fim▁end|>
// listByAccessModes returns all volumes with the given set of // AccessModeTypes. The list is unsorted! func (pvIndex *persistentVolumeOrderedIndex) listByAccessModes(modes []v1.PersistentVolumeAccessMode) ([]*v1.PersistentVolume, error) { pv := &v1.PersistentVolume{
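The comments in the Go prompt above describe how a claim's requested access modes are matched: every indexed mode set that contains all requested modes is a candidate, and candidates with fewer modes are tried first. The sketch below restates that containment-and-sort logic in a few lines of Python as an illustration; it is not the Kubernetes implementation.

```python
# Keep every indexed mode set that is a superset of the requested modes,
# then try the smallest sets first (same idea as allPossibleMatchingAccessModes).
def all_possible_matching_access_modes(indexed_mode_sets, requested_modes):
    requested = set(requested_modes)
    matched = [modes for modes in indexed_mode_sets if requested.issubset(modes)]
    matched.sort(key=len)  # fewest modes first
    return matched

index = [
    {"ReadWriteOnce", "ReadOnlyMany"},                   # e.g. GCE PD-style volumes
    {"ReadWriteOnce", "ReadOnlyMany", "ReadWriteMany"},  # e.g. NFS-style volumes
]
print(all_possible_matching_access_modes(index, ["ReadWriteOnce"]))  # both sets match
print(all_possible_matching_access_modes(index, ["ReadWriteMany"]))  # only the NFS-style set
```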
<|file_name|>NavigationDrawerFragment.java<|end_file_name|><|fim▁begin|>package fyskam.fyskamssngbok; import android.app.Activity; import android.app.ActionBar; import android.app.Fragment; import android.support.v4.app.ActionBarDrawerToggle; import android.support.v4.view.GravityCompat; import android.support.v4.widget.DrawerLayout; import android.content.SharedPreferences; import android.content.res.Configuration; import android.os.Bundle; import android.preference.PreferenceManager; import android.view.LayoutInflater; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.ArrayAdapter; import android.widget.ListView; import android.widget.Toast; /** * Fragment used for managing interactions for and presentation of a navigation drawer. * See the <a href="https://developer.android.com/design/patterns/navigation-drawer.html#Interaction"> * design guidelines</a> for a complete explanation of the behaviors implemented here. */ public class NavigationDrawerFragment extends Fragment { /** * Remember the position of the selected item. */ private static final String STATE_SELECTED_POSITION = "selected_navigation_drawer_position"; /** * Per the design guidelines, you should show the drawer on launch until the user manually * expands it. This shared preference tracks this. */ private static final String PREF_USER_LEARNED_DRAWER = "navigation_drawer_learned"; /** * A pointer to the current callbacks instance (the Activity). */ private NavigationDrawerCallbacks mCallbacks; /** * Helper component that ties the action bar to the navigation drawer. */ private ActionBarDrawerToggle mDrawerToggle; private DrawerLayout mDrawerLayout; private ListView mDrawerListView; private View mFragmentContainerView; private int mCurrentSelectedPosition = 0; private boolean mFromSavedInstanceState; private boolean mUserLearnedDrawer; public NavigationDrawerFragment() { } @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); // Read in the flag indicating whether or not the user has demonstrated awareness of the // drawer. See PREF_USER_LEARNED_DRAWER for details. SharedPreferences sp = PreferenceManager.getDefaultSharedPreferences(getActivity()); mUserLearnedDrawer = sp.getBoolean(PREF_USER_LEARNED_DRAWER, false); if (savedInstanceState != null) { mCurrentSelectedPosition = savedInstanceState.getInt(STATE_SELECTED_POSITION); mFromSavedInstanceState = true; } // Select either the default item (0) or the last selected item. selectItem(mCurrentSelectedPosition); } @Override public void onActivityCreated (Bundle savedInstanceState) { super.onActivityCreated(savedInstanceState); // Indicate that this fragment would like to influence the set of actions in the action bar. 
setHasOptionsMenu(true); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { mDrawerListView = (ListView) inflater.inflate( R.layout.fragment_navigation_drawer, container, false); mDrawerListView.setOnItemClickListener(new AdapterView.OnItemClickListener() { @Override public void onItemClick(AdapterView<?> parent, View view, int position, long id) { selectItem(position); } }); mDrawerListView.setAdapter(new ArrayAdapter<String>( getActionBar().getThemedContext(), android.R.layout.simple_list_item_activated_1, android.R.id.text1, new String[]{ getString(R.string.title_section1), getString(R.string.title_section2), getString(R.string.title_section3), })); mDrawerListView.setItemChecked(mCurrentSelectedPosition, true); return mDrawerListView; } public boolean isDrawerOpen() { return mDrawerLayout != null && mDrawerLayout.isDrawerOpen(mFragmentContainerView); } /** * Users of this fragment must call this method to set up the navigation drawer interactions. * * @param fragmentId The android:id of this fragment in its activity's layout. * @param drawerLayout The DrawerLayout containing this fragment's UI. */<|fim▁hole|> public void setUp(int fragmentId, DrawerLayout drawerLayout) { mFragmentContainerView = getActivity().findViewById(fragmentId); mDrawerLayout = drawerLayout; // set a custom shadow that overlays the main content when the drawer opens mDrawerLayout.setDrawerShadow(R.drawable.drawer_shadow, GravityCompat.START); // set up the drawer's list view with items and click listener ActionBar actionBar = getActionBar(); actionBar.setDisplayHomeAsUpEnabled(true); actionBar.setHomeButtonEnabled(true); // ActionBarDrawerToggle ties together the the proper interactions // between the navigation drawer and the action bar app icon. mDrawerToggle = new ActionBarDrawerToggle( getActivity(), /* host Activity */ mDrawerLayout, /* DrawerLayout object */ R.drawable.ic_drawer, /* nav drawer image to replace 'Up' caret */ R.string.navigation_drawer_open, /* "open drawer" description for accessibility */ R.string.navigation_drawer_close /* "close drawer" description for accessibility */ ) { @Override public void onDrawerClosed(View drawerView) { super.onDrawerClosed(drawerView); if (!isAdded()) { return; } getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu() } @Override public void onDrawerOpened(View drawerView) { super.onDrawerOpened(drawerView); if (!isAdded()) { return; } if (!mUserLearnedDrawer) { // The user manually opened the drawer; store this flag to prevent auto-showing // the navigation drawer automatically in the future. mUserLearnedDrawer = true; SharedPreferences sp = PreferenceManager .getDefaultSharedPreferences(getActivity()); sp.edit().putBoolean(PREF_USER_LEARNED_DRAWER, true).apply(); } getActivity().invalidateOptionsMenu(); // calls onPrepareOptionsMenu() } }; // If the user hasn't 'learned' about the drawer, open it to introduce them to the drawer, // per the navigation drawer design guidelines. if (!mUserLearnedDrawer && !mFromSavedInstanceState) { mDrawerLayout.openDrawer(mFragmentContainerView); } // Defer code dependent on restoration of previous instance state. 
mDrawerLayout.post(new Runnable() { @Override public void run() { mDrawerToggle.syncState(); } }); mDrawerLayout.setDrawerListener(mDrawerToggle); } private void selectItem(int position) { mCurrentSelectedPosition = position; if (mDrawerListView != null) { mDrawerListView.setItemChecked(position, true); } if (mDrawerLayout != null) { mDrawerLayout.closeDrawer(mFragmentContainerView); } if (mCallbacks != null) { mCallbacks.onNavigationDrawerItemSelected(position); } } @Override public void onAttach(Activity activity) { super.onAttach(activity); try { mCallbacks = (NavigationDrawerCallbacks) activity; } catch (ClassCastException e) { throw new ClassCastException("Activity must implement NavigationDrawerCallbacks."); } } @Override public void onDetach() { super.onDetach(); mCallbacks = null; } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); outState.putInt(STATE_SELECTED_POSITION, mCurrentSelectedPosition); } @Override public void onConfigurationChanged(Configuration newConfig) { super.onConfigurationChanged(newConfig); // Forward the new configuration the drawer toggle component. mDrawerToggle.onConfigurationChanged(newConfig); } @Override public void onCreateOptionsMenu(Menu menu, MenuInflater inflater) { // If the drawer is open, show the global app actions in the action bar. See also // showGlobalContextActionBar, which controls the top-left area of the action bar. if (mDrawerLayout != null && isDrawerOpen()) { inflater.inflate(R.menu.global, menu); showGlobalContextActionBar(); } super.onCreateOptionsMenu(menu, inflater); } @Override public boolean onOptionsItemSelected(MenuItem item) { if (mDrawerToggle.onOptionsItemSelected(item)) { return true; } if (item.getItemId() == R.id.action_example) { Toast.makeText(getActivity(), "Example action.", Toast.LENGTH_SHORT).show(); return true; } return super.onOptionsItemSelected(item); } /** * Per the navigation drawer design guidelines, updates the action bar to show the global app * 'context', rather than just what's in the current screen. */ private void showGlobalContextActionBar() { ActionBar actionBar = getActionBar(); actionBar.setDisplayShowTitleEnabled(true); actionBar.setNavigationMode(ActionBar.NAVIGATION_MODE_STANDARD); actionBar.setTitle(R.string.app_name); } private ActionBar getActionBar() { return getActivity().getActionBar(); } /** * Callbacks interface that all activities using this fragment must implement. */ public static interface NavigationDrawerCallbacks { /** * Called when an item in the navigation drawer is selected. */ void onNavigationDrawerItemSelected(int position); } }<|fim▁end|>
<|file_name|>OsmMember.java<|end_file_name|><|fim▁begin|>package de.jotschi.geo.osm.tags; import org.w3c.dom.Node; public class OsmMember { String type, ref, role; public OsmMember(Node node) { ref = node.getAttributes().getNamedItem("ref").getNodeValue(); role = node.getAttributes().getNamedItem("role").getNodeValue(); type = node.getAttributes().getNamedItem("type").getNodeValue(); }<|fim▁hole|> public String getType() { return type; } public String getRef() { return ref; } public String getRole() { return role; } }<|fim▁end|>
<|file_name|>serializers.py<|end_file_name|><|fim▁begin|># coding=utf-8 """ Utility Serializers """ from rest_framework.serializers import HyperlinkedModelSerializer <|fim▁hole|> """ ModelSerializer which provides both a `url` and `id` field """ def get_pk_field(self, model_field): return self.get_field(model_field)<|fim▁end|>
class HybridModelSerializer(HyperlinkedModelSerializer):
<|file_name|>visitor.ts<|end_file_name|><|fim▁begin|>namespace ts { const isTypeNodeOrTypeParameterDeclaration = or(isTypeNode, isTypeParameterDeclaration); /** * Visits a Node using the supplied visitor, possibly returning a new Node in its place. * * @param node The Node to visit. * @param visitor The callback used to visit the Node. * @param test A callback to execute to verify the Node is valid. * @param lift An optional callback to execute to lift a NodeArray into a valid Node. */ export function visitNode<T extends Node>(node: T | undefined, visitor: Visitor | undefined, test?: (node: Node) => boolean, lift?: (node: NodeArray<Node>) => T): T; /** * Visits a Node using the supplied visitor, possibly returning a new Node in its place. * * @param node The Node to visit. * @param visitor The callback used to visit the Node. * @param test A callback to execute to verify the Node is valid. * @param lift An optional callback to execute to lift a NodeArray into a valid Node. */ export function visitNode<T extends Node>(node: T | undefined, visitor: Visitor | undefined, test?: (node: Node) => boolean, lift?: (node: NodeArray<Node>) => T): T | undefined; export function visitNode<T extends Node>(node: T | undefined, visitor: Visitor | undefined, test?: (node: Node) => boolean, lift?: (node: NodeArray<Node>) => T): T | undefined { if (node === undefined || visitor === undefined) { return node; } aggregateTransformFlags(node); const visited = visitor(node); if (visited === node) { return node; } let visitedNode: Node | undefined; if (visited === undefined) { return undefined; } else if (isArray(visited)) { visitedNode = (lift || extractSingleNode)(visited); } else { visitedNode = visited; } Debug.assertNode(visitedNode, test); aggregateTransformFlags(visitedNode!); return <T>visitedNode; } /** * Visits a NodeArray using the supplied visitor, possibly returning a new NodeArray in its place. * * @param nodes The NodeArray to visit. * @param visitor The callback used to visit a Node. * @param test A node test to execute for each node. * @param start An optional value indicating the starting offset at which to start visiting. * @param count An optional value indicating the maximum number of nodes to visit. */ export function visitNodes<T extends Node>(nodes: NodeArray<T> | undefined, visitor: Visitor, test?: (node: Node) => boolean, start?: number, count?: number): NodeArray<T>; /** * Visits a NodeArray using the supplied visitor, possibly returning a new NodeArray in its place. * * @param nodes The NodeArray to visit. * @param visitor The callback used to visit a Node. * @param test A node test to execute for each node. * @param start An optional value indicating the starting offset at which to start visiting. * @param count An optional value indicating the maximum number of nodes to visit. */ export function visitNodes<T extends Node>(nodes: NodeArray<T> | undefined, visitor: Visitor, test?: (node: Node) => boolean, start?: number, count?: number): NodeArray<T> | undefined; /** * Visits a NodeArray using the supplied visitor, possibly returning a new NodeArray in its place. * * @param nodes The NodeArray to visit. * @param visitor The callback used to visit a Node. * @param test A node test to execute for each node. * @param start An optional value indicating the starting offset at which to start visiting. * @param count An optional value indicating the maximum number of nodes to visit. 
*/ export function visitNodes<T extends Node>(nodes: NodeArray<T> | undefined, visitor: Visitor, test?: (node: Node) => boolean, start?: number, count?: number): NodeArray<T> | undefined { if (nodes === undefined || visitor === undefined) { return nodes; } let updated: MutableNodeArray<T> | undefined; // Ensure start and count have valid values const length = nodes.length; if (start === undefined || start < 0) { start = 0; } if (count === undefined || count > length - start) { count = length - start; } if (start > 0 || count < length) { // If we are not visiting all of the original nodes, we must always create a new array. // Since this is a fragment of a node array, we do not copy over the previous location // and will only copy over `hasTrailingComma` if we are including the last element. updated = createNodeArray<T>([], /*hasTrailingComma*/ nodes.hasTrailingComma && start + count === length); } // Visit each original node. for (let i = 0; i < count; i++) { const node = nodes[i + start]; aggregateTransformFlags(node); const visited = node !== undefined ? visitor(node) : undefined; if (updated !== undefined || visited === undefined || visited !== node) { if (updated === undefined) { // Ensure we have a copy of `nodes`, up to the current index. updated = createNodeArray(nodes.slice(0, i), nodes.hasTrailingComma); setTextRange(updated, nodes); } if (visited) { if (isArray(visited)) { for (const visitedNode of visited) { Debug.assertNode(visitedNode, test); aggregateTransformFlags(visitedNode); updated.push(<T>visitedNode); } } else { Debug.assertNode(visited, test); aggregateTransformFlags(visited); updated.push(<T>visited); } } } } return updated || nodes; } /** * Starts a new lexical environment and visits a statement list, ending the lexical environment * and merging hoisted declarations upon completion. */ export function visitLexicalEnvironment(statements: NodeArray<Statement>, visitor: Visitor, context: TransformationContext, start?: number, ensureUseStrict?: boolean) { context.startLexicalEnvironment(); statements = visitNodes(statements, visitor, isStatement, start); if (ensureUseStrict && !startsWithUseStrict(statements)) { statements = setTextRange(createNodeArray([createExpressionStatement(createLiteral("use strict")), ...statements]), statements); } const declarations = context.endLexicalEnvironment(); return setTextRange(createNodeArray(concatenate(declarations, statements)), statements); } /** * Starts a new lexical environment and visits a parameter list, suspending the lexical * environment upon completion. */ export function visitParameterList(nodes: NodeArray<ParameterDeclaration> | undefined, visitor: Visitor, context: TransformationContext, nodesVisitor = visitNodes) { context.startLexicalEnvironment(); const updated = nodesVisitor(nodes, visitor, isParameterDeclaration); context.suspendLexicalEnvironment(); return updated; } /** * Resumes a suspended lexical environment and visits a function body, ending the lexical * environment and merging hoisted declarations upon completion. */ export function visitFunctionBody(node: FunctionBody, visitor: Visitor, context: TransformationContext): FunctionBody; /** * Resumes a suspended lexical environment and visits a function body, ending the lexical * environment and merging hoisted declarations upon completion. 
*/ export function visitFunctionBody(node: FunctionBody | undefined, visitor: Visitor, context: TransformationContext): FunctionBody | undefined; /** * Resumes a suspended lexical environment and visits a concise body, ending the lexical * environment and merging hoisted declarations upon completion. */ export function visitFunctionBody(node: ConciseBody, visitor: Visitor, context: TransformationContext): ConciseBody; export function visitFunctionBody(node: ConciseBody | undefined, visitor: Visitor, context: TransformationContext): ConciseBody | undefined { context.resumeLexicalEnvironment(); const updated = visitNode(node, visitor, isConciseBody); const declarations = context.endLexicalEnvironment(); if (some(declarations)) { const block = convertToFunctionBody(updated); const statements = mergeLexicalEnvironment(block.statements, declarations); return updateBlock(block, statements); } return updated; } /** * Visits each child of a Node using the supplied visitor, possibly returning a new Node of the same kind in its place. * * @param node The Node whose children will be visited. * @param visitor The callback used to visit each child. * @param context A lexical environment context for the visitor. */ export function visitEachChild<T extends Node>(node: T, visitor: Visitor, context: TransformationContext): T; /** * Visits each child of a Node using the supplied visitor, possibly returning a new Node of the same kind in its place. * * @param node The Node whose children will be visited. * @param visitor The callback used to visit each child. * @param context A lexical environment context for the visitor. */ export function visitEachChild<T extends Node>(node: T | undefined, visitor: Visitor, context: TransformationContext, nodesVisitor?: typeof visitNodes, tokenVisitor?: Visitor): T | undefined; <|fim▁hole|> } const kind = node.kind; // No need to visit nodes with no children. 
if ((kind > SyntaxKind.FirstToken && kind <= SyntaxKind.LastToken) || kind === SyntaxKind.ThisType) { return node; } switch (kind) { // Names case SyntaxKind.Identifier: return updateIdentifier(<Identifier>node, nodesVisitor((<Identifier>node).typeArguments, visitor, isTypeNodeOrTypeParameterDeclaration)); case SyntaxKind.QualifiedName: return updateQualifiedName(<QualifiedName>node, visitNode((<QualifiedName>node).left, visitor, isEntityName), visitNode((<QualifiedName>node).right, visitor, isIdentifier)); case SyntaxKind.ComputedPropertyName: return updateComputedPropertyName(<ComputedPropertyName>node, visitNode((<ComputedPropertyName>node).expression, visitor, isExpression)); // Signature elements case SyntaxKind.TypeParameter: return updateTypeParameterDeclaration(<TypeParameterDeclaration>node, visitNode((<TypeParameterDeclaration>node).name, visitor, isIdentifier), visitNode((<TypeParameterDeclaration>node).constraint, visitor, isTypeNode), visitNode((<TypeParameterDeclaration>node).default, visitor, isTypeNode)); case SyntaxKind.Parameter: return updateParameter(<ParameterDeclaration>node, nodesVisitor((<ParameterDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<ParameterDeclaration>node).modifiers, visitor, isModifier), visitNode((<ParameterDeclaration>node).dotDotDotToken, tokenVisitor, isToken), visitNode((<ParameterDeclaration>node).name, visitor, isBindingName), visitNode((<ParameterDeclaration>node).questionToken, tokenVisitor, isToken), visitNode((<ParameterDeclaration>node).type, visitor, isTypeNode), visitNode((<ParameterDeclaration>node).initializer, visitor, isExpression)); case SyntaxKind.Decorator: return updateDecorator(<Decorator>node, visitNode((<Decorator>node).expression, visitor, isExpression)); // Type elements case SyntaxKind.PropertySignature: return updatePropertySignature((<PropertySignature>node), nodesVisitor((<PropertySignature>node).modifiers, visitor, isToken), visitNode((<PropertySignature>node).name, visitor, isPropertyName), visitNode((<PropertySignature>node).questionToken, tokenVisitor, isToken), visitNode((<PropertySignature>node).type, visitor, isTypeNode), visitNode((<PropertySignature>node).initializer, visitor, isExpression)); case SyntaxKind.PropertyDeclaration: return updateProperty(<PropertyDeclaration>node, nodesVisitor((<PropertyDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<PropertyDeclaration>node).modifiers, visitor, isModifier), visitNode((<PropertyDeclaration>node).name, visitor, isPropertyName), visitNode((<PropertyDeclaration>node).questionToken, tokenVisitor, isToken), visitNode((<PropertyDeclaration>node).type, visitor, isTypeNode), visitNode((<PropertyDeclaration>node).initializer, visitor, isExpression)); case SyntaxKind.MethodSignature: return updateMethodSignature(<MethodSignature>node, nodesVisitor((<MethodSignature>node).typeParameters, visitor, isTypeParameterDeclaration), nodesVisitor((<MethodSignature>node).parameters, visitor, isParameterDeclaration), visitNode((<MethodSignature>node).type, visitor, isTypeNode), visitNode((<MethodSignature>node).name, visitor, isPropertyName), visitNode((<MethodSignature>node).questionToken, tokenVisitor, isToken)); case SyntaxKind.MethodDeclaration: return updateMethod(<MethodDeclaration>node, nodesVisitor((<MethodDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<MethodDeclaration>node).modifiers, visitor, isModifier), visitNode((<MethodDeclaration>node).asteriskToken, tokenVisitor, isToken), 
visitNode((<MethodDeclaration>node).name, visitor, isPropertyName), visitNode((<MethodDeclaration>node).questionToken, tokenVisitor, isToken), nodesVisitor((<MethodDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration), visitParameterList((<MethodDeclaration>node).parameters, visitor, context, nodesVisitor), visitNode((<MethodDeclaration>node).type, visitor, isTypeNode), visitFunctionBody((<MethodDeclaration>node).body!, visitor, context)); case SyntaxKind.Constructor: return updateConstructor(<ConstructorDeclaration>node, nodesVisitor((<ConstructorDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<ConstructorDeclaration>node).modifiers, visitor, isModifier), visitParameterList((<ConstructorDeclaration>node).parameters, visitor, context, nodesVisitor), visitFunctionBody((<ConstructorDeclaration>node).body!, visitor, context)); case SyntaxKind.GetAccessor: return updateGetAccessor(<GetAccessorDeclaration>node, nodesVisitor((<GetAccessorDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<GetAccessorDeclaration>node).modifiers, visitor, isModifier), visitNode((<GetAccessorDeclaration>node).name, visitor, isPropertyName), visitParameterList((<GetAccessorDeclaration>node).parameters, visitor, context, nodesVisitor), visitNode((<GetAccessorDeclaration>node).type, visitor, isTypeNode), visitFunctionBody((<GetAccessorDeclaration>node).body!, visitor, context)); case SyntaxKind.SetAccessor: return updateSetAccessor(<SetAccessorDeclaration>node, nodesVisitor((<SetAccessorDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<SetAccessorDeclaration>node).modifiers, visitor, isModifier), visitNode((<SetAccessorDeclaration>node).name, visitor, isPropertyName), visitParameterList((<SetAccessorDeclaration>node).parameters, visitor, context, nodesVisitor), visitFunctionBody((<SetAccessorDeclaration>node).body!, visitor, context)); case SyntaxKind.CallSignature: return updateCallSignature(<CallSignatureDeclaration>node, nodesVisitor((<CallSignatureDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration), nodesVisitor((<CallSignatureDeclaration>node).parameters, visitor, isParameterDeclaration), visitNode((<CallSignatureDeclaration>node).type, visitor, isTypeNode)); case SyntaxKind.ConstructSignature: return updateConstructSignature(<ConstructSignatureDeclaration>node, nodesVisitor((<ConstructSignatureDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration), nodesVisitor((<ConstructSignatureDeclaration>node).parameters, visitor, isParameterDeclaration), visitNode((<ConstructSignatureDeclaration>node).type, visitor, isTypeNode)); case SyntaxKind.IndexSignature: return updateIndexSignature(<IndexSignatureDeclaration>node, nodesVisitor((<IndexSignatureDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<IndexSignatureDeclaration>node).modifiers, visitor, isModifier), nodesVisitor((<IndexSignatureDeclaration>node).parameters, visitor, isParameterDeclaration), visitNode((<IndexSignatureDeclaration>node).type!, visitor, isTypeNode)); // Types case SyntaxKind.TypePredicate: return updateTypePredicateNode(<TypePredicateNode>node, visitNode((<TypePredicateNode>node).parameterName, visitor), visitNode((<TypePredicateNode>node).type, visitor, isTypeNode)); case SyntaxKind.TypeReference: return updateTypeReferenceNode(<TypeReferenceNode>node, visitNode((<TypeReferenceNode>node).typeName, visitor, isEntityName), nodesVisitor((<TypeReferenceNode>node).typeArguments, visitor, isTypeNode)); case SyntaxKind.FunctionType: 
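// Function type and constructor type nodes below are rebuilt from their visited type parameters, parameter list and return type.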
return updateFunctionTypeNode(<FunctionTypeNode>node, nodesVisitor((<FunctionTypeNode>node).typeParameters, visitor, isTypeParameterDeclaration), nodesVisitor((<FunctionTypeNode>node).parameters, visitor, isParameterDeclaration), visitNode((<FunctionTypeNode>node).type, visitor, isTypeNode)); case SyntaxKind.ConstructorType: return updateConstructorTypeNode(<ConstructorTypeNode>node, nodesVisitor((<ConstructorTypeNode>node).typeParameters, visitor, isTypeParameterDeclaration), nodesVisitor((<ConstructorTypeNode>node).parameters, visitor, isParameterDeclaration), visitNode((<ConstructorTypeNode>node).type, visitor, isTypeNode)); case SyntaxKind.TypeQuery: return updateTypeQueryNode((<TypeQueryNode>node), visitNode((<TypeQueryNode>node).exprName, visitor, isEntityName)); case SyntaxKind.TypeLiteral: return updateTypeLiteralNode((<TypeLiteralNode>node), nodesVisitor((<TypeLiteralNode>node).members, visitor, isTypeElement)); case SyntaxKind.ArrayType: return updateArrayTypeNode(<ArrayTypeNode>node, visitNode((<ArrayTypeNode>node).elementType, visitor, isTypeNode)); case SyntaxKind.TupleType: return updateTupleTypeNode((<TupleTypeNode>node), nodesVisitor((<TupleTypeNode>node).elementTypes, visitor, isTypeNode)); case SyntaxKind.OptionalType: return updateOptionalTypeNode((<OptionalTypeNode>node), visitNode((<OptionalTypeNode>node).type, visitor, isTypeNode)); case SyntaxKind.RestType: return updateRestTypeNode((<RestTypeNode>node), visitNode((<RestTypeNode>node).type, visitor, isTypeNode)); case SyntaxKind.UnionType: return updateUnionTypeNode(<UnionTypeNode>node, nodesVisitor((<UnionTypeNode>node).types, visitor, isTypeNode)); case SyntaxKind.IntersectionType: return updateIntersectionTypeNode(<IntersectionTypeNode>node, nodesVisitor((<IntersectionTypeNode>node).types, visitor, isTypeNode)); case SyntaxKind.ConditionalType: return updateConditionalTypeNode(<ConditionalTypeNode>node, visitNode((<ConditionalTypeNode>node).checkType, visitor, isTypeNode), visitNode((<ConditionalTypeNode>node).extendsType, visitor, isTypeNode), visitNode((<ConditionalTypeNode>node).trueType, visitor, isTypeNode), visitNode((<ConditionalTypeNode>node).falseType, visitor, isTypeNode)); case SyntaxKind.InferType: return updateInferTypeNode(<InferTypeNode>node, visitNode((<InferTypeNode>node).typeParameter, visitor, isTypeParameterDeclaration)); case SyntaxKind.ImportType: return updateImportTypeNode(<ImportTypeNode>node, visitNode((<ImportTypeNode>node).argument, visitor, isTypeNode), visitNode((<ImportTypeNode>node).qualifier, visitor, isEntityName), visitNodes((<ImportTypeNode>node).typeArguments, visitor, isTypeNode), (<ImportTypeNode>node).isTypeOf ); case SyntaxKind.ParenthesizedType: return updateParenthesizedType(<ParenthesizedTypeNode>node, visitNode((<ParenthesizedTypeNode>node).type, visitor, isTypeNode)); case SyntaxKind.TypeOperator: return updateTypeOperatorNode(<TypeOperatorNode>node, visitNode((<TypeOperatorNode>node).type, visitor, isTypeNode)); case SyntaxKind.IndexedAccessType: return updateIndexedAccessTypeNode((<IndexedAccessTypeNode>node), visitNode((<IndexedAccessTypeNode>node).objectType, visitor, isTypeNode), visitNode((<IndexedAccessTypeNode>node).indexType, visitor, isTypeNode)); case SyntaxKind.MappedType: return updateMappedTypeNode((<MappedTypeNode>node), visitNode((<MappedTypeNode>node).readonlyToken, tokenVisitor, isToken), visitNode((<MappedTypeNode>node).typeParameter, visitor, isTypeParameterDeclaration), visitNode((<MappedTypeNode>node).questionToken, tokenVisitor, isToken), 
visitNode((<MappedTypeNode>node).type, visitor, isTypeNode)); case SyntaxKind.LiteralType: return updateLiteralTypeNode(<LiteralTypeNode>node, visitNode((<LiteralTypeNode>node).literal, visitor, isExpression)); // Binding patterns case SyntaxKind.ObjectBindingPattern: return updateObjectBindingPattern(<ObjectBindingPattern>node, nodesVisitor((<ObjectBindingPattern>node).elements, visitor, isBindingElement)); case SyntaxKind.ArrayBindingPattern: return updateArrayBindingPattern(<ArrayBindingPattern>node, nodesVisitor((<ArrayBindingPattern>node).elements, visitor, isArrayBindingElement)); case SyntaxKind.BindingElement: return updateBindingElement(<BindingElement>node, visitNode((<BindingElement>node).dotDotDotToken, tokenVisitor, isToken), visitNode((<BindingElement>node).propertyName, visitor, isPropertyName), visitNode((<BindingElement>node).name, visitor, isBindingName), visitNode((<BindingElement>node).initializer, visitor, isExpression)); // Expression case SyntaxKind.ArrayLiteralExpression: return updateArrayLiteral(<ArrayLiteralExpression>node, nodesVisitor((<ArrayLiteralExpression>node).elements, visitor, isExpression)); case SyntaxKind.ObjectLiteralExpression: return updateObjectLiteral(<ObjectLiteralExpression>node, nodesVisitor((<ObjectLiteralExpression>node).properties, visitor, isObjectLiteralElementLike)); case SyntaxKind.PropertyAccessExpression: return updatePropertyAccess(<PropertyAccessExpression>node, visitNode((<PropertyAccessExpression>node).expression, visitor, isExpression), visitNode((<PropertyAccessExpression>node).name, visitor, isIdentifier)); case SyntaxKind.ElementAccessExpression: return updateElementAccess(<ElementAccessExpression>node, visitNode((<ElementAccessExpression>node).expression, visitor, isExpression), visitNode((<ElementAccessExpression>node).argumentExpression, visitor, isExpression)); case SyntaxKind.CallExpression: return updateCall(<CallExpression>node, visitNode((<CallExpression>node).expression, visitor, isExpression), nodesVisitor((<CallExpression>node).typeArguments, visitor, isTypeNode), nodesVisitor((<CallExpression>node).arguments, visitor, isExpression)); case SyntaxKind.NewExpression: return updateNew(<NewExpression>node, visitNode((<NewExpression>node).expression, visitor, isExpression), nodesVisitor((<NewExpression>node).typeArguments, visitor, isTypeNode), nodesVisitor((<NewExpression>node).arguments, visitor, isExpression)); case SyntaxKind.TaggedTemplateExpression: return updateTaggedTemplate(<TaggedTemplateExpression>node, visitNode((<TaggedTemplateExpression>node).tag, visitor, isExpression), visitNodes((<TaggedTemplateExpression>node).typeArguments, visitor, isExpression), visitNode((<TaggedTemplateExpression>node).template, visitor, isTemplateLiteral)); case SyntaxKind.TypeAssertionExpression: return updateTypeAssertion(<TypeAssertion>node, visitNode((<TypeAssertion>node).type, visitor, isTypeNode), visitNode((<TypeAssertion>node).expression, visitor, isExpression)); case SyntaxKind.ParenthesizedExpression: return updateParen(<ParenthesizedExpression>node, visitNode((<ParenthesizedExpression>node).expression, visitor, isExpression)); case SyntaxKind.FunctionExpression: return updateFunctionExpression(<FunctionExpression>node, nodesVisitor((<FunctionExpression>node).modifiers, visitor, isModifier), visitNode((<FunctionExpression>node).asteriskToken, tokenVisitor, isToken), visitNode((<FunctionExpression>node).name, visitor, isIdentifier), nodesVisitor((<FunctionExpression>node).typeParameters, visitor, 
isTypeParameterDeclaration), visitParameterList((<FunctionExpression>node).parameters, visitor, context, nodesVisitor), visitNode((<FunctionExpression>node).type, visitor, isTypeNode), visitFunctionBody((<FunctionExpression>node).body, visitor, context)); case SyntaxKind.ArrowFunction: return updateArrowFunction(<ArrowFunction>node, nodesVisitor((<ArrowFunction>node).modifiers, visitor, isModifier), nodesVisitor((<ArrowFunction>node).typeParameters, visitor, isTypeParameterDeclaration), visitParameterList((<ArrowFunction>node).parameters, visitor, context, nodesVisitor), visitNode((<ArrowFunction>node).type, visitor, isTypeNode), visitNode((<ArrowFunction>node).equalsGreaterThanToken, visitor, isToken), visitFunctionBody((<ArrowFunction>node).body, visitor, context)); case SyntaxKind.DeleteExpression: return updateDelete(<DeleteExpression>node, visitNode((<DeleteExpression>node).expression, visitor, isExpression)); case SyntaxKind.TypeOfExpression: return updateTypeOf(<TypeOfExpression>node, visitNode((<TypeOfExpression>node).expression, visitor, isExpression)); case SyntaxKind.VoidExpression: return updateVoid(<VoidExpression>node, visitNode((<VoidExpression>node).expression, visitor, isExpression)); case SyntaxKind.AwaitExpression: return updateAwait(<AwaitExpression>node, visitNode((<AwaitExpression>node).expression, visitor, isExpression)); case SyntaxKind.PrefixUnaryExpression: return updatePrefix(<PrefixUnaryExpression>node, visitNode((<PrefixUnaryExpression>node).operand, visitor, isExpression)); case SyntaxKind.PostfixUnaryExpression: return updatePostfix(<PostfixUnaryExpression>node, visitNode((<PostfixUnaryExpression>node).operand, visitor, isExpression)); case SyntaxKind.BinaryExpression: return updateBinary(<BinaryExpression>node, visitNode((<BinaryExpression>node).left, visitor, isExpression), visitNode((<BinaryExpression>node).right, visitor, isExpression), visitNode((<BinaryExpression>node).operatorToken, visitor, isToken)); case SyntaxKind.ConditionalExpression: return updateConditional(<ConditionalExpression>node, visitNode((<ConditionalExpression>node).condition, visitor, isExpression), visitNode((<ConditionalExpression>node).questionToken, visitor, isToken), visitNode((<ConditionalExpression>node).whenTrue, visitor, isExpression), visitNode((<ConditionalExpression>node).colonToken, visitor, isToken), visitNode((<ConditionalExpression>node).whenFalse, visitor, isExpression)); case SyntaxKind.TemplateExpression: return updateTemplateExpression(<TemplateExpression>node, visitNode((<TemplateExpression>node).head, visitor, isTemplateHead), nodesVisitor((<TemplateExpression>node).templateSpans, visitor, isTemplateSpan)); case SyntaxKind.YieldExpression: return updateYield(<YieldExpression>node, visitNode((<YieldExpression>node).asteriskToken, tokenVisitor, isToken), visitNode((<YieldExpression>node).expression!, visitor, isExpression)); case SyntaxKind.SpreadElement: return updateSpread(<SpreadElement>node, visitNode((<SpreadElement>node).expression, visitor, isExpression)); case SyntaxKind.ClassExpression: return updateClassExpression(<ClassExpression>node, nodesVisitor((<ClassExpression>node).modifiers, visitor, isModifier), visitNode((<ClassExpression>node).name, visitor, isIdentifier), nodesVisitor((<ClassExpression>node).typeParameters, visitor, isTypeParameterDeclaration), nodesVisitor((<ClassExpression>node).heritageClauses, visitor, isHeritageClause), nodesVisitor((<ClassExpression>node).members, visitor, isClassElement)); case SyntaxKind.ExpressionWithTypeArguments: 
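// ExpressionWithTypeArguments (for example the target of an extends/implements heritage clause) is rebuilt from its visited type arguments and expression.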
return updateExpressionWithTypeArguments(<ExpressionWithTypeArguments>node, nodesVisitor((<ExpressionWithTypeArguments>node).typeArguments, visitor, isTypeNode), visitNode((<ExpressionWithTypeArguments>node).expression, visitor, isExpression)); case SyntaxKind.AsExpression: return updateAsExpression(<AsExpression>node, visitNode((<AsExpression>node).expression, visitor, isExpression), visitNode((<AsExpression>node).type, visitor, isTypeNode)); case SyntaxKind.NonNullExpression: return updateNonNullExpression(<NonNullExpression>node, visitNode((<NonNullExpression>node).expression, visitor, isExpression)); case SyntaxKind.MetaProperty: return updateMetaProperty(<MetaProperty>node, visitNode((<MetaProperty>node).name, visitor, isIdentifier)); // Misc case SyntaxKind.TemplateSpan: return updateTemplateSpan(<TemplateSpan>node, visitNode((<TemplateSpan>node).expression, visitor, isExpression), visitNode((<TemplateSpan>node).literal, visitor, isTemplateMiddleOrTemplateTail)); // Element case SyntaxKind.Block: return updateBlock(<Block>node, nodesVisitor((<Block>node).statements, visitor, isStatement)); case SyntaxKind.VariableStatement: return updateVariableStatement(<VariableStatement>node, nodesVisitor((<VariableStatement>node).modifiers, visitor, isModifier), visitNode((<VariableStatement>node).declarationList, visitor, isVariableDeclarationList)); case SyntaxKind.ExpressionStatement: return updateExpressionStatement(<ExpressionStatement>node, visitNode((<ExpressionStatement>node).expression, visitor, isExpression)); case SyntaxKind.IfStatement: return updateIf(<IfStatement>node, visitNode((<IfStatement>node).expression, visitor, isExpression), visitNode((<IfStatement>node).thenStatement, visitor, isStatement, liftToBlock), visitNode((<IfStatement>node).elseStatement, visitor, isStatement, liftToBlock)); case SyntaxKind.DoStatement: return updateDo(<DoStatement>node, visitNode((<DoStatement>node).statement, visitor, isStatement, liftToBlock), visitNode((<DoStatement>node).expression, visitor, isExpression)); case SyntaxKind.WhileStatement: return updateWhile(<WhileStatement>node, visitNode((<WhileStatement>node).expression, visitor, isExpression), visitNode((<WhileStatement>node).statement, visitor, isStatement, liftToBlock)); case SyntaxKind.ForStatement: return updateFor(<ForStatement>node, visitNode((<ForStatement>node).initializer, visitor, isForInitializer), visitNode((<ForStatement>node).condition, visitor, isExpression), visitNode((<ForStatement>node).incrementor, visitor, isExpression), visitNode((<ForStatement>node).statement, visitor, isStatement, liftToBlock)); case SyntaxKind.ForInStatement: return updateForIn(<ForInStatement>node, visitNode((<ForInStatement>node).initializer, visitor, isForInitializer), visitNode((<ForInStatement>node).expression, visitor, isExpression), visitNode((<ForInStatement>node).statement, visitor, isStatement, liftToBlock)); case SyntaxKind.ForOfStatement: return updateForOf(<ForOfStatement>node, visitNode((<ForOfStatement>node).awaitModifier, visitor, isToken), visitNode((<ForOfStatement>node).initializer, visitor, isForInitializer), visitNode((<ForOfStatement>node).expression, visitor, isExpression), visitNode((<ForOfStatement>node).statement, visitor, isStatement, liftToBlock)); case SyntaxKind.ContinueStatement: return updateContinue(<ContinueStatement>node, visitNode((<ContinueStatement>node).label, visitor, isIdentifier)); case SyntaxKind.BreakStatement: return updateBreak(<BreakStatement>node, visitNode((<BreakStatement>node).label, visitor, 
isIdentifier)); case SyntaxKind.ReturnStatement: return updateReturn(<ReturnStatement>node, visitNode((<ReturnStatement>node).expression, visitor, isExpression)); case SyntaxKind.WithStatement: return updateWith(<WithStatement>node, visitNode((<WithStatement>node).expression, visitor, isExpression), visitNode((<WithStatement>node).statement, visitor, isStatement, liftToBlock)); case SyntaxKind.SwitchStatement: return updateSwitch(<SwitchStatement>node, visitNode((<SwitchStatement>node).expression, visitor, isExpression), visitNode((<SwitchStatement>node).caseBlock, visitor, isCaseBlock)); case SyntaxKind.LabeledStatement: return updateLabel(<LabeledStatement>node, visitNode((<LabeledStatement>node).label, visitor, isIdentifier), visitNode((<LabeledStatement>node).statement, visitor, isStatement, liftToBlock)); case SyntaxKind.ThrowStatement: return updateThrow(<ThrowStatement>node, visitNode((<ThrowStatement>node).expression!, visitor, isExpression)); case SyntaxKind.TryStatement: return updateTry(<TryStatement>node, visitNode((<TryStatement>node).tryBlock, visitor, isBlock), visitNode((<TryStatement>node).catchClause, visitor, isCatchClause), visitNode((<TryStatement>node).finallyBlock, visitor, isBlock)); case SyntaxKind.VariableDeclaration: return updateVariableDeclaration(<VariableDeclaration>node, visitNode((<VariableDeclaration>node).name, visitor, isBindingName), visitNode((<VariableDeclaration>node).type, visitor, isTypeNode), visitNode((<VariableDeclaration>node).initializer, visitor, isExpression)); case SyntaxKind.VariableDeclarationList: return updateVariableDeclarationList(<VariableDeclarationList>node, nodesVisitor((<VariableDeclarationList>node).declarations, visitor, isVariableDeclaration)); case SyntaxKind.FunctionDeclaration: return updateFunctionDeclaration(<FunctionDeclaration>node, nodesVisitor((<FunctionDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<FunctionDeclaration>node).modifiers, visitor, isModifier), visitNode((<FunctionDeclaration>node).asteriskToken, tokenVisitor, isToken), visitNode((<FunctionDeclaration>node).name, visitor, isIdentifier), nodesVisitor((<FunctionDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration), visitParameterList((<FunctionDeclaration>node).parameters, visitor, context, nodesVisitor), visitNode((<FunctionDeclaration>node).type, visitor, isTypeNode), visitFunctionBody((<FunctionExpression>node).body, visitor, context)); case SyntaxKind.ClassDeclaration: return updateClassDeclaration(<ClassDeclaration>node, nodesVisitor((<ClassDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<ClassDeclaration>node).modifiers, visitor, isModifier), visitNode((<ClassDeclaration>node).name, visitor, isIdentifier), nodesVisitor((<ClassDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration), nodesVisitor((<ClassDeclaration>node).heritageClauses, visitor, isHeritageClause), nodesVisitor((<ClassDeclaration>node).members, visitor, isClassElement)); case SyntaxKind.InterfaceDeclaration: return updateInterfaceDeclaration(<InterfaceDeclaration>node, nodesVisitor((<InterfaceDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<InterfaceDeclaration>node).modifiers, visitor, isModifier), visitNode((<InterfaceDeclaration>node).name, visitor, isIdentifier), nodesVisitor((<InterfaceDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration), nodesVisitor((<InterfaceDeclaration>node).heritageClauses, visitor, isHeritageClause), nodesVisitor((<InterfaceDeclaration>node).members, 
visitor, isTypeElement)); case SyntaxKind.TypeAliasDeclaration: return updateTypeAliasDeclaration(<TypeAliasDeclaration>node, nodesVisitor((<TypeAliasDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<TypeAliasDeclaration>node).modifiers, visitor, isModifier), visitNode((<TypeAliasDeclaration>node).name, visitor, isIdentifier), nodesVisitor((<TypeAliasDeclaration>node).typeParameters, visitor, isTypeParameterDeclaration), visitNode((<TypeAliasDeclaration>node).type, visitor, isTypeNode)); case SyntaxKind.EnumDeclaration: return updateEnumDeclaration(<EnumDeclaration>node, nodesVisitor((<EnumDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<EnumDeclaration>node).modifiers, visitor, isModifier), visitNode((<EnumDeclaration>node).name, visitor, isIdentifier), nodesVisitor((<EnumDeclaration>node).members, visitor, isEnumMember)); case SyntaxKind.ModuleDeclaration: return updateModuleDeclaration(<ModuleDeclaration>node, nodesVisitor((<ModuleDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<ModuleDeclaration>node).modifiers, visitor, isModifier), visitNode((<ModuleDeclaration>node).name, visitor, isIdentifier), visitNode((<ModuleDeclaration>node).body, visitor, isModuleBody)); case SyntaxKind.ModuleBlock: return updateModuleBlock(<ModuleBlock>node, nodesVisitor((<ModuleBlock>node).statements, visitor, isStatement)); case SyntaxKind.CaseBlock: return updateCaseBlock(<CaseBlock>node, nodesVisitor((<CaseBlock>node).clauses, visitor, isCaseOrDefaultClause)); case SyntaxKind.NamespaceExportDeclaration: return updateNamespaceExportDeclaration(<NamespaceExportDeclaration>node, visitNode((<NamespaceExportDeclaration>node).name, visitor, isIdentifier)); case SyntaxKind.ImportEqualsDeclaration: return updateImportEqualsDeclaration(<ImportEqualsDeclaration>node, nodesVisitor((<ImportEqualsDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<ImportEqualsDeclaration>node).modifiers, visitor, isModifier), visitNode((<ImportEqualsDeclaration>node).name, visitor, isIdentifier), visitNode((<ImportEqualsDeclaration>node).moduleReference, visitor, isModuleReference)); case SyntaxKind.ImportDeclaration: return updateImportDeclaration(<ImportDeclaration>node, nodesVisitor((<ImportDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<ImportDeclaration>node).modifiers, visitor, isModifier), visitNode((<ImportDeclaration>node).importClause, visitor, isImportClause), visitNode((<ImportDeclaration>node).moduleSpecifier, visitor, isExpression)); case SyntaxKind.ImportClause: return updateImportClause(<ImportClause>node, visitNode((<ImportClause>node).name, visitor, isIdentifier), visitNode((<ImportClause>node).namedBindings, visitor, isNamedImportBindings)); case SyntaxKind.NamespaceImport: return updateNamespaceImport(<NamespaceImport>node, visitNode((<NamespaceImport>node).name, visitor, isIdentifier)); case SyntaxKind.NamedImports: return updateNamedImports(<NamedImports>node, nodesVisitor((<NamedImports>node).elements, visitor, isImportSpecifier)); case SyntaxKind.ImportSpecifier: return updateImportSpecifier(<ImportSpecifier>node, visitNode((<ImportSpecifier>node).propertyName, visitor, isIdentifier), visitNode((<ImportSpecifier>node).name, visitor, isIdentifier)); case SyntaxKind.ExportAssignment: return updateExportAssignment(<ExportAssignment>node, nodesVisitor((<ExportAssignment>node).decorators, visitor, isDecorator), nodesVisitor((<ExportAssignment>node).modifiers, visitor, isModifier), 
visitNode((<ExportAssignment>node).expression, visitor, isExpression)); case SyntaxKind.ExportDeclaration: return updateExportDeclaration(<ExportDeclaration>node, nodesVisitor((<ExportDeclaration>node).decorators, visitor, isDecorator), nodesVisitor((<ExportDeclaration>node).modifiers, visitor, isModifier), visitNode((<ExportDeclaration>node).exportClause, visitor, isNamedExports), visitNode((<ExportDeclaration>node).moduleSpecifier, visitor, isExpression)); case SyntaxKind.NamedExports: return updateNamedExports(<NamedExports>node, nodesVisitor((<NamedExports>node).elements, visitor, isExportSpecifier)); case SyntaxKind.ExportSpecifier: return updateExportSpecifier(<ExportSpecifier>node, visitNode((<ExportSpecifier>node).propertyName, visitor, isIdentifier), visitNode((<ExportSpecifier>node).name, visitor, isIdentifier)); // Module references case SyntaxKind.ExternalModuleReference: return updateExternalModuleReference(<ExternalModuleReference>node, visitNode((<ExternalModuleReference>node).expression, visitor, isExpression)); // JSX case SyntaxKind.JsxElement: return updateJsxElement(<JsxElement>node, visitNode((<JsxElement>node).openingElement, visitor, isJsxOpeningElement), nodesVisitor((<JsxElement>node).children, visitor, isJsxChild), visitNode((<JsxElement>node).closingElement, visitor, isJsxClosingElement)); case SyntaxKind.JsxSelfClosingElement: return updateJsxSelfClosingElement(<JsxSelfClosingElement>node, visitNode((<JsxSelfClosingElement>node).tagName, visitor, isJsxTagNameExpression), nodesVisitor((<JsxSelfClosingElement>node).typeArguments, visitor, isTypeNode), visitNode((<JsxSelfClosingElement>node).attributes, visitor, isJsxAttributes)); case SyntaxKind.JsxOpeningElement: return updateJsxOpeningElement(<JsxOpeningElement>node, visitNode((<JsxOpeningElement>node).tagName, visitor, isJsxTagNameExpression), nodesVisitor((<JsxSelfClosingElement>node).typeArguments, visitor, isTypeNode), visitNode((<JsxOpeningElement>node).attributes, visitor, isJsxAttributes)); case SyntaxKind.JsxClosingElement: return updateJsxClosingElement(<JsxClosingElement>node, visitNode((<JsxClosingElement>node).tagName, visitor, isJsxTagNameExpression)); case SyntaxKind.JsxFragment: return updateJsxFragment(<JsxFragment>node, visitNode((<JsxFragment>node).openingFragment, visitor, isJsxOpeningFragment), nodesVisitor((<JsxFragment>node).children, visitor, isJsxChild), visitNode((<JsxFragment>node).closingFragment, visitor, isJsxClosingFragment)); case SyntaxKind.JsxAttribute: return updateJsxAttribute(<JsxAttribute>node, visitNode((<JsxAttribute>node).name, visitor, isIdentifier), visitNode((<JsxAttribute>node).initializer!, visitor, isStringLiteralOrJsxExpression)); case SyntaxKind.JsxAttributes: return updateJsxAttributes(<JsxAttributes>node, nodesVisitor((<JsxAttributes>node).properties, visitor, isJsxAttributeLike)); case SyntaxKind.JsxSpreadAttribute: return updateJsxSpreadAttribute(<JsxSpreadAttribute>node, visitNode((<JsxSpreadAttribute>node).expression, visitor, isExpression)); case SyntaxKind.JsxExpression: return updateJsxExpression(<JsxExpression>node, visitNode((<JsxExpression>node).expression, visitor, isExpression)); // Clauses case SyntaxKind.CaseClause: return updateCaseClause(<CaseClause>node, visitNode((<CaseClause>node).expression, visitor, isExpression), nodesVisitor((<CaseClause>node).statements, visitor, isStatement)); case SyntaxKind.DefaultClause: return updateDefaultClause(<DefaultClause>node, nodesVisitor((<DefaultClause>node).statements, visitor, isStatement)); case 
SyntaxKind.HeritageClause: return updateHeritageClause(<HeritageClause>node, nodesVisitor((<HeritageClause>node).types, visitor, isExpressionWithTypeArguments)); case SyntaxKind.CatchClause: return updateCatchClause(<CatchClause>node, visitNode((<CatchClause>node).variableDeclaration, visitor, isVariableDeclaration), visitNode((<CatchClause>node).block, visitor, isBlock)); // Property assignments case SyntaxKind.PropertyAssignment: return updatePropertyAssignment(<PropertyAssignment>node, visitNode((<PropertyAssignment>node).name, visitor, isPropertyName), visitNode((<PropertyAssignment>node).initializer, visitor, isExpression)); case SyntaxKind.ShorthandPropertyAssignment: return updateShorthandPropertyAssignment(<ShorthandPropertyAssignment>node, visitNode((<ShorthandPropertyAssignment>node).name, visitor, isIdentifier), visitNode((<ShorthandPropertyAssignment>node).objectAssignmentInitializer, visitor, isExpression)); case SyntaxKind.SpreadAssignment: return updateSpreadAssignment(<SpreadAssignment>node, visitNode((<SpreadAssignment>node).expression, visitor, isExpression)); // Enum case SyntaxKind.EnumMember: return updateEnumMember(<EnumMember>node, visitNode((<EnumMember>node).name, visitor, isPropertyName), visitNode((<EnumMember>node).initializer, visitor, isExpression)); // Top-level nodes case SyntaxKind.SourceFile: return updateSourceFileNode(<SourceFile>node, visitLexicalEnvironment((<SourceFile>node).statements, visitor, context)); // Transformation nodes case SyntaxKind.PartiallyEmittedExpression: return updatePartiallyEmittedExpression(<PartiallyEmittedExpression>node, visitNode((<PartiallyEmittedExpression>node).expression, visitor, isExpression)); case SyntaxKind.CommaListExpression: return updateCommaList(<CommaListExpression>node, nodesVisitor((<CommaListExpression>node).elements, visitor, isExpression)); default: // No need to visit nodes with no children. return node; } } /** * Extracts the single node from a NodeArray. * * @param nodes The NodeArray. */ function extractSingleNode(nodes: ReadonlyArray<Node>): Node | undefined { Debug.assert(nodes.length <= 1, "Too many nodes written to output."); return singleOrUndefined(nodes); } } /* @internal */ namespace ts { function reduceNode<T>(node: Node | undefined, f: (memo: T, node: Node) => T, initial: T) { return node ? f(initial, node) : initial; } function reduceNodeArray<T>(nodes: NodeArray<Node> | undefined, f: (memo: T, nodes: NodeArray<Node>) => T, initial: T) { return nodes ? f(initial, nodes) : initial; } /** * Similar to `reduceLeft`, performs a reduction against each child of a node. * NOTE: Unlike `forEachChild`, this does *not* visit every node. * * @param node The node containing the children to reduce. * @param initial The initial value to supply to the reduction. * @param f The callback function */ export function reduceEachChild<T>(node: Node | undefined, initial: T, cbNode: (memo: T, node: Node) => T, cbNodeArray?: (memo: T, nodes: NodeArray<Node>) => T): T { if (node === undefined) { return initial; } const reduceNodes: (nodes: NodeArray<Node> | undefined, f: ((memo: T, node: Node) => T) | ((memo: T, node: NodeArray<Node>) => T), initial: T) => T = cbNodeArray ? reduceNodeArray : reduceLeft; const cbNodes = cbNodeArray || cbNode; const kind = node.kind; // No need to visit nodes with no children. if ((kind > SyntaxKind.FirstToken && kind <= SyntaxKind.LastToken)) { return initial; } // We do not yet support types. 
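// Note: unlike visitEachChild above, reduceEachChild bails out for the type-node kinds (SyntaxKind.TypePredicate through SyntaxKind.LiteralType) covered by the range check below.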
if ((kind >= SyntaxKind.TypePredicate && kind <= SyntaxKind.LiteralType)) { return initial; } let result = initial; switch (node.kind) { // Leaf nodes case SyntaxKind.SemicolonClassElement: case SyntaxKind.EmptyStatement: case SyntaxKind.OmittedExpression: case SyntaxKind.DebuggerStatement: case SyntaxKind.NotEmittedStatement: // No need to visit nodes with no children. break; // Names case SyntaxKind.QualifiedName: result = reduceNode((<QualifiedName>node).left, cbNode, result); result = reduceNode((<QualifiedName>node).right, cbNode, result); break; case SyntaxKind.ComputedPropertyName: result = reduceNode((<ComputedPropertyName>node).expression, cbNode, result); break; // Signature elements case SyntaxKind.Parameter: result = reduceNodes((<ParameterDeclaration>node).decorators, cbNodes, result); result = reduceNodes((<ParameterDeclaration>node).modifiers, cbNodes, result); result = reduceNode((<ParameterDeclaration>node).name, cbNode, result); result = reduceNode((<ParameterDeclaration>node).type, cbNode, result); result = reduceNode((<ParameterDeclaration>node).initializer, cbNode, result); break; case SyntaxKind.Decorator: result = reduceNode((<Decorator>node).expression, cbNode, result); break; // Type member case SyntaxKind.PropertySignature: result = reduceNodes((<PropertySignature>node).modifiers, cbNodes, result); result = reduceNode((<PropertySignature>node).name, cbNode, result); result = reduceNode((<PropertySignature>node).questionToken, cbNode, result); result = reduceNode((<PropertySignature>node).type, cbNode, result); result = reduceNode((<PropertySignature>node).initializer, cbNode, result); break; case SyntaxKind.PropertyDeclaration: result = reduceNodes((<PropertyDeclaration>node).decorators, cbNodes, result); result = reduceNodes((<PropertyDeclaration>node).modifiers, cbNodes, result); result = reduceNode((<PropertyDeclaration>node).name, cbNode, result); result = reduceNode((<PropertyDeclaration>node).type, cbNode, result); result = reduceNode((<PropertyDeclaration>node).initializer, cbNode, result); break; case SyntaxKind.MethodDeclaration: result = reduceNodes((<MethodDeclaration>node).decorators, cbNodes, result); result = reduceNodes((<MethodDeclaration>node).modifiers, cbNodes, result); result = reduceNode((<MethodDeclaration>node).name, cbNode, result); result = reduceNodes((<MethodDeclaration>node).typeParameters, cbNodes, result); result = reduceNodes((<MethodDeclaration>node).parameters, cbNodes, result); result = reduceNode((<MethodDeclaration>node).type, cbNode, result); result = reduceNode((<MethodDeclaration>node).body, cbNode, result); break; case SyntaxKind.Constructor: result = reduceNodes((<ConstructorDeclaration>node).modifiers, cbNodes, result); result = reduceNodes((<ConstructorDeclaration>node).parameters, cbNodes, result); result = reduceNode((<ConstructorDeclaration>node).body, cbNode, result); break; case SyntaxKind.GetAccessor: result = reduceNodes((<GetAccessorDeclaration>node).decorators, cbNodes, result); result = reduceNodes((<GetAccessorDeclaration>node).modifiers, cbNodes, result); result = reduceNode((<GetAccessorDeclaration>node).name, cbNode, result); result = reduceNodes((<GetAccessorDeclaration>node).parameters, cbNodes, result); result = reduceNode((<GetAccessorDeclaration>node).type, cbNode, result); result = reduceNode((<GetAccessorDeclaration>node).body, cbNode, result); break; case SyntaxKind.SetAccessor: result = reduceNodes((<GetAccessorDeclaration>node).decorators, cbNodes, result); result = 
reduceNodes((<GetAccessorDeclaration>node).modifiers, cbNodes, result); result = reduceNode((<GetAccessorDeclaration>node).name, cbNode, result); result = reduceNodes((<GetAccessorDeclaration>node).parameters, cbNodes, result); result = reduceNode((<GetAccessorDeclaration>node).body, cbNode, result); break; // Binding patterns case SyntaxKind.ObjectBindingPattern: case SyntaxKind.ArrayBindingPattern: result = reduceNodes((<BindingPattern>node).elements, cbNodes, result); break; case SyntaxKind.BindingElement: result = reduceNode((<BindingElement>node).propertyName, cbNode, result); result = reduceNode((<BindingElement>node).name, cbNode, result); result = reduceNode((<BindingElement>node).initializer, cbNode, result); break; // Expression case SyntaxKind.ArrayLiteralExpression: result = reduceNodes((<ArrayLiteralExpression>node).elements, cbNodes, result); break; case SyntaxKind.ObjectLiteralExpression: result = reduceNodes((<ObjectLiteralExpression>node).properties, cbNodes, result); break; case SyntaxKind.PropertyAccessExpression: result = reduceNode((<PropertyAccessExpression>node).expression, cbNode, result); result = reduceNode((<PropertyAccessExpression>node).name, cbNode, result); break; case SyntaxKind.ElementAccessExpression: result = reduceNode((<ElementAccessExpression>node).expression, cbNode, result); result = reduceNode((<ElementAccessExpression>node).argumentExpression, cbNode, result); break; case SyntaxKind.CallExpression: result = reduceNode((<CallExpression>node).expression, cbNode, result); result = reduceNodes((<CallExpression>node).typeArguments, cbNodes, result); result = reduceNodes((<CallExpression>node).arguments, cbNodes, result); break; case SyntaxKind.NewExpression: result = reduceNode((<NewExpression>node).expression, cbNode, result); result = reduceNodes((<NewExpression>node).typeArguments, cbNodes, result); result = reduceNodes((<NewExpression>node).arguments, cbNodes, result); break; case SyntaxKind.TaggedTemplateExpression: result = reduceNode((<TaggedTemplateExpression>node).tag, cbNode, result); result = reduceNodes((<TaggedTemplateExpression>node).typeArguments, cbNodes, result); result = reduceNode((<TaggedTemplateExpression>node).template, cbNode, result); break; case SyntaxKind.TypeAssertionExpression: result = reduceNode((<TypeAssertion>node).type, cbNode, result); result = reduceNode((<TypeAssertion>node).expression, cbNode, result); break; case SyntaxKind.FunctionExpression: result = reduceNodes((<FunctionExpression>node).modifiers, cbNodes, result); result = reduceNode((<FunctionExpression>node).name, cbNode, result); result = reduceNodes((<FunctionExpression>node).typeParameters, cbNodes, result); result = reduceNodes((<FunctionExpression>node).parameters, cbNodes, result); result = reduceNode((<FunctionExpression>node).type, cbNode, result); result = reduceNode((<FunctionExpression>node).body, cbNode, result); break; case SyntaxKind.ArrowFunction: result = reduceNodes((<ArrowFunction>node).modifiers, cbNodes, result); result = reduceNodes((<ArrowFunction>node).typeParameters, cbNodes, result); result = reduceNodes((<ArrowFunction>node).parameters, cbNodes, result); result = reduceNode((<ArrowFunction>node).type, cbNode, result); result = reduceNode((<ArrowFunction>node).body, cbNode, result); break; case SyntaxKind.ParenthesizedExpression: case SyntaxKind.DeleteExpression: case SyntaxKind.TypeOfExpression: case SyntaxKind.VoidExpression: case SyntaxKind.AwaitExpression: case SyntaxKind.YieldExpression: case SyntaxKind.SpreadElement: case 
SyntaxKind.NonNullExpression: result = reduceNode((<ParenthesizedExpression | DeleteExpression | TypeOfExpression | VoidExpression | AwaitExpression | YieldExpression | SpreadElement | NonNullExpression>node).expression, cbNode, result); break; case SyntaxKind.PrefixUnaryExpression: case SyntaxKind.PostfixUnaryExpression: result = reduceNode((<PrefixUnaryExpression | PostfixUnaryExpression>node).operand, cbNode, result); break; case SyntaxKind.BinaryExpression: result = reduceNode((<BinaryExpression>node).left, cbNode, result); result = reduceNode((<BinaryExpression>node).right, cbNode, result); break; case SyntaxKind.ConditionalExpression: result = reduceNode((<ConditionalExpression>node).condition, cbNode, result); result = reduceNode((<ConditionalExpression>node).whenTrue, cbNode, result); result = reduceNode((<ConditionalExpression>node).whenFalse, cbNode, result); break; case SyntaxKind.TemplateExpression: result = reduceNode((<TemplateExpression>node).head, cbNode, result); result = reduceNodes((<TemplateExpression>node).templateSpans, cbNodes, result); break; case SyntaxKind.ClassExpression: result = reduceNodes((<ClassExpression>node).modifiers, cbNodes, result); result = reduceNode((<ClassExpression>node).name, cbNode, result); result = reduceNodes((<ClassExpression>node).typeParameters, cbNodes, result); result = reduceNodes((<ClassExpression>node).heritageClauses, cbNodes, result); result = reduceNodes((<ClassExpression>node).members, cbNodes, result); break; case SyntaxKind.ExpressionWithTypeArguments: result = reduceNode((<ExpressionWithTypeArguments>node).expression, cbNode, result); result = reduceNodes((<ExpressionWithTypeArguments>node).typeArguments, cbNodes, result); break; case SyntaxKind.AsExpression: result = reduceNode((<AsExpression>node).expression, cbNode, result); result = reduceNode((<AsExpression>node).type, cbNode, result); break; // Misc case SyntaxKind.TemplateSpan: result = reduceNode((<TemplateSpan>node).expression, cbNode, result); result = reduceNode((<TemplateSpan>node).literal, cbNode, result); break; // Element case SyntaxKind.Block: result = reduceNodes((<Block>node).statements, cbNodes, result); break; case SyntaxKind.VariableStatement: result = reduceNodes((<VariableStatement>node).modifiers, cbNodes, result); result = reduceNode((<VariableStatement>node).declarationList, cbNode, result); break; case SyntaxKind.ExpressionStatement: result = reduceNode((<ExpressionStatement>node).expression, cbNode, result); break; case SyntaxKind.IfStatement: result = reduceNode((<IfStatement>node).expression, cbNode, result); result = reduceNode((<IfStatement>node).thenStatement, cbNode, result); result = reduceNode((<IfStatement>node).elseStatement, cbNode, result); break; case SyntaxKind.DoStatement: result = reduceNode((<DoStatement>node).statement, cbNode, result); result = reduceNode((<DoStatement>node).expression, cbNode, result); break; case SyntaxKind.WhileStatement: case SyntaxKind.WithStatement: result = reduceNode((<WhileStatement | WithStatement>node).expression, cbNode, result); result = reduceNode((<WhileStatement | WithStatement>node).statement, cbNode, result); break; case SyntaxKind.ForStatement: result = reduceNode((<ForStatement>node).initializer, cbNode, result); result = reduceNode((<ForStatement>node).condition, cbNode, result); result = reduceNode((<ForStatement>node).incrementor, cbNode, result); result = reduceNode((<ForStatement>node).statement, cbNode, result); break; case SyntaxKind.ForInStatement: case SyntaxKind.ForOfStatement: result 
= reduceNode((<ForInOrOfStatement>node).initializer, cbNode, result); result = reduceNode((<ForInOrOfStatement>node).expression, cbNode, result); result = reduceNode((<ForInOrOfStatement>node).statement, cbNode, result); break; case SyntaxKind.ReturnStatement: case SyntaxKind.ThrowStatement: result = reduceNode((<ReturnStatement>node).expression, cbNode, result); break; case SyntaxKind.SwitchStatement: result = reduceNode((<SwitchStatement>node).expression, cbNode, result); result = reduceNode((<SwitchStatement>node).caseBlock, cbNode, result); break; case SyntaxKind.LabeledStatement: result = reduceNode((<LabeledStatement>node).label, cbNode, result); result = reduceNode((<LabeledStatement>node).statement, cbNode, result); break; case SyntaxKind.TryStatement: result = reduceNode((<TryStatement>node).tryBlock, cbNode, result); result = reduceNode((<TryStatement>node).catchClause, cbNode, result); result = reduceNode((<TryStatement>node).finallyBlock, cbNode, result); break; case SyntaxKind.VariableDeclaration: result = reduceNode((<VariableDeclaration>node).name, cbNode, result); result = reduceNode((<VariableDeclaration>node).type, cbNode, result); result = reduceNode((<VariableDeclaration>node).initializer, cbNode, result); break; case SyntaxKind.VariableDeclarationList: result = reduceNodes((<VariableDeclarationList>node).declarations, cbNodes, result); break; case SyntaxKind.FunctionDeclaration: result = reduceNodes((<FunctionDeclaration>node).decorators, cbNodes, result); result = reduceNodes((<FunctionDeclaration>node).modifiers, cbNodes, result); result = reduceNode((<FunctionDeclaration>node).name, cbNode, result); result = reduceNodes((<FunctionDeclaration>node).typeParameters, cbNodes, result); result = reduceNodes((<FunctionDeclaration>node).parameters, cbNodes, result); result = reduceNode((<FunctionDeclaration>node).type, cbNode, result); result = reduceNode((<FunctionDeclaration>node).body, cbNode, result); break; case SyntaxKind.ClassDeclaration: result = reduceNodes((<ClassDeclaration>node).decorators, cbNodes, result); result = reduceNodes((<ClassDeclaration>node).modifiers, cbNodes, result); result = reduceNode((<ClassDeclaration>node).name, cbNode, result); result = reduceNodes((<ClassDeclaration>node).typeParameters, cbNodes, result); result = reduceNodes((<ClassDeclaration>node).heritageClauses, cbNodes, result); result = reduceNodes((<ClassDeclaration>node).members, cbNodes, result); break; case SyntaxKind.EnumDeclaration: result = reduceNodes((<EnumDeclaration>node).decorators, cbNodes, result); result = reduceNodes((<EnumDeclaration>node).modifiers, cbNodes, result); result = reduceNode((<EnumDeclaration>node).name, cbNode, result); result = reduceNodes((<EnumDeclaration>node).members, cbNodes, result); break; case SyntaxKind.ModuleDeclaration: result = reduceNodes((<ModuleDeclaration>node).decorators, cbNodes, result); result = reduceNodes((<ModuleDeclaration>node).modifiers, cbNodes, result); result = reduceNode((<ModuleDeclaration>node).name, cbNode, result); result = reduceNode((<ModuleDeclaration>node).body, cbNode, result); break; case SyntaxKind.ModuleBlock: result = reduceNodes((<ModuleBlock>node).statements, cbNodes, result); break; case SyntaxKind.CaseBlock: result = reduceNodes((<CaseBlock>node).clauses, cbNodes, result); break; case SyntaxKind.ImportEqualsDeclaration: result = reduceNodes((<ImportEqualsDeclaration>node).decorators, cbNodes, result); result = reduceNodes((<ImportEqualsDeclaration>node).modifiers, cbNodes, result); result = 
reduceNode((<ImportEqualsDeclaration>node).name, cbNode, result); result = reduceNode((<ImportEqualsDeclaration>node).moduleReference, cbNode, result); break; case SyntaxKind.ImportDeclaration: result = reduceNodes((<ImportDeclaration>node).decorators, cbNodes, result); result = reduceNodes((<ImportDeclaration>node).modifiers, cbNodes, result); result = reduceNode((<ImportDeclaration>node).importClause, cbNode, result); result = reduceNode((<ImportDeclaration>node).moduleSpecifier, cbNode, result); break; case SyntaxKind.ImportClause: result = reduceNode((<ImportClause>node).name, cbNode, result); result = reduceNode((<ImportClause>node).namedBindings, cbNode, result); break; case SyntaxKind.NamespaceImport: result = reduceNode((<NamespaceImport>node).name, cbNode, result); break; case SyntaxKind.NamedImports: case SyntaxKind.NamedExports: result = reduceNodes((<NamedImports | NamedExports>node).elements, cbNodes, result); break; case SyntaxKind.ImportSpecifier: case SyntaxKind.ExportSpecifier: result = reduceNode((<ImportSpecifier | ExportSpecifier>node).propertyName, cbNode, result); result = reduceNode((<ImportSpecifier | ExportSpecifier>node).name, cbNode, result); break; case SyntaxKind.ExportAssignment: result = reduceLeft((<ExportAssignment>node).decorators, cbNode, result); result = reduceLeft((<ExportAssignment>node).modifiers, cbNode, result); result = reduceNode((<ExportAssignment>node).expression, cbNode, result); break; case SyntaxKind.ExportDeclaration: result = reduceLeft((<ExportDeclaration>node).decorators, cbNode, result); result = reduceLeft((<ExportDeclaration>node).modifiers, cbNode, result); result = reduceNode((<ExportDeclaration>node).exportClause, cbNode, result); result = reduceNode((<ExportDeclaration>node).moduleSpecifier, cbNode, result); break; // Module references case SyntaxKind.ExternalModuleReference: result = reduceNode((<ExternalModuleReference>node).expression, cbNode, result); break; // JSX case SyntaxKind.JsxElement: result = reduceNode((<JsxElement>node).openingElement, cbNode, result); result = reduceLeft((<JsxElement>node).children, cbNode, result); result = reduceNode((<JsxElement>node).closingElement, cbNode, result); break; case SyntaxKind.JsxFragment: result = reduceNode((<JsxFragment>node).openingFragment, cbNode, result); result = reduceLeft((<JsxFragment>node).children, cbNode, result); result = reduceNode((<JsxFragment>node).closingFragment, cbNode, result); break; case SyntaxKind.JsxSelfClosingElement: case SyntaxKind.JsxOpeningElement: result = reduceNode((<JsxSelfClosingElement | JsxOpeningElement>node).tagName, cbNode, result); result = reduceNodes((<JsxSelfClosingElement | JsxOpeningElement>node).typeArguments, cbNode, result); result = reduceNode((<JsxSelfClosingElement | JsxOpeningElement>node).attributes, cbNode, result); break; case SyntaxKind.JsxAttributes: result = reduceNodes((<JsxAttributes>node).properties, cbNodes, result); break; case SyntaxKind.JsxClosingElement: result = reduceNode((<JsxClosingElement>node).tagName, cbNode, result); break; case SyntaxKind.JsxAttribute: result = reduceNode((<JsxAttribute>node).name, cbNode, result); result = reduceNode((<JsxAttribute>node).initializer, cbNode, result); break; case SyntaxKind.JsxSpreadAttribute: result = reduceNode((<JsxSpreadAttribute>node).expression, cbNode, result); break; case SyntaxKind.JsxExpression: result = reduceNode((<JsxExpression>node).expression, cbNode, result); break; // Clauses case SyntaxKind.CaseClause: result = reduceNode((<CaseClause>node).expression, 
cbNode, result); // falls through case SyntaxKind.DefaultClause: result = reduceNodes((<CaseClause | DefaultClause>node).statements, cbNodes, result); break; case SyntaxKind.HeritageClause: result = reduceNodes((<HeritageClause>node).types, cbNodes, result); break; case SyntaxKind.CatchClause: result = reduceNode((<CatchClause>node).variableDeclaration, cbNode, result); result = reduceNode((<CatchClause>node).block, cbNode, result); break; // Property assignments case SyntaxKind.PropertyAssignment: result = reduceNode((<PropertyAssignment>node).name, cbNode, result); result = reduceNode((<PropertyAssignment>node).initializer, cbNode, result); break; case SyntaxKind.ShorthandPropertyAssignment: result = reduceNode((<ShorthandPropertyAssignment>node).name, cbNode, result); result = reduceNode((<ShorthandPropertyAssignment>node).objectAssignmentInitializer, cbNode, result); break; case SyntaxKind.SpreadAssignment: result = reduceNode((<SpreadAssignment>node).expression, cbNode, result); break; // Enum case SyntaxKind.EnumMember: result = reduceNode((<EnumMember>node).name, cbNode, result); result = reduceNode((<EnumMember>node).initializer, cbNode, result); break; // Top-level nodes case SyntaxKind.SourceFile: result = reduceNodes((<SourceFile>node).statements, cbNodes, result); break; // Transformation nodes case SyntaxKind.PartiallyEmittedExpression: result = reduceNode((<PartiallyEmittedExpression>node).expression, cbNode, result); break; case SyntaxKind.CommaListExpression: result = reduceNodes((<CommaListExpression>node).elements, cbNodes, result); break; default: break; } return result; } /** * Merges generated lexical declarations into a new statement list. */ export function mergeLexicalEnvironment(statements: NodeArray<Statement>, declarations: ReadonlyArray<Statement> | undefined): NodeArray<Statement>; /** * Appends generated lexical declarations to an array of statements. */ export function mergeLexicalEnvironment(statements: Statement[], declarations: ReadonlyArray<Statement> | undefined): Statement[]; export function mergeLexicalEnvironment(statements: Statement[] | NodeArray<Statement>, declarations: ReadonlyArray<Statement> | undefined) { if (!some(declarations)) { return statements; } return isNodeArray(statements) ? setTextRange(createNodeArray(insertStatementsAfterStandardPrologue(statements.slice(), declarations)), statements) : insertStatementsAfterStandardPrologue(statements, declarations); } /** * Lifts a NodeArray containing only Statement nodes to a block. * * @param nodes The NodeArray. */ export function liftToBlock(nodes: ReadonlyArray<Node>): Statement { Debug.assert(every(nodes, isStatement), "Cannot lift nodes to a Block."); return <Statement>singleOrUndefined(nodes) || createBlock(<NodeArray<Statement>>nodes); } /** * Aggregates the TransformFlags for a Node and its subtree. */ export function aggregateTransformFlags<T extends Node>(node: T): T { aggregateTransformFlagsForNode(node); return node; } /** * Aggregates the TransformFlags for a Node and its subtree. The flags for the subtree are * computed first, then the transform flags for the current node are computed from the subtree * flags and the state of the current node. Finally, the transform flags of the node are * returned, excluding any flags that should not be included in its parent node's subtree * flags. 
*/ function aggregateTransformFlagsForNode(node: Node): TransformFlags { if (node === undefined) { return TransformFlags.None; } if (node.transformFlags & TransformFlags.HasComputedFlags) { return node.transformFlags & ~getTransformFlagsSubtreeExclusions(node.kind); } const subtreeFlags = aggregateTransformFlagsForSubtree(node); return computeTransformFlagsForNode(node, subtreeFlags); } function aggregateTransformFlagsForNodeArray(nodes: NodeArray<Node>): TransformFlags { if (nodes === undefined) { return TransformFlags.None; } let subtreeFlags = TransformFlags.None; let nodeArrayFlags = TransformFlags.None; for (const node of nodes) { subtreeFlags |= aggregateTransformFlagsForNode(node); nodeArrayFlags |= node.transformFlags & ~TransformFlags.HasComputedFlags; } nodes.transformFlags = nodeArrayFlags | TransformFlags.HasComputedFlags; return subtreeFlags; } /** * Aggregates the transform flags for the subtree of a node. */ function aggregateTransformFlagsForSubtree(node: Node): TransformFlags { // We do not transform ambient declarations or types, so there is no need to // recursively aggregate transform flags. if (hasModifier(node, ModifierFlags.Ambient) || (isTypeNode(node) && node.kind !== SyntaxKind.ExpressionWithTypeArguments)) { return TransformFlags.None; } // Aggregate the transform flags of each child. return reduceEachChild(node, TransformFlags.None, aggregateTransformFlagsForChildNode, aggregateTransformFlagsForChildNodes); } /** * Aggregates the TransformFlags of a child node with the TransformFlags of its * siblings. */ function aggregateTransformFlagsForChildNode(transformFlags: TransformFlags, node: Node): TransformFlags { return transformFlags | aggregateTransformFlagsForNode(node); } function aggregateTransformFlagsForChildNodes(transformFlags: TransformFlags, nodes: NodeArray<Node>): TransformFlags { return transformFlags | aggregateTransformFlagsForNodeArray(nodes); } export namespace Debug { let isDebugInfoEnabled = false; export function failBadSyntaxKind(node: Node, message?: string): never { return fail( `${message || "Unexpected node."}\r\nNode ${formatSyntaxKind(node.kind)} was unexpected.`, failBadSyntaxKind); } export const assertEachNode = shouldAssert(AssertionLevel.Normal) ? (nodes: Node[], test: (node: Node) => boolean, message?: string): void => assert( test === undefined || every(nodes, test), message || "Unexpected node.", () => `Node array did not pass test '${getFunctionName(test)}'.`, assertEachNode) : noop; export const assertNode = shouldAssert(AssertionLevel.Normal) ? (node: Node | undefined, test: ((node: Node | undefined) => boolean) | undefined, message?: string): void => assert( test === undefined || test(node), message || "Unexpected node.", () => `Node ${formatSyntaxKind(node!.kind)} did not pass test '${getFunctionName(test!)}'.`, assertNode) : noop; export const assertOptionalNode = shouldAssert(AssertionLevel.Normal) ? (node: Node, test: (node: Node) => boolean, message?: string): void => assert( test === undefined || node === undefined || test(node), message || "Unexpected node.", () => `Node ${formatSyntaxKind(node.kind)} did not pass test '${getFunctionName(test)}'.`, assertOptionalNode) : noop; export const assertOptionalToken = shouldAssert(AssertionLevel.Normal) ? 
(node: Node, kind: SyntaxKind, message?: string): void => assert( kind === undefined || node === undefined || node.kind === kind, message || "Unexpected node.", () => `Node ${formatSyntaxKind(node.kind)} was not a '${formatSyntaxKind(kind)}' token.`, assertOptionalToken) : noop; export const assertMissingNode = shouldAssert(AssertionLevel.Normal) ? (node: Node, message?: string): void => assert( node === undefined, message || "Unexpected node.", () => `Node ${formatSyntaxKind(node.kind)} was unexpected'.`, assertMissingNode) : noop; /** * Injects debug information into frequently used types. */ export function enableDebugInfo() { if (isDebugInfoEnabled) return; // Add additional properties in debug mode to assist with debugging. Object.defineProperties(objectAllocator.getSymbolConstructor().prototype, { __debugFlags: { get(this: Symbol) { return formatSymbolFlags(this.flags); } } }); Object.defineProperties(objectAllocator.getTypeConstructor().prototype, { __debugFlags: { get(this: Type) { return formatTypeFlags(this.flags); } }, __debugObjectFlags: { get(this: Type) { return this.flags & TypeFlags.Object ? formatObjectFlags((<ObjectType>this).objectFlags) : ""; } }, __debugTypeToString: { value(this: Type) { return this.checker.typeToString(this); } }, }); const nodeConstructors = [ objectAllocator.getNodeConstructor(), objectAllocator.getIdentifierConstructor(), objectAllocator.getTokenConstructor(), objectAllocator.getSourceFileConstructor() ]; for (const ctor of nodeConstructors) { if (!ctor.prototype.hasOwnProperty("__debugKind")) { Object.defineProperties(ctor.prototype, { __debugKind: { get(this: Node) { return formatSyntaxKind(this.kind); } }, __debugModifierFlags: { get(this: Node) { return formatModifierFlags(getModifierFlagsNoCache(this)); } }, __debugTransformFlags: { get(this: Node) { return formatTransformFlags(this.transformFlags); } }, __debugEmitFlags: { get(this: Node) { return formatEmitFlags(getEmitFlags(this)); } }, __debugGetText: { value(this: Node, includeTrivia?: boolean) { if (nodeIsSynthesized(this)) return ""; const parseNode = getParseTreeNode(this); const sourceFile = parseNode && getSourceFileOfNode(parseNode); return sourceFile ? getSourceTextOfNodeFromSourceFile(sourceFile, parseNode, includeTrivia) : ""; } } }); } } isDebugInfoEnabled = true; } } }<|fim▁end|>
export function visitEachChild(node: Node | undefined, visitor: Visitor, context: TransformationContext, nodesVisitor = visitNodes, tokenVisitor?: Visitor): Node | undefined { if (node === undefined) { return undefined;
<|file_name|>reject.js<|end_file_name|><|fim▁begin|>const filter = require('./filter') module.exports = (collection, test, callback) => { return filter(collection, function(value, index, collection) { return !test(value, index, collection)<|fim▁hole|><|fim▁end|>
}, callback) }
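# The reject.js row above derives reject() from filter() by negating the test
# predicate. Below is the same idea in plain Python, purely for illustration:
# the async callback parameter is left out, and the helper name is just a
# local example, not part of the corpus row.
def reject(collection, test):
    # keep only the values for which the predicate is falsy
    return [value for index, value in enumerate(collection)
            if not test(value, index, collection)]

print(reject([1, 2, 3, 4], lambda value, index, collection: value % 2 == 0))  # -> [1, 3]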
<|file_name|>lcd_display.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- # # Urwid LCD display module # Copyright (C) 2010 Ian Ward # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA # # Urwid web site: http://excess.org/urwid/ from .display_common import BaseScreen import time class LCDScreen(BaseScreen): def set_terminal_properties(self, colors=None, bright_is_bold=None, has_underline=None): pass def set_mouse_tracking(self, enable=True): pass def start(self): pass def stop(self): pass def set_input_timeouts(self, *args): pass def reset_default_terminal_palette(self, *args): pass def run_wrapper(self,fn): return fn() def draw_screen(self, xxx_todo_changeme, r ): (cols, rows) = xxx_todo_changeme pass def clear(self): pass def get_cols_rows(self): return self.DISPLAY_SIZE class CFLCDScreen(LCDScreen): """ Common methods for Crystal Fontz LCD displays """ KEYS = [None, # no key with code 0 'up_press', 'down_press', 'left_press', 'right_press', 'enter_press', 'exit_press', 'up_release', 'down_release', 'left_release', 'right_release', 'enter_release', 'exit_release', 'ul_press', 'ur_press', 'll_press', 'lr_press', 'ul_release', 'ur_release', 'll_release', 'lr_release'] CMD_PING = 0 CMD_VERSION = 1 CMD_CLEAR = 6 CMD_CGRAM = 9 CMD_CURSOR_POSITION = 11 # data = [col, row] CMD_CURSOR_STYLE = 12 # data = [style (0-4)] CMD_LCD_CONTRAST = 13 # data = [contrast (0-255)] CMD_BACKLIGHT = 14 # data = [power (0-100)] CMD_LCD_DATA = 31 # data = [col, row] + text CMD_GPO = 34 # data = [pin(0-12), value(0-100)] # sent from device CMD_KEY_ACTIVITY = 0x80 CMD_ACK = 0x40 # in high two bits ie. & 0xc0 CURSOR_NONE = 0 CURSOR_BLINKING_BLOCK = 1 CURSOR_UNDERSCORE = 2 CURSOR_BLINKING_BLOCK_UNDERSCORE = 3 CURSOR_INVERTING_BLINKING_BLOCK = 4 MAX_PACKET_DATA_LENGTH = 22 colors = 1 has_underline = False def __init__(self, device_path, baud): """ device_path -- eg. '/dev/ttyUSB0' baud -- baud rate """ super(CFLCDScreen, self).__init__() self.device_path = device_path from serial import Serial self._device = Serial(device_path, baud, timeout=0) self._unprocessed = "" @classmethod def get_crc(cls, buf): # This seed makes the output of this shift based algorithm match # the table based algorithm. The center 16 bits of the 32-bit # "newCRC" are used for the CRC. The MSB of the lower byte is used # to see what bit was shifted out of the center 16 bit CRC # accumulator ("carry flag analog"); newCRC = 0x00F32100 for byte in buf: # Push this byte’s bits through a software # implementation of a hardware shift & xor. for bit_count in range(8): # Shift the CRC accumulator newCRC >>= 1 # The new MSB of the CRC accumulator comes # from the LSB of the current data byte. 
if ord(byte) & (0x01 << bit_count): newCRC |= 0x00800000 # If the low bit of the current CRC accumulator was set # before the shift, then we need to XOR the accumulator # with the polynomial (center 16 bits of 0x00840800) if newCRC & 0x00000080: newCRC ^= 0x00840800 # All the data has been done. Do 16 more bits of 0 data. for bit_count in range(16): # Shift the CRC accumulator newCRC >>= 1 # If the low bit of the current CRC accumulator was set # before the shift we need to XOR the accumulator with # 0x00840800. if newCRC & 0x00000080: newCRC ^= 0x00840800 # Return the center 16 bits, making this CRC match the one’s # complement that is sent in the packet. return ((~newCRC)>>8) & 0xffff def _send_packet(self, command, data): """ low-level packet sending. Following the protocol requires waiting for ack packet between sending each packet to the device. """ buf = chr(command) + chr(len(data)) + data crc = self.get_crc(buf) buf = buf + chr(crc & 0xff) + chr(crc >> 8) self._device.write(buf) def _read_packet(self): """ low-level packet reading. returns (command/report code, data) or None This method stored data read and tries to resync when bad data is received. """ # pull in any new data available self._unprocessed = self._unprocessed + self._device.read() while True: try: command, data, unprocessed = self._parse_data(self._unprocessed) self._unprocessed = unprocessed return command, data except self.MoreDataRequired: return except self.InvalidPacket: # throw out a byte and try to parse again self._unprocessed = self._unprocessed[1:] class InvalidPacket(Exception): pass class MoreDataRequired(Exception): pass @classmethod def _parse_data(cls, data): """ Try to read a packet from the start of data, returning (command/report code, packet_data, remaining_data) or raising InvalidPacket or MoreDataRequired """ if len(data) < 2: raise cls.MoreDataRequired command = ord(data[0]) plen = ord(data[1]) if plen > cls.MAX_PACKET_DATA_LENGTH: raise cls.InvalidPacket("length value too large") if len(data) < plen + 4: raise cls.MoreDataRequired crc = cls.get_crc(data[:2 + plen]) pcrc = ord(data[2 + plen]) + (ord(data[3 + plen]) << 8 ) if crc != pcrc: raise cls.InvalidPacket("CRC doesn't match") return (command, data[2:2 + plen], data[4 + plen:]) class KeyRepeatSimulator(object): """ Provide simulated repeat key events when given press and release events. If two or more keys are pressed disable repeating until all keys are released. """ def __init__(self, repeat_delay, repeat_next): """ repeat_delay -- seconds to wait before starting to repeat keys repeat_next -- time between each repeated key """ self.repeat_delay = repeat_delay self.repeat_next = repeat_next self.pressed = {} self.multiple_pressed = False def press(self, key): if self.pressed: self.multiple_pressed = True self.pressed[key] = time.time() def release(self, key): if key not in self.pressed: return # ignore extra release events<|fim▁hole|> if not self.pressed: self.multiple_pressed = False def next_event(self): """ Return (remaining, key) where remaining is the number of seconds (float) until the key repeat event should be sent, or None if no events are pending. 
""" if len(self.pressed) != 1 or self.multiple_pressed: return for key in self.pressed: return max(0, self.pressed[key] + self.repeat_delay - time.time()), key def sent_event(self): """ Cakk this method when you have sent a key repeat event so the timer will be reset for the next event """ if len(self.pressed) != 1: return # ignore event that shouldn't have been sent for key in self.pressed: self.pressed[key] = ( time.time() - self.repeat_delay + self.repeat_next) return class CF635Screen(CFLCDScreen): """ Crystal Fontz 635 display 20x4 character display + cursor no foreground/background colors or settings supported see CGROM for list of close unicode matches to characters available 6 button input up, down, left, right, enter (check mark), exit (cross) """ DISPLAY_SIZE = (20, 4) # ① through ⑧ are programmable CGRAM (chars 0-7, repeated at 8-15) # double arrows (⇑⇓) appear as double arrowheads (chars 18, 19) # ⑴ resembles a bell # ⑵ resembles a filled-in "Y" # ⑶ is the letters "Pt" together # partial blocks (▇▆▄▃▁) are actually shorter versions of (▉▋▌▍▏) # both groups are intended to draw horizontal bars with pixel # precision, use ▇*[▆▄▃▁]? for a thin bar or ▉*[▋▌▍▏]? for a thick bar CGROM = ( "①②③④⑤⑥⑦⑧①②③④⑤⑥⑦⑧" "►◄⇑⇓«»↖↗↙↘▲▼↲^ˇ█" " !\"#¤%&'()*+,-./" "0123456789:;<=>?" "¡ABCDEFGHIJKLMNO" "PQRSTUVWXYZÄÖÑܧ" "¿abcdefghijklmno" "pqrstuvwxyzäöñüà" "⁰¹²³⁴⁵⁶⁷⁸⁹½¼±≥≤μ" "♪♫⑴♥♦⑵⌜⌟“”()αɛδ∞" "@£$¥èéùìòÇᴾØøʳÅå" "⌂¢ΦτλΩπΨΣθΞ♈ÆæßÉ" "ΓΛΠϒ_ÈÊêçğŞşİι~◊" "▇▆▄▃▁ƒ▉▋▌▍▏⑶◽▪↑→" "↓←ÁÍÓÚÝáíóúýÔôŮů" "ČĔŘŠŽčĕřšž[\]{|}") cursor_style = CFLCDScreen.CURSOR_INVERTING_BLINKING_BLOCK def __init__(self, device_path, baud=115200, repeat_delay=0.5, repeat_next=0.125, key_map=['up', 'down', 'left', 'right', 'enter', 'esc']): """ device_path -- eg. '/dev/ttyUSB0' baud -- baud rate repeat_delay -- seconds to wait before starting to repeat keys repeat_next -- time between each repeated key key_map -- the keys to send for this device's buttons """ super(CF635Screen, self).__init__(device_path, baud) self.repeat_delay = repeat_delay self.repeat_next = repeat_next self.key_repeat = KeyRepeatSimulator(repeat_delay, repeat_next) self.key_map = key_map self._last_command = None self._last_command_time = 0 self._command_queue = [] self._screen_buf = None self._previous_canvas = None self._update_cursor = False def get_input_descriptors(self): """ return the fd from our serial device so we get called on input and responses """ return [self._device.fd] def get_input_nonblocking(self): """ Return a (next_input_timeout, keys_pressed, raw_keycodes) tuple. The protocol for our device requires waiting for acks between each command, so this method responds to those as well as key press and release events. Key repeat events are simulated here as the device doesn't send any for us. raw_keycodes are the bytes of messages we received, which might not seem to have any correspondence to keys_pressed. 
""" input = [] raw_input = [] timeout = None while True: packet = self._read_packet() if not packet: break command, data = packet if command == self.CMD_KEY_ACTIVITY and data: d0 = ord(data[0]) if 1 <= d0 <= 12: release = d0 > 6 keycode = d0 - (release * 6) - 1 key = self.key_map[keycode] if release: self.key_repeat.release(key) else: input.append(key) self.key_repeat.press(key) raw_input.append(d0) elif command & 0xc0 == 0x40: # "ACK" if command & 0x3f == self._last_command: self._send_next_command() next_repeat = self.key_repeat.next_event() if next_repeat: timeout, key = next_repeat if not timeout: input.append(key) self.key_repeat.sent_event() timeout = None return timeout, input, [] def _send_next_command(self): """ send out the next command in the queue """ if not self._command_queue: self._last_command = None return command, data = self._command_queue.pop(0) self._send_packet(command, data) self._last_command = command # record command for ACK self._last_command_time = time.time() def queue_command(self, command, data): self._command_queue.append((command, data)) # not waiting? send away! if self._last_command is None: self._send_next_command() def draw_screen(self, size, canvas): assert size == self.DISPLAY_SIZE if self._screen_buf: osb = self._screen_buf else: osb = [] sb = [] y = 0 for row in canvas.content(): text = [] for a, cs, run in row: text.append(run) if not osb or osb[y] != text: self.queue_command(self.CMD_LCD_DATA, chr(0) + chr(y) + "".join(text)) sb.append(text) y += 1 if (self._previous_canvas and self._previous_canvas.cursor == canvas.cursor and (not self._update_cursor or not canvas.cursor)): pass elif canvas.cursor is None: self.queue_command(self.CMD_CURSOR_STYLE, chr(self.CURSOR_NONE)) else: x, y = canvas.cursor self.queue_command(self.CMD_CURSOR_POSITION, chr(x) + chr(y)) self.queue_command(self.CMD_CURSOR_STYLE, chr(self.cursor_style)) self._update_cursor = False self._screen_buf = sb self._previous_canvas = canvas def program_cgram(self, index, data): """ Program character data. Characters available as chr(0) through chr(7), and repeated as chr(8) through chr(15). index -- 0 to 7 index of character to program data -- list of 8, 6-bit integer values top to bottom with MSB on the left side of the character. """ assert 0 <= index <= 7 assert len(data) == 8 self.queue_command(self.CMD_CGRAM, chr(index) + "".join([chr(x) for x in data])) def set_cursor_style(self, style): """ style -- CURSOR_BLINKING_BLOCK, CURSOR_UNDERSCORE, CURSOR_BLINKING_BLOCK_UNDERSCORE or CURSOR_INVERTING_BLINKING_BLOCK """ assert 1 <= style <= 4 self.cursor_style = style self._update_cursor = True def set_backlight(self, value): """ Set backlight brightness value -- 0 to 100 """ assert 0 <= value <= 100 self.queue_command(self.CMD_BACKLIGHT, chr(value)) def set_lcd_contrast(self, value): """ value -- 0 to 255 """ assert 0 <= value <= 255 self.queue_command(self.CMD_LCD_CONTRAST, chr(value)) def set_led_pin(self, led, rg, value): """ led -- 0 to 3 rg -- 0 for red, 1 for green value -- 0 to 100 """ assert 0 <= led <= 3 assert rg in (0, 1) assert 0 <= value <= 100 self.queue_command(self.CMD_GPO, chr(12 - 2 * led - rg) + chr(value))<|fim▁end|>
del self.pressed[key]
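# A minimal usage sketch for the KeyRepeatSimulator class shown in the row
# above. The import path is an assumption (the file header identifies it as
# urwid's LCD display module), and the key name 'enter' is just an example.
import time
from urwid.lcd_display import KeyRepeatSimulator  # assumed import path

sim = KeyRepeatSimulator(repeat_delay=0.5, repeat_next=0.125)
sim.press('enter')                  # physical key went down
time.sleep(0.6)                     # wait past the initial repeat delay
event = sim.next_event()            # (seconds_until_repeat, key) or None
if event is not None:
    remaining, key = event
    if remaining <= 0:
        print('simulated repeat for', key)
        sim.sent_event()            # reset the timer for the next repeat
sim.release('enter')                # releasing the key stops the repeats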
<|file_name|>RawOuter.java<|end_file_name|><|fim▁begin|>package com.coekie.gentyref.factory; <|fim▁hole|>@SuppressWarnings("rawtypes") public class RawOuter extends GenericOuter {}<|fim▁end|>
<|file_name|>DeletionBuilderTest.java<|end_file_name|><|fim▁begin|>package net.zer0bandwidth.android.lib.content.querybuilder; import android.content.ContentResolver; import android.support.test.runner.AndroidJUnit4; import android.test.ProviderTestCase2; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; import static junit.framework.Assert.assertNull; /** * Exercises {@link DeletionBuilder}. * @since zer0bandwidth-net/android 0.1.7 (#39) */ @RunWith( AndroidJUnit4.class ) public class DeletionBuilderTest extends ProviderTestCase2<MockContentProvider> { protected QueryBuilderTest.MockContext m_mockery = new QueryBuilderTest.MockContext() ; @SuppressWarnings( "unused" ) // sAuthority is intentionally ignored public DeletionBuilderTest() { super( MockContentProvider.class, QueryBuilderTest.MockContext.AUTHORITY ) ; } @Override @Before public void setUp() throws Exception { super.setUp() ; } /** Exercises {@link DeletionBuilder#deleteAll} */ @Test public void testDeleteAll() { DeletionBuilder qb = new DeletionBuilder( m_mockery.ctx, m_mockery.uri ) ; qb.m_sExplicitWhereFormat = "qarnflarglebarg" ; qb.m_asExplicitWhereParams = new String[] { "foo", "bar", "baz" } ; qb.deleteAll() ;<|fim▁hole|> /** Exercises {@link DeletionBuilder#executeQuery}. */ @Test public void testExecuteQuery() throws Exception // Any uncaught exception is a failure. { ContentResolver rslv = this.getMockContentResolver() ; int nDeleted = QueryBuilder.deleteFrom( rslv, m_mockery.uri ).execute(); assertEquals( MockContentProvider.EXPECTED_DELETE_COUNT, nDeleted ) ; } }<|fim▁end|>
assertNull( qb.m_sExplicitWhereFormat ) ; assertNull( qb.m_asExplicitWhereParams ) ; }
<|file_name|>router_policy_test.py<|end_file_name|><|fim▁begin|># # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import json from qpid_dispatch_internal.policy.policy_util import HostAddr, is_ipv6_enabled from qpid_dispatch_internal.policy.policy_util import HostStruct from qpid_dispatch_internal.policy.policy_util import PolicyError from qpid_dispatch_internal.policy.policy_util import PolicyAppConnectionMgr from qpid_dispatch_internal.policy.policy_local import PolicyLocal from system_test import unittest from system_test import TestCase, main_module <|fim▁hole|> class PolicyHostAddrTest(TestCase): def expect_deny(self, badhostname, msg): denied = False try: xxx = HostStruct(badhostname) except PolicyError: denied = True self.assertTrue(denied, ("%s" % msg)) def check_hostaddr_match(self, tHostAddr, tString, expectOk=True): # check that the string is a match for the addr # check that the internal struct version matches, too ha = HostStruct(tString) if expectOk: self.assertTrue(tHostAddr.match_str(tString)) self.assertTrue(tHostAddr.match_bin(ha)) else: self.assertFalse(tHostAddr.match_str(tString)) self.assertFalse(tHostAddr.match_bin(ha)) def test_policy_hostaddr_ipv4(self): # Create simple host and range aaa = HostAddr("192.168.1.1") bbb = HostAddr("1.1.1.1,1.1.1.255") # Verify host and range self.check_hostaddr_match(aaa, "192.168.1.1") self.check_hostaddr_match(aaa, "1.1.1.1", False) self.check_hostaddr_match(aaa, "192.168.1.2", False) self.check_hostaddr_match(bbb, "1.1.1.1") self.check_hostaddr_match(bbb, "1.1.1.254") self.check_hostaddr_match(bbb, "1.1.1.0", False) self.check_hostaddr_match(bbb, "1.1.2.0", False) def test_policy_hostaddr_ipv6(self): if not is_ipv6_enabled(): self.skipTest("System IPv6 support is not available") # Create simple host and range aaa = HostAddr("::1") bbb = HostAddr("::1,::ffff") ccc = HostAddr("ffff::0,ffff:ffff::0") # Verify host and range self.check_hostaddr_match(aaa, "::1") self.check_hostaddr_match(aaa, "::2", False) self.check_hostaddr_match(aaa, "ffff:ffff::0", False) self.check_hostaddr_match(bbb, "::1") self.check_hostaddr_match(bbb, "::fffe") self.check_hostaddr_match(bbb, "::1:0", False) self.check_hostaddr_match(bbb, "ffff::0", False) self.check_hostaddr_match(ccc, "ffff::1") self.check_hostaddr_match(ccc, "ffff:fffe:ffff:ffff::ffff") self.check_hostaddr_match(ccc, "ffff:ffff::1", False) self.check_hostaddr_match(ccc, "ffff:ffff:ffff:ffff::ffff", False) def test_policy_hostaddr_ipv4_wildcard(self): aaa = HostAddr("*") self.check_hostaddr_match(aaa, "0.0.0.0") self.check_hostaddr_match(aaa, "127.0.0.1") self.check_hostaddr_match(aaa, "255.254.253.252") def test_policy_hostaddr_ipv6_wildcard(self): if not is_ipv6_enabled(): self.skipTest("System IPv6 support is not available") aaa = HostAddr("*") 
self.check_hostaddr_match(aaa, "::0") self.check_hostaddr_match(aaa, "::1") self.check_hostaddr_match(aaa, "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff") def test_policy_malformed_hostaddr_ipv4(self): self.expect_deny("0.0.0.0.0", "Name or service not known") self.expect_deny("1.1.1.1,2.2.2.2,3.3.3.3", "arg count") self.expect_deny("9.9.9.9,8.8.8.8", "a > b") def test_policy_malformed_hostaddr_ipv6(self): if not is_ipv6_enabled(): self.skipTest("System IPv6 support is not available") self.expect_deny("1::2::3", "Name or service not known") self.expect_deny("::1,::2,::3", "arg count") self.expect_deny("0:ff:0,0:fe:ffff:ffff::0", "a > b") class QpidDispatch: def qd_dispatch_policy_c_counts_alloc(self): return 100 def qd_dispatch_policy_c_counts_refresh(self, cstats, entitymap): pass class MockAgent: def __init__(self): self.qd = QpidDispatch() def add_implementation(self, entity, cfg_obj_name): pass class MockPolicyManager: def __init__(self): self.agent = MockAgent() self.logs = [] def log_debug(self, text): print("DEBUG: %s" % text) self.logs.append(text) def log_info(self, text): print("INFO: %s" % text) self.logs.append(text) def log_trace(self, text): print("TRACE: %s" % text) self.logs.append(text) def log_error(self, text): print("ERROR: %s" % text) self.logs.append(text) def log_warning(self, text): print("WARNING: %s" % text) self.logs.append(text) def get_agent(self): return self.agent class PolicyFile(TestCase): manager = MockPolicyManager() policy = PolicyLocal(manager) policy.test_load_config() def test_policy1_test_zeke_ok(self): p1 = PolicyFile.policy.lookup_user('zeke', '192.168.100.5', 'photoserver', '192.168.100.5:33333', 1) self.assertTrue(p1 == 'test') upolicy = {} self.assertTrue( PolicyFile.policy.lookup_settings('photoserver', p1, upolicy) ) self.assertTrue(upolicy['maxFrameSize'] == 444444) self.assertTrue(upolicy['maxMessageSize'] == 444444) self.assertTrue(upolicy['maxSessionWindow'] == 444444) self.assertTrue(upolicy['maxSessions'] == 4) self.assertTrue(upolicy['maxSenders'] == 44) self.assertTrue(upolicy['maxReceivers'] == 44) self.assertTrue(upolicy['allowAnonymousSender']) self.assertTrue(upolicy['allowDynamicSource']) self.assertTrue(upolicy['targets'] == 'a,private,') self.assertTrue(upolicy['sources'] == 'a,private,') def test_policy1_test_zeke_bad_IP(self): self.assertTrue( PolicyFile.policy.lookup_user('zeke', '10.18.0.1', 'photoserver', "connid", 2) == '') self.assertTrue( PolicyFile.policy.lookup_user('zeke', '72.135.2.9', 'photoserver', "connid", 3) == '') self.assertTrue( PolicyFile.policy.lookup_user('zeke', '127.0.0.1', 'photoserver', "connid", 4) == '') def test_policy1_test_zeke_bad_app(self): self.assertTrue( PolicyFile.policy.lookup_user('zeke', '192.168.100.5', 'galleria', "connid", 5) == '') def test_policy1_test_users_same_permissions(self): zname = PolicyFile.policy.lookup_user('zeke', '192.168.100.5', 'photoserver', '192.168.100.5:33333', 6) yname = PolicyFile.policy.lookup_user('ynot', '10.48.255.254', 'photoserver', '192.168.100.5:33334', 7) self.assertTrue(zname == yname) def test_policy1_lookup_unknown_application(self): upolicy = {} self.assertFalse( PolicyFile.policy.lookup_settings('unknown', 'doesntmatter', upolicy) ) def test_policy1_lookup_unknown_usergroup(self): upolicy = {} self.assertFalse( PolicyFile.policy.lookup_settings('photoserver', 'unknown', upolicy) ) class PolicyFileApplicationFallback(TestCase): manager = MockPolicyManager() policy = PolicyLocal(manager) policy.test_load_config() def test_bad_app_fallback(self): # Show 
that with no fallback the user cannot connect self.assertTrue( self.policy.lookup_user('zeke', '192.168.100.5', 'galleria', "connid", 5) == '') # Enable the fallback defaultVhost and show the same user can now connect self.policy.set_default_vhost('photoserver') settingsname = self.policy.lookup_user('zeke', '192.168.100.5', 'galleria', "connid", 5) self.assertTrue(settingsname == 'test') # Show that the fallback settings are returned upolicy = {} self.assertTrue( self.policy.lookup_settings('phony*app*name', settingsname, upolicy) ) self.assertTrue(upolicy['maxFrameSize'] == 444444) self.assertTrue(upolicy['maxMessageSize'] == 444444) self.assertTrue(upolicy['maxSessionWindow'] == 444444) self.assertTrue(upolicy['maxSessions'] == 4) self.assertTrue(upolicy['maxSenders'] == 44) self.assertTrue(upolicy['maxReceivers'] == 44) self.assertTrue(upolicy['allowAnonymousSender']) self.assertTrue(upolicy['allowDynamicSource']) self.assertTrue(upolicy['targets'] == 'a,private,') self.assertTrue(upolicy['sources'] == 'a,private,') # Disable fallback and show failure again self.policy.set_default_vhost('') self.assertTrue( self.policy.lookup_user('zeke', '192.168.100.5', 'galleria', "connid", 5) == '') class PolicyAppConnectionMgrTests(TestCase): def test_policy_app_conn_mgr_fail_by_total(self): stats = PolicyAppConnectionMgr(1, 2, 2) diags = [] self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags, None, None)) self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags, None, None)) self.assertTrue(len(diags) == 1) self.assertIn('application connection limit', diags[0]) def test_policy_app_conn_mgr_fail_by_user(self): stats = PolicyAppConnectionMgr(3, 1, 2) diags = [] self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags, None, None)) self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags, None, None)) self.assertTrue(len(diags) == 1) self.assertIn('per user', diags[0]) diags = [] self.assertTrue(stats.can_connect('10.10.10.10:10002', 'chuck', '10.10.10.10', diags, 2, None)) self.assertFalse(stats.can_connect('10.10.10.10:10003', 'chuck', '10.10.10.10', diags, 2, None)) def test_policy_app_conn_mgr_fail_by_hosts(self): stats = PolicyAppConnectionMgr(3, 2, 1) diags = [] self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags, None, None)) self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags, None, None)) self.assertTrue(len(diags) == 1) self.assertIn('per host', diags[0]) diags = [] self.assertTrue(stats.can_connect('10.10.10.10:10002', 'chuck', '10.10.10.10', diags, None, 2)) self.assertFalse(stats.can_connect('10.10.10.10:10003', 'chuck', '10.10.10.10', diags, None, 2)) def test_policy_app_conn_mgr_fail_by_user_hosts(self): stats = PolicyAppConnectionMgr(3, 1, 1) diags = [] self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags, None, None)) self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags, None, None)) self.assertTrue(len(diags) == 2) success = 'per user' in diags[0] or 'per user' in diags[1] self.assertTrue(success) diags = [] self.assertTrue(stats.can_connect('10.10.10.10:10002', 'chuck', '10.10.10.10', diags, 2, 2)) self.assertFalse(stats.can_connect('10.10.10.10:10003', 'chuck', '10.10.10.10', diags, 2, 2)) def test_policy_app_conn_mgr_update(self): stats = PolicyAppConnectionMgr(3, 1, 2) diags = [] 
self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags, None, None)) self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags, None, None)) self.assertTrue(len(diags) == 1) self.assertIn('per user', diags[0]) diags = [] stats.update(3, 2, 2) self.assertTrue(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags, None, None)) def test_policy_app_conn_mgr_disconnect(self): stats = PolicyAppConnectionMgr(3, 1, 2) diags = [] self.assertTrue(stats.can_connect('10.10.10.10:10000', 'chuck', '10.10.10.10', diags, None, None)) self.assertFalse(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags, None, None)) self.assertTrue(len(diags) == 1) self.assertIn('per user', diags[0]) diags = [] stats.disconnect("10.10.10.10:10000", 'chuck', '10.10.10.10') self.assertTrue(stats.can_connect('10.10.10.10:10001', 'chuck', '10.10.10.10', diags, None, None)) def test_policy_app_conn_mgr_create_bad_settings(self): denied = False try: stats = PolicyAppConnectionMgr(-3, 1, 2) except PolicyError: denied = True self.assertTrue(denied, "Failed to detect negative setting value.") def test_policy_app_conn_mgr_update_bad_settings(self): denied = False try: stats = PolicyAppConnectionMgr(0, 0, 0) except PolicyError: denied = True self.assertFalse(denied, "Should allow all zeros.") try: stats.update(0, -1, 0) except PolicyError: denied = True self.assertTrue(denied, "Failed to detect negative setting value.") def test_policy_app_conn_mgr_larger_counts(self): stats = PolicyAppConnectionMgr(10000, 10000, 10000) diags = [] for i in range(0, 10000): self.assertTrue(stats.can_connect('1.1.1.1:' + str(i), 'chuck', '1.1.1.1', diags, None, None)) self.assertTrue(len(diags) == 0) self.assertFalse(stats.can_connect('1.1.1.1:10000', 'chuck', '1.1.1.1', diags, None, None)) self.assertTrue(len(diags) == 3) self.assertTrue(stats.connections_active == 10000) self.assertTrue(stats.connections_approved == 10000) self.assertTrue(stats.connections_denied == 1) class PolicyAliases(TestCase): # def test_AliasesRenameOwnVhost(self): config_str = """ [{ "hostname": "$default", "allowUnknownUser": true, "aliases": "$default", "groups": { "$default": { "remoteHosts": "*", "allowDynamicSource": true, "allowAnonymousSender": true, "sources": "$management, examples, q1", "targets": "$management, examples, q1", "maxSessions": 1 } } }] """ manager = MockPolicyManager() policy = PolicyLocal(manager) ruleset = json.loads(config_str) denied = False try: policy.create_ruleset(ruleset[0]) except PolicyError: denied = True self.assertTrue(denied, "Ruleset duplicates vhost and alias but condition not detected.") # def test_SameAliasOnTwoVhosts(self): config_str = """ [{ "hostname": "$default", "aliases": "a,b,c,d,e", "groups": { "$default": { "maxSessions": 1 } } }, { "hostname": "doshormigas", "aliases": "i,h,g,f,e", "groups": { "$default": { "maxSessions": 1 } } }] """ manager = MockPolicyManager() policy = PolicyLocal(manager) ruleset = json.loads(config_str) denied = False try: policy.create_ruleset(ruleset[0]) policy.create_ruleset(ruleset[1]) except PolicyError as e: denied = True self.assertTrue(denied, "Rulesets duplicate same alias in two vhosts but condition not detected.") # def test_AliasConflictsWithVhost(self): config_str = """ [{ "hostname": "$default", "groups": { "$default": { "maxSessions": 1 } } }, { "hostname": "conflict-with-vhost", "aliases": "$default", "groups": { "$default": { "maxSessions": 1 } } }] """ manager = MockPolicyManager() policy = 
PolicyLocal(manager) ruleset = json.loads(config_str) denied = False try: policy.create_ruleset(ruleset[0]) policy.create_ruleset(ruleset[1]) except PolicyError as e: denied = True self.assertTrue(denied, "Ruleset alias names other vhost but condition not detected.") # def test_AliasOperationalLookup(self): manager = MockPolicyManager() policy = PolicyLocal(manager) policy.test_load_config() # For this test the test config defines vhost 'photoserver'. # This test accesses that vhost using the alias name 'antialias'. settingsname = policy.lookup_user('zeke', '192.168.100.5', 'antialias', "connid", 5) self.assertTrue(settingsname == 'test') upolicy = {} self.assertTrue( policy.lookup_settings('antialias', settingsname, upolicy) ) self.assertTrue(upolicy['maxFrameSize'] == 444444) self.assertTrue(upolicy['sources'] == 'a,private,') if __name__ == '__main__': unittest.main(main_module())<|fim▁end|>
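# A small standalone illustration of the HostAddr range matching exercised by
# the tests above. It assumes the qpid_dispatch_internal package under test is
# importable; the sample addresses are arbitrary.
from qpid_dispatch_internal.policy.policy_util import HostAddr, HostStruct

addr_range = HostAddr("1.1.1.1,1.1.1.255")             # inclusive IPv4 range
print(addr_range.match_str("1.1.1.42"))                # True: inside the range
print(addr_range.match_bin(HostStruct("2.2.2.2")))     # False: outside the range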
<|file_name|>award.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import unittest from copy import deepcopy from openprocurement.api.tests.base import snitch from openprocurement.tender.belowthreshold.adapters import TenderBelowThersholdConfigurator from openprocurement.tender.belowthreshold.tests.base import ( TenderContentWebTest, test_bids, test_lots, test_organization ) from openprocurement.tender.belowthreshold.tests.award_blanks import ( # TenderAwardResourceTest create_tender_award_invalid, create_tender_award, patch_tender_award, patch_tender_award_unsuccessful, get_tender_award, patch_tender_award_Administrator_change, # TenderLotAwardCheckResourceTest check_tender_award, # TenderLotAwardResourceTest create_tender_lot_award, patch_tender_lot_award, patch_tender_lot_award_unsuccessful, # Tender2LotAwardResourceTest create_tender_lots_award, patch_tender_lots_award, # TenderAwardComplaintResourceTest create_tender_award_complaint_invalid, create_tender_award_complaint, patch_tender_award_complaint, review_tender_award_complaint, get_tender_award_complaint, get_tender_award_complaints, # TenderLotAwardComplaintResourceTest create_tender_lot_award_complaint, patch_tender_lot_award_complaint, get_tender_lot_award_complaint, get_tender_lot_award_complaints, # Tender2LotAwardComplaintResourceTest create_tender_lots_award_complaint, patch_tender_lots_award_complaint, # TenderAwardComplaintDocumentResourceTest not_found, create_tender_award_complaint_document, put_tender_award_complaint_document, patch_tender_award_complaint_document, # Tender2LotAwardComplaintDocumentResourceTest create_tender_lots_award_complaint_document, put_tender_lots_award_complaint_document, patch_tender_lots_award_complaint_document, # TenderAwardDocumentResourceTest not_found_award_document, create_tender_award_document, put_tender_award_document, patch_tender_award_document, create_award_document_bot, patch_not_author, # Tender2LotAwardDocumentResourceTest create_tender_lots_award_document, put_tender_lots_award_document, patch_tender_lots_award_document, )<|fim▁hole|> test_get_tender_award = snitch(get_tender_award) test_patch_tender_award_Administrator_change = snitch(patch_tender_award_Administrator_change) class TenderAwardComplaintResourceTestMixin(object): test_create_tender_award_complaint_invalid = snitch(create_tender_award_complaint_invalid) test_get_tender_award_complaint = snitch(get_tender_award_complaint) test_get_tender_award_complaints = snitch(get_tender_award_complaints) class TenderAwardDocumentResourceTestMixin(object): test_not_found_award_document = snitch(not_found_award_document) test_create_tender_award_document = snitch(create_tender_award_document) test_put_tender_award_document = snitch(put_tender_award_document) test_patch_tender_award_document = snitch(patch_tender_award_document) test_create_award_document_bot = snitch(create_award_document_bot) test_patch_not_author = snitch(patch_not_author) class TenderAwardComplaintDocumentResourceTestMixin(object): test_not_found = snitch(not_found) test_create_tender_award_complaint_document = snitch(create_tender_award_complaint_document) test_put_tender_award_complaint_document = snitch(put_tender_award_complaint_document) class TenderLotAwardCheckResourceTestMixin(object): test_check_tender_award = snitch(check_tender_award) class Tender2LotAwardDocumentResourceTestMixin(object): test_create_tender_lots_award_document = snitch(create_tender_lots_award_document) test_put_tender_lots_award_document = 
snitch(put_tender_lots_award_document) test_patch_tender_lots_award_document = snitch(patch_tender_lots_award_document) class TenderAwardResourceTest(TenderContentWebTest, TenderAwardResourceTestMixin): initial_status = 'active.qualification' initial_bids = test_bids test_create_tender_award = snitch(create_tender_award) test_patch_tender_award = snitch(patch_tender_award) test_patch_tender_award_unsuccessful = snitch(patch_tender_award_unsuccessful) class TenderLotAwardCheckResourceTest(TenderContentWebTest, TenderLotAwardCheckResourceTestMixin): initial_status = 'active.auction' initial_lots = test_lots initial_bids = deepcopy(test_bids) initial_bids.append(deepcopy(test_bids[0])) initial_bids[1]['tenderers'][0]['name'] = u'Не зовсім Державне управління справами' initial_bids[1]['tenderers'][0]['identifier']['id'] = u'88837256' initial_bids[2]['tenderers'][0]['name'] = u'Точно не Державне управління справами' initial_bids[2]['tenderers'][0]['identifier']['id'] = u'44437256' reverse = TenderBelowThersholdConfigurator.reverse_awarding_criteria awarding_key = TenderBelowThersholdConfigurator.awarding_criteria_key def setUp(self): super(TenderLotAwardCheckResourceTest, self).setUp() self.app.authorization = ('Basic', ('auction', '')) response = self.app.get('/tenders/{}/auction'.format(self.tender_id)) auction_bids_data = response.json['data']['bids'] for lot_id in self.initial_lots: response = self.app.post_json('/tenders/{}/auction/{}'.format(self.tender_id, lot_id['id']), {'data': {'bids': auction_bids_data}}) self.assertEqual(response.status, "200 OK") self.assertEqual(response.content_type, 'application/json') response = self.app.get('/tenders/{}'.format(self.tender_id)) self.assertEqual(response.json['data']['status'], "active.qualification") class TenderLotAwardResourceTest(TenderContentWebTest): initial_status = 'active.qualification' initial_lots = test_lots initial_bids = test_bids test_create_tender_lot_award = snitch(create_tender_lot_award) test_patch_tender_lot_award = snitch(patch_tender_lot_award) test_patch_tender_lot_award_unsuccessful = snitch(patch_tender_lot_award_unsuccessful) class Tender2LotAwardResourceTest(TenderContentWebTest): initial_status = 'active.qualification' initial_lots = 2 * test_lots initial_bids = test_bids test_create_tender_lots_award = snitch(create_tender_lots_award) test_patch_tender_lots_award = snitch(patch_tender_lots_award) class TenderAwardComplaintResourceTest(TenderContentWebTest, TenderAwardComplaintResourceTestMixin): initial_status = 'active.qualification' initial_bids = test_bids def setUp(self): super(TenderAwardComplaintResourceTest, self).setUp() # Create award auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': self.initial_bids[0]['id']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth test_create_tender_award_complaint = snitch(create_tender_award_complaint) test_patch_tender_award_complaint = snitch(patch_tender_award_complaint) test_review_tender_award_complaint = snitch(review_tender_award_complaint) class TenderLotAwardComplaintResourceTest(TenderContentWebTest): initial_status = 'active.qualification' initial_lots = test_lots initial_bids = test_bids def setUp(self): super(TenderLotAwardComplaintResourceTest, self).setUp() # Create award auth = self.app.authorization self.app.authorization = ('Basic', 
('token', '')) bid = self.initial_bids[0] response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': bid['id'], 'lotID': bid['lotValues'][0]['relatedLot']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth test_create_tender_lot_award_complaint = snitch(create_tender_lot_award_complaint) test_patch_tender_lot_award_complaint = snitch(patch_tender_lot_award_complaint) test_get_tender_lot_award_complaint = snitch(get_tender_lot_award_complaint) test_get_tender_lot_award_complaints = snitch(get_tender_lot_award_complaints) class Tender2LotAwardComplaintResourceTest(TenderLotAwardComplaintResourceTest): initial_lots = 2 * test_lots test_create_tender_lots_award_complaint = snitch(create_tender_lots_award_complaint) test_patch_tender_lots_award_complaint = snitch(patch_tender_lots_award_complaint) class TenderAwardComplaintDocumentResourceTest(TenderContentWebTest, TenderAwardComplaintDocumentResourceTestMixin): initial_status = 'active.qualification' initial_bids = test_bids def setUp(self): super(TenderAwardComplaintDocumentResourceTest, self).setUp() # Create award auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': self.initial_bids[0]['id']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth # Create complaint for award self.bid_token = self.initial_bids_tokens.values()[0] response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format( self.tender_id, self.award_id, self.bid_token), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization}}) complaint = response.json['data'] self.complaint_id = complaint['id'] self.complaint_owner_token = response.json['access']['token'] test_patch_tender_award_complaint_document = snitch(patch_tender_award_complaint_document) class Tender2LotAwardComplaintDocumentResourceTest(TenderContentWebTest): initial_status = 'active.qualification' initial_bids = test_bids initial_lots = 2 * test_lots def setUp(self): super(Tender2LotAwardComplaintDocumentResourceTest, self).setUp() # Create award bid = self.initial_bids[0] auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': bid['id'], 'lotID': bid['lotValues'][0]['relatedLot']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth # Create complaint for award bid_token = self.initial_bids_tokens.values()[0] response = self.app.post_json('/tenders/{}/awards/{}/complaints?acc_token={}'.format( self.tender_id, self.award_id, bid_token), {'data': {'title': 'complaint title', 'description': 'complaint description', 'author': test_organization}}) complaint = response.json['data'] self.complaint_id = complaint['id'] self.complaint_owner_token = response.json['access']['token'] test_create_tender_lots_award_complaint_document = snitch(create_tender_lots_award_complaint_document) test_put_tender_lots_award_complaint_document = snitch(put_tender_lots_award_complaint_document) test_patch_tender_lots_award_complaint_document = snitch(patch_tender_lots_award_complaint_document) class 
TenderAwardDocumentResourceTest(TenderContentWebTest, TenderAwardDocumentResourceTestMixin): initial_status = 'active.qualification' initial_bids = test_bids def setUp(self): super(TenderAwardDocumentResourceTest, self).setUp() # Create award auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': self.initial_bids[0]['id']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth class TenderAwardDocumentWithDSResourceTest(TenderAwardDocumentResourceTest): docservice = True class Tender2LotAwardDocumentResourceTest(TenderContentWebTest, Tender2LotAwardDocumentResourceTestMixin): initial_status = 'active.qualification' initial_bids = test_bids initial_lots = 2 * test_lots def setUp(self): super(Tender2LotAwardDocumentResourceTest, self).setUp() # Create award auth = self.app.authorization self.app.authorization = ('Basic', ('token', '')) bid = self.initial_bids[0] response = self.app.post_json('/tenders/{}/awards'.format( self.tender_id), {'data': {'suppliers': [test_organization], 'status': 'pending', 'bid_id': bid['id'], 'lotID': bid['lotValues'][0]['relatedLot']}}) award = response.json['data'] self.award_id = award['id'] self.app.authorization = auth class Tender2LotAwardDocumentWithDSResourceTest(Tender2LotAwardDocumentResourceTest): docservice = True def suite(): suite = unittest.TestSuite() suite.addTest(unittest.makeSuite(Tender2LotAwardComplaintDocumentResourceTest)) suite.addTest(unittest.makeSuite(Tender2LotAwardComplaintResourceTest)) suite.addTest(unittest.makeSuite(Tender2LotAwardDocumentResourceTest)) suite.addTest(unittest.makeSuite(Tender2LotAwardResourceTest)) suite.addTest(unittest.makeSuite(TenderAwardComplaintDocumentResourceTest)) suite.addTest(unittest.makeSuite(TenderAwardComplaintResourceTest)) suite.addTest(unittest.makeSuite(TenderAwardDocumentResourceTest)) suite.addTest(unittest.makeSuite(TenderAwardResourceTest)) suite.addTest(unittest.makeSuite(TenderLotAwardResourceTest)) return suite if __name__ == '__main__': unittest.main(defaultTest='suite')<|fim▁end|>
class TenderAwardResourceTestMixin(object): test_create_tender_award_invalid = snitch(create_tender_award_invalid)
<|file_name|>base.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Licensed to Cloudera, Inc. under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. Cloudera, Inc. licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.<|fim▁hole|> from django.utils.translation import ugettext as _ from desktop.lib.exceptions_renderable import PopupException from desktop.lib.i18n import smart_unicode from notebook.conf import get_interpreters LOG = logging.getLogger(__name__) class SessionExpired(Exception): pass class QueryExpired(Exception): pass class AuthenticationRequired(Exception): pass class QueryError(Exception): def __init__(self, message, handle=None): self.message = message or _('No error message, please check the logs.') self.handle = handle self.extra = {} def __unicode__(self): return smart_unicode(self.message) class Notebook(object): def __init__(self, document=None): self.document = None if document is not None: self.data = document.data self.document = document else: self.data = json.dumps({ 'name': 'My Notebook', 'description': '', 'type': 'notebook', 'snippets': [], }) def get_json(self): _data = self.get_data() return json.dumps(_data) def get_data(self): _data = json.loads(self.data) if self.document is not None: _data['id'] = self.document.id _data['is_history'] = self.document.is_history return _data def get_str(self): return '\n\n'.join([snippet['statement_raw'] for snippet in self.get_data()['snippets']]) def get_api(request, snippet): from notebook.connectors.hiveserver2 import HS2Api from notebook.connectors.jdbc import JdbcApi from notebook.connectors.rdbms import RdbmsApi from notebook.connectors.pig_batch import PigApi from notebook.connectors.solr import SolrApi from notebook.connectors.spark_shell import SparkApi from notebook.connectors.spark_batch import SparkBatchApi from notebook.connectors.text import TextApi interpreter = [interpreter for interpreter in get_interpreters(request.user) if interpreter['type'] == snippet['type']] if not interpreter: raise PopupException(_('Snippet type %(type)s is not configured in hue.ini') % snippet) interpreter = interpreter[0] interface = interpreter['interface'] if interface == 'hiveserver2': return HS2Api(user=request.user, request=request) elif interface == 'livy': return SparkApi(request.user) elif interface == 'livy-batch': return SparkBatchApi(request.user) elif interface == 'text' or interface == 'markdown': return TextApi(request.user) elif interface == 'rdbms': return RdbmsApi(request.user, interpreter=snippet['type']) elif interface == 'jdbc': return JdbcApi(request.user, interpreter=interpreter) elif interface == 'solr': return SolrApi(request.user, interpreter=interpreter) elif interface == 'pig': return PigApi(user=request.user, request=request) else: raise PopupException(_('Notebook connector interface not recognized: %s') % interface) def _get_snippet_session(notebook, snippet): session = [session for session in 
notebook['sessions'] if session['type'] == snippet['type']] if not session: raise SessionExpired() else: return session[0] # Base API class Api(object): def __init__(self, user, interpreter=None, request=None): self.user = user self.interpreter = interpreter self.request = request def create_session(self, lang, properties=None): return { 'type': lang, 'id': None, 'properties': properties if not None else [] } def close_session(self, session): pass def fetch_result(self, notebook, snippet, rows, start_over): pass def download(self, notebook, snippet, format): pass def get_log(self, notebook, snippet, startFrom=None, size=None): return 'No logs' def autocomplete(self, snippet, database=None, table=None, column=None, nested=None): return {} def progress(self, snippet, logs=None): return 50 def get_jobs(self, notebook, snippet, logs): return [] def export_data_as_hdfs_file(self, snippet, target_file, overwrite): raise NotImplementedError() def export_data_as_table(self, notebook, snippet, destination): raise NotImplementedError() def export_large_data_to_hdfs(self, notebook, snippet, destination): raise NotImplementedError()<|fim▁end|>
import json import logging
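# A minimal sketch of the Notebook wrapper defined in the row above. The
# import path is an assumption based on the module's own imports (Hue's
# notebook connectors package); no Document object is passed here.
from notebook.connectors.base import Notebook  # assumed import path

nb = Notebook()          # no document: falls back to the default JSON payload
data = nb.get_data()     # parsed dict; no 'id'/'is_history' without a document
print(data['name'])      # -> 'My Notebook'
print(nb.get_str())      # joins snippet['statement_raw'] values; empty here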
<|file_name|>gitkali.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 """ Written by: True Demon The non-racist Kali repository grabber for all operating systems. Git Kali uses Offensive Security's package repositories and their generous catalog<|fim▁hole|>of Offensive Security actually sticking to good practices and keeping their packages well-organized, so thanks OffSec! :) #TryHarder """ # TODO: Finish Install Script # TODO: Categorize tool searches # TODO: Categorization of repos is a big task to be done later # TODO: Include package management import argparse import packmgr as packager from utils import * # includes sys, os prog_info = "GIT Kali Project" __author__ = "True Demon" __winstall__ = "C:\\ProgramFiles\\GitKali\\" # Default package installation directory for Windows __linstall__ = "/usr/share" # Default package installation directory for Linux __install__ = "" # Used to store default install directory based on OS try: if os.name == 'posix': __install__ = __linstall__ if os.getuid(): print("You need to be root to install packages. Try again as sudo.") sys.exit() elif os.name == 'nt': __install__ = __winstall__ from ctypes import windll if not windll.shell32.IsUserAnAdmin(): print("You must be an administrator to install packages. Please run from an escalated cmd.") else: sys.stderr("Could not detect your privileges / operating system. " "This script only supports Linux (Posix) and Windows (nt) systems.") except OSError: sys.stderr("Unknown Operating System detected. You must have invented this one yourself! Teach me, Senpai!") exit() except ImportError as e: sys.stderr("Invalid or missing libraries: \n%s" % e) def search(search_word): # search function for valid packages to install found = [] with open('kali-packages.lst', 'r') as file: packages = file.readlines() for p in packages: if search_word in p.split()[0]: found.append(p.split()[0]) if not len(found): print(Symbol.fail + " Could not find any matching packages") return None print("Found packages: ") print(' '.join(found)) def check_install_dir(install_dir=__install__): if os.path.exists(install_dir): try: os.chdir(install_dir) if os.getcwd() != install_dir: print("Something went wrong. We can't get to your installation directory: %s" % install_dir) sys.exit() except OSError: print("Somehow, you broke it. 
Dunno how ya did it, but a bug report would be mighty handy to figure out how!") sys.exit(-1) def main(): parser = argparse.ArgumentParser(prog='gitkali.py', description='The apt-like Kali package installer for Linux', epilog=prog_info, formatter_class=argparse.RawTextHelpFormatter) parser._positionals.title = "Commands" parser.add_argument("command", choices=["search", "install", "update", "upgrade"], help="search : search package list for compatible packages\n" + "install : install specified package\n" + "update : update package lists\n" + "upgrade : upgrade kali packages\n\n" ) parser.add_argument("packages", action='store', metavar='package', nargs='*', help="package(s) to upgrade/install") parser.add_argument("-d", "--directory", action='store', default=__install__, help="Alternate installation directory") args = parser.parse_args() packages = [str(p) for p in args.packages] # Converts args.package(tuple) to list of strings for ease of use args.directory = os.path.abspath(args.directory) if args.command == 'search': packager.check_kali_packages() for p in packages: search(p) elif args.command == 'update': packager.get_updates() exit() elif args.command == 'upgrade': packager.upgrade(packages, args.directory) elif args.command == 'install': if len(packages) == 0 : print("No packages given") if '*' in packages: # NEVER EVER EVER EVER EEEEEEEVVVVVEEEEEEEEEEEERRRRRRRRRRR DO THIS!!! # TODO: EVENTUALLY...build a way for this to work safely... packager.install_all(args.directory) if args.directory != __install__: # Usually /usr/share/ check_install_dir(args.directory) # Check that the directory exists warn_non_standard_dir(args.directory) # Warn the user that this is not advised response = input("Do you wish to proceed?: [y/N]") # Confirm decision if response.upper() != 'Y': exit() packages_to_install = packager.get_local_packages(packages) # Returns a dictionary ex: {package_name: package_url} for p in packages_to_install: print("Proceeding with install: ", p) packager.install(p, packages_to_install[p], args.directory) # install(package_name, url, into directory) if __name__ == "__main__": main()<|fim▁end|>
of extremely handy penetration testing tools. This project is possible because
<|file_name|>interface.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The html5ever Project Developers. See the // COPYRIGHT file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! The interface for consumers of the tree builder (and thus the //! parser overall). use tokenizer::Attribute; use std::borrow::Cow; use string_cache::QualName; use tendril::StrTendril; pub use self::QuirksMode::{Quirks, LimitedQuirks, NoQuirks}; pub use self::NodeOrText::{AppendNode, AppendText}; /// A document's quirks mode. #[derive(PartialEq, Eq, Copy, Clone, Hash, Debug)] #[cfg_attr(feature = "heap_size", derive(HeapSizeOf))] pub enum QuirksMode { Quirks, LimitedQuirks, NoQuirks, } /// Something which can be inserted into the DOM. /// /// Adjacent sibling text nodes are merged into a single node, so /// the sink may not want to allocate a `Handle` for each. pub enum NodeOrText<Handle> { AppendNode(Handle), AppendText(StrTendril), } /// Whether to interrupt further parsing of the current input until /// the next explicit resumption of the tokenizer, or continue without /// any interruption. #[derive(PartialEq, Eq, Copy, Clone, Hash, Debug)] pub enum NextParserState { Suspend, Continue, } /// Types which can process tree modifications from the tree builder. pub trait TreeSink { /// The overall result of parsing. /// /// This should default to Self, but default associated types are not stable yet. /// (https://github.com/rust-lang/rust/issues/29661) type Output; /// Consume this sink and return the overall result of parsing. /// /// This should default to `fn finish(self) -> Self::Output { self }`, /// but default associated types are not stable yet. /// (https://github.com/rust-lang/rust/issues/29661) fn finish(self) -> Self::Output; /// `Handle` is a reference to a DOM node. The tree builder requires /// that a `Handle` implements `Clone` to get another reference to /// the same node. type Handle: Clone; /// Signal a parse error. fn parse_error(&mut self, msg: Cow<'static, str>); /// Get a handle to the `Document` node. fn get_document(&mut self) -> Self::Handle; /// Get a handle to a template's template contents. The tree builder /// promises this will never be called with something else than /// a template element. fn get_template_contents(&mut self, target: Self::Handle) -> Self::Handle; /// Do two handles refer to the same node? fn same_node(&self, x: Self::Handle, y: Self::Handle) -> bool; /// What is the name of this element? /// /// Should never be called on a non-element node; /// feel free to `panic!`. fn elem_name(&self, target: Self::Handle) -> QualName; /// Set the document's quirks mode. fn set_quirks_mode(&mut self, mode: QuirksMode); /// Create an element. /// /// When creating a template element (`name == qualname!(html, "template")`), /// an associated document fragment called the "template contents" should /// also be created. Later calls to self.get_template_contents() with that /// given element return it. /// https://html.spec.whatwg.org/multipage/#htmltemplateelement fn create_element(&mut self, name: QualName, attrs: Vec<Attribute>) -> Self::Handle; /// Create a comment node. 
fn create_comment(&mut self, text: StrTendril) -> Self::Handle; /// Append a node as the last child of the given node. If this would /// produce adjacent sibling text nodes, it should concatenate the text /// instead. /// /// The child node will not already have a parent. fn append(&mut self, parent: Self::Handle, child: NodeOrText<Self::Handle>); /// Append a node as the sibling immediately before the given node. If that node /// has no parent, do nothing and return Err(new_node). /// /// The tree builder promises that `sibling` is not a text node. However its /// old previous sibling, which would become the new node's previous sibling, /// could be a text node. If the new node is also a text node, the two should /// be merged, as in the behavior of `append`. /// /// NB: `new_node` may have an old parent, from which it should be removed. fn append_before_sibling(&mut self, sibling: Self::Handle, new_node: NodeOrText<Self::Handle>) -> Result<(), NodeOrText<Self::Handle>>; /// Append a `DOCTYPE` element to the `Document` node. fn append_doctype_to_document(&mut self, name: StrTendril, public_id: StrTendril, system_id: StrTendril); /// Add each attribute to the given element, if no attribute with that name /// already exists. The tree builder promises this will never be called /// with something else than an element. fn add_attrs_if_missing(&mut self, target: Self::Handle, attrs: Vec<Attribute>); /// Detach the given node from its parent.<|fim▁hole|> fn remove_from_parent(&mut self, target: Self::Handle); /// Remove all the children from node and append them to new_parent. fn reparent_children(&mut self, node: Self::Handle, new_parent: Self::Handle); /// Mark a HTML `<script>` element as "already started". fn mark_script_already_started(&mut self, node: Self::Handle); /// Indicate that a `<script>` element is complete. fn complete_script(&mut self, _node: Self::Handle) -> NextParserState { NextParserState::Continue } // Returns true if the adjusted current node is an HTML integration point // and the token is a start tag fn is_mathml_annotation_xml_integration_point(&self, handle: Self::Handle) -> bool { false } } /// Trace hooks for a garbage-collected DOM. pub trait Tracer { type Handle; /// Upon a call to `trace_handles`, the tree builder will call this method /// for each handle in its internal state. fn trace_handle(&self, node: &Self::Handle); }<|fim▁end|>
<|file_name|>Activator.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.oak.sling; import java.util.HashMap; import java.util.Map; import java.util.Properties; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import javax.jcr.Repository; import org.apache.jackrabbit.oak.api.ContentRepository; import org.apache.jackrabbit.oak.spi.security.SecurityProvider; import org.apache.sling.jcr.api.SlingRepository; import org.osgi.framework.BundleActivator; import org.osgi.framework.BundleContext; import org.osgi.framework.ServiceReference; import org.osgi.framework.ServiceRegistration; import org.osgi.util.tracker.ServiceTracker; import org.osgi.util.tracker.ServiceTrackerCustomizer; public class Activator implements BundleActivator, ServiceTrackerCustomizer { private BundleContext context; private ScheduledExecutorService executor; private SecurityProvider securityProvider; private ServiceTracker tracker; private final Map<ServiceReference, ServiceRegistration> jcrRepositories = new HashMap<ServiceReference, ServiceRegistration>(); private final Map<ServiceReference, ServiceRegistration> slingRepositories = new HashMap<ServiceReference, ServiceRegistration>(); //-----------------------------------------------------< BundleActivator >-- @Override public void start(BundleContext bundleContext) throws Exception { context = bundleContext; executor = Executors.newScheduledThreadPool(1); securityProvider = null; // TODO tracker = new ServiceTracker( context, ContentRepository.class.getName(), this); tracker.open(); } @Override public void stop(BundleContext bundleContext) throws Exception { tracker.close(); executor.shutdown(); } //--------------------------------------------< ServiceTrackerCustomizer >-- @Override public Object addingService(ServiceReference reference) { Object service = context.getService(reference); if (service instanceof ContentRepository) { SlingRepository repository = new SlingRepositoryImpl( (ContentRepository) service, executor, securityProvider); jcrRepositories.put(reference, context.registerService( Repository.class.getName(), repository, new Properties())); slingRepositories.put(reference, context.registerService( SlingRepository.class.getName(), repository, new Properties())); return service;<|fim▁hole|> context.ungetService(reference); return null; } } @Override public void modifiedService(ServiceReference reference, Object service) { } @Override public void removedService(ServiceReference reference, Object service) { slingRepositories.get(reference).unregister(); jcrRepositories.get(reference).unregister(); context.ungetService(reference); } }<|fim▁end|>
} else {
<|file_name|>update_spec.rs<|end_file_name|><|fim▁begin|>/// Purpose of this binary is to update the submodule to a particular tag /// TODO Could add it in that it will perform a check on the tags if it needs to be updated // use std::env; use std::process::Command; fn main() { // let tag = env::args().nth(1); // if tag.is_none() { // println!("Tag must be specificed on command line"); // } // let tag = tag.unwrap();<|fim▁hole|> .arg("submodule") .arg("update") .arg("--remote") .arg("comms-spec") .output() .expect("Got no output :("); }<|fim▁end|>
// println!("Updating the comms-spec submodule to version {:?}", tag); println!("Updating submodule comms-spec", ); Command::new("git")
<|file_name|>RangerElasticsearchAuthorizer.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ranger.authorization.elasticsearch.authorizer; import java.util.List; import org.apache.logging.log4j.Logger; import org.apache.ranger.plugin.classloader.RangerPluginClassLoader; import org.elasticsearch.common.logging.ESLoggerFactory; public class RangerElasticsearchAuthorizer { private static final Logger LOG = ESLoggerFactory.getLogger(RangerElasticsearchAuthorizer.class); private static final String RANGER_PLUGIN_TYPE = "elasticsearch"; private static final String RANGER_ELASTICSEARCH_AUTHORIZER_IMPL_CLASSNAME = "org.apache.ranger.authorization.elasticsearch.authorizer.RangerElasticsearchAuthorizer"; private static RangerPluginClassLoader rangerPluginClassLoader = null; private static ClassLoader esClassLoader = null; private RangerElasticsearchAccessControl rangerElasticsearchAccessControl = null; public RangerElasticsearchAuthorizer() { if (LOG.isDebugEnabled()) { LOG.debug("==> RangerElasticsearchAuthorizer.RangerElasticsearchAuthorizer()"); } this.init(); if (LOG.isDebugEnabled()) { LOG.debug("<== RangerElasticsearchAuthorizer.RangerElasticsearchAuthorizer()"); } } public void init() { if (LOG.isDebugEnabled()) { LOG.debug("==> RangerElasticsearchAuthorizer.init()"); } try { // In elasticsearch this.getClass().getClassLoader() is FactoryURLClassLoader, // but Thread.currentThread().getContextClassLoader() is AppClassLoader. 
esClassLoader = Thread.currentThread().getContextClassLoader(); Thread.currentThread().setContextClassLoader(this.getClass().getClassLoader()); rangerPluginClassLoader = RangerPluginClassLoader.getInstance(RANGER_PLUGIN_TYPE, this.getClass()); Thread.currentThread().setContextClassLoader(esClassLoader); @SuppressWarnings("unchecked") Class<RangerElasticsearchAccessControl> cls = (Class<RangerElasticsearchAccessControl>) Class .forName(RANGER_ELASTICSEARCH_AUTHORIZER_IMPL_CLASSNAME, true, rangerPluginClassLoader); activatePluginClassLoader(); rangerElasticsearchAccessControl = cls.newInstance(); } catch (Exception e) { LOG.error("Error Enabling RangerElasticsearchAuthorizer", e); } finally { deactivatePluginClassLoader(); } if (LOG.isDebugEnabled()) { LOG.debug("<== RangerElasticsearchAuthorizer.init()"); } } public boolean checkPermission(String user, List<String> groups, String index, String action, String clientIPAddress) { boolean ret = false; if (LOG.isDebugEnabled()) { LOG.debug("==> RangerElasticsearchAuthorizer.checkPermission()"); } try { activatePluginClassLoader(); ret = rangerElasticsearchAccessControl.checkPermission(user, groups, index, action, clientIPAddress); } finally { deactivatePluginClassLoader(); } if (LOG.isDebugEnabled()) { LOG.debug("<== RangerElasticsearchAuthorizer.checkPermission()"); } return ret; } private void activatePluginClassLoader() { if (rangerPluginClassLoader != null) { Thread.currentThread().setContextClassLoader(rangerPluginClassLoader); }<|fim▁hole|> } private void deactivatePluginClassLoader() { if (esClassLoader != null) { Thread.currentThread().setContextClassLoader(esClassLoader); } } }<|fim▁end|>
<|file_name|>clear_pgviews.py<|end_file_name|><|fim▁begin|>import logging from django.core.management.base import BaseCommand from django.apps import apps from django.db import connection from django_pgviews.view import clear_view, View, MaterializedView <|fim▁hole|> class Command(BaseCommand): help = """Clear Postgres views. Use this before running a migration""" def handle(self, **options): """ """ for view_cls in apps.get_models(): if not (isinstance(view_cls, type) and issubclass(view_cls, View) and hasattr(view_cls, 'sql')): continue python_name = '{}.{}'.format(view_cls._meta.app_label, view_cls.__name__) status = clear_view( connection, view_cls._meta.db_table, materialized=isinstance(view_cls(), MaterializedView)) if status == 'DROPPED': msg = 'dropped' else: msg = 'not dropped' log.info("%(python_name)s (%(view_name)s): %(msg)s" % { 'python_name': python_name, 'view_name': view_cls._meta.db_table, 'msg': msg})<|fim▁end|>
log = logging.getLogger('django_pgviews.sync_pgviews')
<|file_name|>libs.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- import os import sys import time import datetime import logging import csv import six from datetime import date from django.apps import apps from django.utils import timezone logger = logging.getLogger(__name__) ats_settings = apps.get_app_config('ats') def get_user_realname(first_name, last_name, is_last_name_front): if is_last_name_front: return '%s %s' % (last_name, first_name) else: return '%s %s' % (first_name, last_name) def format_totaltime(td): totalhour = (td.days * 24) + int(td.seconds / 3600) minute = int(td.seconds / 60) - (int(td.seconds / 3600) * 60) return '%d:%02d' % (totalhour, minute) def format_hours_float(td): return (td.days * 24) + (td.seconds / 3600.0) def format_time(timedata): return '%d:%02d' % (timedata.hour, timedata.minute) def get_localtime(): _now = timezone.localtime() # logger.debug("get_localtime() : %s" % (_now)) return _now def get_thismonth_1st(): _now = timezone.localtime() _ret = _now.replace(day=1) # logger.debug("get_thismonth_1st(): %s" % (_ret)) return _ret def export_csv_task(datalist, add_header, new_line): _s = '' if six.PY2: bufffer = six.BytesIO() else:<|fim▁hole|> try: if True: _writer = csv.writer( bufffer, lineterminator=new_line, quotechar='"', quoting=csv.QUOTE_ALL) if add_header: _header = [ 'date', 'project', 'code', 'job', 'task', 'user', 'tasktime', 'task_userdata1', 'task_userdata2', 'task_userdata3', 'task_userdata4', 'task_userdata5', 'comment', ] _writer.writerow(_header) for d in datalist: _line = [] _date = d['taskdate'].isoformat() _line.append(_date) _line.append(d['project__name']) if d['project__external_project__code']: _line.append(d['project__external_project__code']) else: _line.append('') _line.append(d['task__job__name']) _line.append(d['task__name']) _line.append(get_user_realname(d['user__first_name'], d['user__last_name'], ats_settings.ATS_IS_LASTNAME_FRONT)) _line.append(format_time(d['tasktime'])) _line.append(d['task__userdata1']) _line.append(d['task__userdata2']) _line.append(d['task__userdata3']) _line.append(d['task__userdata4']) _line.append(d['task__userdata5']) _line.append(d['comment']) if six.PY2: for i in range(len(_line)): _line[i] = _line[i].encode('utf8') _writer.writerow(_line) _s = bufffer.getvalue() bufffer.close() if six.PY3: _s = _s.encode('utf8') except Exception as e: logger.error('fail export_csv_task().') logger.error('EXCEPT: export_csv_task(). e=%s, msg1=%s,msg2=%s', e, sys.exc_info()[1], sys.exc_info()[2]) return None return _s<|fim▁end|>
bufffer = six.StringIO()
<|file_name|>hashes.rs<|end_file_name|><|fim▁begin|>//! Computing hashes for files. use crate::Result; use openssl::hash::{DigestBytes, Hasher, MessageDigest}; use std::io::{Read, Write}; #[derive(Debug)] pub struct Estimate { pub files: u64, pub bytes: u64, } // TODO: Reuse buffer and hasher for a given thread. pub(crate) fn hash_file<R: Read>(rd: &mut R) -> Result<DigestBytes> { let mut h = Hasher::new(MessageDigest::sha1())?; let mut buf = vec![0u8; 8192]; loop { let count = rd.read(&mut buf)?; if count == 0 { break; } h.write_all(&buf[0..count])?; } Ok(h.finish()?) } pub(crate) use self::atime_impl::noatime_open; /// Open the given file, trying to not update the atime if that is /// possible. /// The `custom_flags` method is only stable since Rust 1.10.0. #[cfg(target_os = "linux")] mod atime_impl { use std::fs::{File, OpenOptions}; use std::io; use std::os::unix::fs::OpenOptionsExt; use std::path::Path; // From linux's fcntl.h, not exported in the libc crate. const O_NOATIME: i32 = 0o1000000; pub fn noatime_open(name: &Path) -> io::Result<File> { // Try opening it first with noatime, and if that fails, try the open // again without the option.<|fim▁hole|> .custom_flags(O_NOATIME) .open(name) { Ok(f) => Ok(f), Err(_) => OpenOptions::new().read(true).open(name), } } } // Other platforms, just use normal open. #[cfg(not(target_os = "linux"))] mod atime_impl { use std::fs::{File, OpenOptions}; use std::io; use std::path::Path; pub fn noatime_open(name: &Path) -> io::Result<File> { OpenOptions::new().read(true).open(name) } }<|fim▁end|>
match OpenOptions::new() .read(true)
<|file_name|>grabber.py<|end_file_name|><|fim▁begin|>"""Grabber for collecting data""" import urllib2 from random import sample from veliberator.settings import PROXY_SERVERS class Grabber(object): """Url encapsultation for making request throught HTTP""" page = None data = None def __init__(self, url, proxies=PROXY_SERVERS): """Init the grabber""" self.url = url self.proxies = proxies self.opener = self.build_opener() def build_opener(self): """Build the url opener""" handlers = [] if self.proxies: server = sample(self.proxies, 1)[0] handlers.append(urllib2.ProxyHandler({'http': server})) return urllib2.build_opener(*handlers) @property def content(self): """Return the data grabbed""" if self.data: return self.data<|fim▁hole|> return self.data except: return ''<|fim▁end|>
try: self.page = self.opener.open(self.url) self.data = ''.join(self.page.readlines()) self.page.close()
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|># Copyright (C) 2015 by the Free Software Foundation, Inc. # # This file is part of GNU Mailman. # # GNU Mailman is free software: you can redistribute it and/or modify it under # the terms of the GNU General Public License as published by the Free # Software Foundation, either version 3 of the License, or (at your option) # any later version. # # GNU Mailman is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or # FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for # more details. # # You should have received a copy of the GNU General Public License along with # GNU Mailman. If not, see <http://www.gnu.org/licenses/>. """API version tests.""" __all__ = [<|fim▁hole|> import unittest from mailman.core.system import system from mailman.testing.helpers import call_api from mailman.testing.layers import RESTLayer from urllib.error import HTTPError class TestAPIVersion(unittest.TestCase): layer = RESTLayer def test_api_31(self): # API version 3.1 was introduced in Mailman 3.1. url = 'http://localhost:9001/3.1/system' new = '{}/versions'.format(url) json, response = call_api(url) self.assertEqual(json['mailman_version'], system.mailman_version) self.assertEqual(json['python_version'], system.python_version) self.assertEqual(json['api_version'], '3.1') self.assertEqual(json['self_link'], new) def test_api_30(self): # API version 3.0 is still supported. url = 'http://localhost:9001/3.0/system' new = '{}/versions'.format(url) json, response = call_api(url) self.assertEqual(json['mailman_version'], system.mailman_version) self.assertEqual(json['python_version'], system.python_version) self.assertEqual(json['api_version'], '3.0') self.assertEqual(json['self_link'], new) def test_bad_api(self): # There is no API version earlier than 3.0. with self.assertRaises(HTTPError) as cm: call_api('http://localhost:9001/2.9/system') self.assertEqual(cm.exception.code, 404)<|fim▁end|>
'TestAPIVersion', ]
<|file_name|>errors.py<|end_file_name|><|fim▁begin|>""" error types Copyright (c) 2010-2012 Mika Eloranta See LICENSE for details. """ class Error(Exception): """error""" class InvalidProperty(Error): """invalid property""" class MissingProperty(Error): """missing property""" class UserError(Error): """user error""" class InvalidRange(Error): """invalid range""" class SettingsError(Error): """settings error""" class VerifyError(Error): """verify error""" class TemplateError(Error): """template rendering error""" class CloudError(Error): """cloud error""" class RemoteError(Error): """remote error""" class RemoteFileDoesNotExist(RemoteError): """remote file does not exist"""<|fim▁hole|> """repository error""" class ImporterError(Error): """importer error""" class MissingLibraryError(Error): """missing library error""" class RequirementError(Error): """requirement error""" class ControlError(Error): """control error""" class OperationError(Error): """operation error"""<|fim▁end|>
class RepoError(Error):
<|file_name|>toolbar.component.ts<|end_file_name|><|fim▁begin|>import { Component, Input } from '@angular/core'; import { Router } from '@angular/router'; import { IInputButtons } from './toolbar'; @Component({ selector: 'app-toolbar', templateUrl: './toolbar.component.html',<|fim▁hole|> @Input() buttons: IInputButtons; constructor( private router: Router ) { } logout() { localStorage.removeItem('token'); this.router.navigate(['/login']); } }<|fim▁end|>
styleUrls: ['./toolbar.component.scss'] }) export class ToolbarComponent {
<|file_name|>errors.go<|end_file_name|><|fim▁begin|>package rpc import "errors" var (<|fim▁hole|> ErrInvalidToken = errors.New("invalid token") )<|fim▁end|>
ErrRPCServerDisabled = errors.New("server is disabled")
<|file_name|>inmem_spanner_server_test.go<|end_file_name|><|fim▁begin|>// Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package testutil_test import ( "strconv" . "cloud.google.com/go/spanner/internal/testutil" "context" "flag" "fmt" "log" "net" "os" "strings" "testing" structpb "github.com/golang/protobuf/ptypes/struct" spannerpb "google.golang.org/genproto/googleapis/spanner/v1" "google.golang.org/grpc/codes" apiv1 "cloud.google.com/go/spanner/apiv1" "google.golang.org/api/iterator" "google.golang.org/api/option" "google.golang.org/grpc" gstatus "google.golang.org/grpc/status" ) // clientOpt is the option tests should use to connect to the test server. // It is initialized by TestMain. var serverAddress string var clientOpt option.ClientOption var testSpanner InMemSpannerServer // Mocked selectSQL statement. const selectSQL = "SELECT FOO FROM BAR" const selectRowCount int64 = 2 const selectColCount int = 1 var selectValues = [...]int64{1, 2} // Mocked DML statement. const updateSQL = "UPDATE FOO SET BAR=1 WHERE ID=ID" const updateRowCount int64 = 2 func TestMain(m *testing.M) { flag.Parse() testSpanner = NewInMemSpannerServer() serv := grpc.NewServer() spannerpb.RegisterSpannerServer(serv, testSpanner) lis, err := net.Listen("tcp", "localhost:0") if err != nil { log.Fatal(err) } go serv.Serve(lis) serverAddress = lis.Addr().String() conn, err := grpc.Dial(serverAddress, grpc.WithInsecure()) if err != nil { log.Fatal(err) } clientOpt = option.WithGRPCConn(conn) os.Exit(m.Run()) } // Resets the mock server to its default values and registers a mocked result // for the statements "SELECT FOO FROM BAR" and // "UPDATE FOO SET BAR=1 WHERE ID=ID". 
func setup() { testSpanner.Reset() fields := make([]*spannerpb.StructType_Field, selectColCount) fields[0] = &spannerpb.StructType_Field{ Name: "FOO", Type: &spannerpb.Type{Code: spannerpb.TypeCode_INT64}, } rowType := &spannerpb.StructType{ Fields: fields, } metadata := &spannerpb.ResultSetMetadata{ RowType: rowType, } rows := make([]*structpb.ListValue, selectRowCount) for idx, value := range selectValues { rowValue := make([]*structpb.Value, selectColCount) rowValue[0] = &structpb.Value{ Kind: &structpb.Value_StringValue{StringValue: strconv.FormatInt(value, 10)}, } rows[idx] = &structpb.ListValue{ Values: rowValue, } } resultSet := &spannerpb.ResultSet{ Metadata: metadata, Rows: rows, } result := &StatementResult{Type: StatementResultResultSet, ResultSet: resultSet} testSpanner.PutStatementResult(selectSQL, result) updateResult := &StatementResult{Type: StatementResultUpdateCount, UpdateCount: updateRowCount} testSpanner.PutStatementResult(updateSQL, updateResult) } func TestSpannerCreateSession(t *testing.T) { testSpanner.Reset() var expectedName = fmt.Sprintf("projects/%s/instances/%s/databases/%s/sessions/", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var request = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil {<|fim▁hole|> t.Fatal(err) } if strings.Index(resp.Name, expectedName) != 0 { t.Errorf("Session name mismatch\nGot: %s\nWant: Name should start with %s)", resp.Name, expectedName) } } func TestSpannerCreateSession_Unavailable(t *testing.T) { testSpanner.Reset() var expectedName = fmt.Sprintf("projects/%s/instances/%s/databases/%s/sessions/", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var request = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } testSpanner.SetError(gstatus.Error(codes.Unavailable, "Temporary unavailable")) resp, err := c.CreateSession(context.Background(), request) if err != nil { t.Fatal(err) } if strings.Index(resp.Name, expectedName) != 0 { t.Errorf("Session name mismatch\nGot: %s\nWant: Name should start with %s)", resp.Name, expectedName) } } func TestSpannerGetSession(t *testing.T) { testSpanner.Reset() var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var createRequest = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } createResp, err := c.CreateSession(context.Background(), createRequest) if err != nil { t.Fatal(err) } var getRequest = &spannerpb.GetSessionRequest{ Name: createResp.Name, } getResp, err := c.GetSession(context.Background(), getRequest) if err != nil { t.Fatal(err) } if getResp.Name != getRequest.Name { t.Errorf("Session name mismatch\nGot: %s\nWant: Name should start with %s)", getResp.Name, getRequest.Name) } } func TestSpannerListSessions(t *testing.T) { testSpanner.Reset() const expectedNumberOfSessions = 5 var expectedName = fmt.Sprintf("projects/%s/instances/%s/databases/%s/sessions/", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") 
var createRequest = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } for i := 0; i < expectedNumberOfSessions; i++ { _, err := c.CreateSession(context.Background(), createRequest) if err != nil { t.Fatal(err) } } var listRequest = &spannerpb.ListSessionsRequest{ Database: formattedDatabase, } var sessionCount int listResp := c.ListSessions(context.Background(), listRequest) for { session, err := listResp.Next() if err == iterator.Done { break } if err != nil { t.Fatal(err) } if strings.Index(session.Name, expectedName) != 0 { t.Errorf("Session name mismatch\nGot: %s\nWant: Name should start with %s)", session.Name, expectedName) } sessionCount++ } if sessionCount != expectedNumberOfSessions { t.Errorf("Session count mismatch\nGot: %d\nWant: %d", sessionCount, expectedNumberOfSessions) } } func TestSpannerDeleteSession(t *testing.T) { testSpanner.Reset() const expectedNumberOfSessions = 5 var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var createRequest = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } for i := 0; i < expectedNumberOfSessions; i++ { _, err := c.CreateSession(context.Background(), createRequest) if err != nil { t.Fatal(err) } } var listRequest = &spannerpb.ListSessionsRequest{ Database: formattedDatabase, } var sessionCount int listResp := c.ListSessions(context.Background(), listRequest) for { session, err := listResp.Next() if err == iterator.Done { break } if err != nil { t.Fatal(err) } var deleteRequest = &spannerpb.DeleteSessionRequest{ Name: session.Name, } c.DeleteSession(context.Background(), deleteRequest) sessionCount++ } if sessionCount != expectedNumberOfSessions { t.Errorf("Session count mismatch\nGot: %d\nWant: %d", sessionCount, expectedNumberOfSessions) } // Re-list all sessions. This should now be empty. 
listResp = c.ListSessions(context.Background(), listRequest) _, err = listResp.Next() if err != iterator.Done { t.Errorf("expected empty session iterator") } } func TestSpannerExecuteSql(t *testing.T) { setup() c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var createRequest = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } session, err := c.CreateSession(context.Background(), createRequest) if err != nil { t.Fatal(err) } request := &spannerpb.ExecuteSqlRequest{ Session: session.Name, Sql: selectSQL, Transaction: &spannerpb.TransactionSelector{ Selector: &spannerpb.TransactionSelector_SingleUse{ SingleUse: &spannerpb.TransactionOptions{ Mode: &spannerpb.TransactionOptions_ReadOnly_{ ReadOnly: &spannerpb.TransactionOptions_ReadOnly{ ReturnReadTimestamp: false, TimestampBound: &spannerpb.TransactionOptions_ReadOnly_Strong{ Strong: true, }, }, }, }, }, }, Seqno: 1, QueryMode: spannerpb.ExecuteSqlRequest_NORMAL, } response, err := c.ExecuteSql(context.Background(), request) if err != nil { t.Fatal(err) } var rowCount int64 for _, row := range response.Rows { if len(row.Values) != selectColCount { t.Fatalf("Column count mismatch\nGot: %d\nWant: %d", len(row.Values), selectColCount) } rowCount++ } if rowCount != selectRowCount { t.Fatalf("Row count mismatch\nGot: %d\nWant: %d", rowCount, selectRowCount) } } func TestSpannerExecuteSqlDml(t *testing.T) { setup() c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var createRequest = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } session, err := c.CreateSession(context.Background(), createRequest) if err != nil { t.Fatal(err) } request := &spannerpb.ExecuteSqlRequest{ Session: session.Name, Sql: updateSQL, Transaction: &spannerpb.TransactionSelector{ Selector: &spannerpb.TransactionSelector_Begin{ Begin: &spannerpb.TransactionOptions{ Mode: &spannerpb.TransactionOptions_ReadWrite_{ ReadWrite: &spannerpb.TransactionOptions_ReadWrite{}, }, }, }, }, Seqno: 1, QueryMode: spannerpb.ExecuteSqlRequest_NORMAL, } response, err := c.ExecuteSql(context.Background(), request) if err != nil { t.Fatal(err) } var rowCount int64 = response.Stats.GetRowCountExact() if rowCount != updateRowCount { t.Fatalf("Update count mismatch\nGot: %d\nWant: %d", rowCount, updateRowCount) } } func TestSpannerExecuteStreamingSql(t *testing.T) { setup() c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var createRequest = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } session, err := c.CreateSession(context.Background(), createRequest) if err != nil { t.Fatal(err) } request := &spannerpb.ExecuteSqlRequest{ Session: session.Name, Sql: selectSQL, Transaction: &spannerpb.TransactionSelector{ Selector: &spannerpb.TransactionSelector_SingleUse{ SingleUse: &spannerpb.TransactionOptions{ Mode: &spannerpb.TransactionOptions_ReadOnly_{ ReadOnly: &spannerpb.TransactionOptions_ReadOnly{ ReturnReadTimestamp: false, TimestampBound: &spannerpb.TransactionOptions_ReadOnly_Strong{ Strong: true, }, }, }, }, }, }, Seqno: 1, QueryMode: spannerpb.ExecuteSqlRequest_NORMAL, } response, 
err := c.ExecuteStreamingSql(context.Background(), request) if err != nil { t.Fatal(err) } var rowIndex int64 var colCount int for { for rowIndexInPartial := int64(0); rowIndexInPartial < MaxRowsPerPartialResultSet; rowIndexInPartial++ { partial, err := response.Recv() if err != nil { t.Fatal(err) } if rowIndex == 0 { colCount = len(partial.Metadata.RowType.Fields) if colCount != selectColCount { t.Fatalf("Column count mismatch\nGot: %d\nWant: %d", colCount, selectColCount) } } for col := 0; col < colCount; col++ { pIndex := rowIndexInPartial*int64(colCount) + int64(col) val, err := strconv.ParseInt(partial.Values[pIndex].GetStringValue(), 10, 64) if err != nil { t.Fatalf("Error parsing integer at #%d: %v", pIndex, err) } if val != selectValues[rowIndex] { t.Fatalf("Value mismatch at index %d\nGot: %d\nWant: %d", rowIndex, val, selectValues[rowIndex]) } } rowIndex++ } if rowIndex == selectRowCount { break } } if rowIndex != selectRowCount { t.Fatalf("Row count mismatch\nGot: %d\nWant: %d", rowIndex, selectRowCount) } } func TestSpannerExecuteBatchDml(t *testing.T) { setup() c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var createRequest = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } session, err := c.CreateSession(context.Background(), createRequest) if err != nil { t.Fatal(err) } statements := make([]*spannerpb.ExecuteBatchDmlRequest_Statement, 3) for idx := 0; idx < len(statements); idx++ { statements[idx] = &spannerpb.ExecuteBatchDmlRequest_Statement{Sql: updateSQL} } executeBatchDmlRequest := &spannerpb.ExecuteBatchDmlRequest{ Session: session.Name, Statements: statements, Transaction: &spannerpb.TransactionSelector{ Selector: &spannerpb.TransactionSelector_Begin{ Begin: &spannerpb.TransactionOptions{ Mode: &spannerpb.TransactionOptions_ReadWrite_{ ReadWrite: &spannerpb.TransactionOptions_ReadWrite{}, }, }, }, }, Seqno: 1, } response, err := c.ExecuteBatchDml(context.Background(), executeBatchDmlRequest) if err != nil { t.Fatal(err) } var totalRowCount int64 for _, res := range response.ResultSets { var rowCount int64 = res.Stats.GetRowCountExact() if rowCount != updateRowCount { t.Fatalf("Update count mismatch\nGot: %d\nWant: %d", rowCount, updateRowCount) } totalRowCount += rowCount } if totalRowCount != updateRowCount*int64(len(statements)) { t.Fatalf("Total update count mismatch\nGot: %d\nWant: %d", totalRowCount, updateRowCount*int64(len(statements))) } } func TestBeginTransaction(t *testing.T) { setup() c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var createRequest = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } session, err := c.CreateSession(context.Background(), createRequest) if err != nil { t.Fatal(err) } beginRequest := &spannerpb.BeginTransactionRequest{ Session: session.Name, Options: &spannerpb.TransactionOptions{ Mode: &spannerpb.TransactionOptions_ReadWrite_{ ReadWrite: &spannerpb.TransactionOptions_ReadWrite{}, }, }, } tx, err := c.BeginTransaction(context.Background(), beginRequest) if err != nil { t.Fatal(err) } expectedName := fmt.Sprintf("%s/transactions/", session.Name) if strings.Index(string(tx.Id), expectedName) != 0 { t.Errorf("Transaction name mismatch\nGot: %s\nWant: Name should start with %s)", 
string(tx.Id), expectedName) } } func TestCommitTransaction(t *testing.T) { setup() c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var createRequest = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } session, err := c.CreateSession(context.Background(), createRequest) if err != nil { t.Fatal(err) } beginRequest := &spannerpb.BeginTransactionRequest{ Session: session.Name, Options: &spannerpb.TransactionOptions{ Mode: &spannerpb.TransactionOptions_ReadWrite_{ ReadWrite: &spannerpb.TransactionOptions_ReadWrite{}, }, }, } tx, err := c.BeginTransaction(context.Background(), beginRequest) if err != nil { t.Fatal(err) } commitRequest := &spannerpb.CommitRequest{ Session: session.Name, Transaction: &spannerpb.CommitRequest_TransactionId{ TransactionId: tx.Id, }, } resp, err := c.Commit(context.Background(), commitRequest) if err != nil { t.Fatal(err) } if resp.CommitTimestamp == nil { t.Fatalf("No commit timestamp returned") } } func TestRollbackTransaction(t *testing.T) { setup() c, err := apiv1.NewClient(context.Background(), clientOpt) if err != nil { t.Fatal(err) } var formattedDatabase = fmt.Sprintf("projects/%s/instances/%s/databases/%s", "[PROJECT]", "[INSTANCE]", "[DATABASE]") var createRequest = &spannerpb.CreateSessionRequest{ Database: formattedDatabase, } session, err := c.CreateSession(context.Background(), createRequest) if err != nil { t.Fatal(err) } beginRequest := &spannerpb.BeginTransactionRequest{ Session: session.Name, Options: &spannerpb.TransactionOptions{ Mode: &spannerpb.TransactionOptions_ReadWrite_{ ReadWrite: &spannerpb.TransactionOptions_ReadWrite{}, }, }, } tx, err := c.BeginTransaction(context.Background(), beginRequest) if err != nil { t.Fatal(err) } rollbackRequest := &spannerpb.RollbackRequest{ Session: session.Name, TransactionId: tx.Id, } err = c.Rollback(context.Background(), rollbackRequest) if err != nil { t.Fatal(err) } }<|fim▁end|>
t.Fatal(err) } resp, err := c.CreateSession(context.Background(), request) if err != nil {
<|file_name|>metadata.rs<|end_file_name|><|fim▁begin|>//! Types related to topic metadata for introspection by clients. //! Example: `KafkaClient::topics()`. use std::collections::hash_map; use std::fmt; use super::KafkaClient; use super::state::{ClientState, TopicPartitions, TopicPartitionIter, TopicPartition}; // public re-export pub use super::state::Broker; pub use super::state::TopicNames; /// A view on the loaded metadata about topics and their partitions. pub struct Topics<'a> { state: &'a ClientState, } impl<'a> Topics<'a> { /// Constructs a view of the currently loaded topic metadata from /// the specified kafka client. #[inline] pub fn new(client: &KafkaClient) -> Topics { Topics { state: &client.state } } /// Retrieves the number of the underlying topics. #[inline] pub fn len(&self) -> usize { self.state.num_topics() } /// Provides an iterator over the underlying topics. #[inline] pub fn iter(&'a self) -> TopicIter<'a> { TopicIter::new(self.state) } /// A conveniece method to return an iterator over the topics' /// names. #[inline] pub fn names(&'a self) -> TopicNames<'a> { self.state.topic_names() } /// A convenience method to determine whether the specified topic /// is known. #[inline] pub fn contains(&'a self, topic: &str) -> bool { self.state.contains_topic(topic) } /// Retrieves the partitions of a specified topic. #[inline] pub fn partitions(&'a self, topic: &str) -> Option<Partitions<'a>> { self.state.partitions_for(topic).map(|tp| { Partitions { state: self.state, tp: tp, } }) } } impl<'a> fmt::Debug for Topics<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "Topics {{ topics: [")); let mut ts = self.iter(); if let Some(t) = ts.next() { try!(write!(f, "{:?}", t)); } for t in ts { try!(write!(f, ", {:?}", t)); } write!(f, "] }}") } } impl<'a> IntoIterator for &'a Topics<'a> { type Item = Topic<'a>; type IntoIter = TopicIter<'a>; fn into_iter(self) -> Self::IntoIter { self.iter() } } impl<'a> IntoIterator for Topics<'a> { type Item = Topic<'a>; type IntoIter = TopicIter<'a>; fn into_iter(self) -> Self::IntoIter { TopicIter::new(self.state) } } /// An interator over topics. pub struct TopicIter<'a> { state: &'a ClientState, iter: hash_map::Iter<'a, String, TopicPartitions>, } impl<'a> TopicIter<'a> { fn new(state: &'a ClientState) -> TopicIter<'a> { TopicIter { state: state, iter: state.topic_partitions().iter(), } } } impl<'a> Iterator for TopicIter<'a> { type Item = Topic<'a>; #[inline] fn next(&mut self) -> Option<Self::Item> { self.iter.next().map(|(name, tps)| { Topic { state: self.state, name: &name[..], tp: tps, } }) } } /// A view on the loaded metadata for a particular topic. pub struct Topic<'a> { state: &'a ClientState, name: &'a str, tp: &'a TopicPartitions, } impl<'a> Topic<'a> { /// Retrieves the name of this topic. #[inline] pub fn name(&self) -> &str { self.name } /// Retrieves the list of all partitions for this topic. #[inline] pub fn partitions(&self) -> Partitions<'a> { Partitions { state: self.state, tp: self.tp, } } } impl<'a> fmt::Debug for Topic<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Topic {{ name: {}, partitions: {:?} }}", self.name, self.partitions()) } } /// Metadata relevant to partitions of a particular topic. pub struct Partitions<'a> { state: &'a ClientState, tp: &'a TopicPartitions, } impl<'a> Partitions<'a> { /// Retrieves the number of the topic's partitions. #[inline] pub fn len(&self) -> usize { self.tp.len() } /// Tests for `.len() > 0`. 
#[inline] pub fn is_empty(&self) -> bool { self.tp.is_empty() } /// Retrieves an iterator of the partitions of the underlying topic. #[inline] pub fn iter(&self) -> PartitionIter<'a> { PartitionIter::new(self.state, self.tp) } /// Finds a specified partition identified by its id. #[inline] pub fn partition(&self, partition_id: i32) -> Option<Partition<'a>> { self.tp.partition(partition_id).map(|p| { Partition::new(self.state, p, partition_id) }) } /// Convenience method to retrieve the identifiers of all /// currently "available" partitions. Such partitions are known /// to have a leader broker and can be sent messages to. #[inline] pub fn available_ids(&self) -> Vec<i32> { self.tp .iter() .filter_map(|(id, p)| p.broker(self.state).map(|_| id)) .collect() } } impl<'a> fmt::Debug for Partitions<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { try!(write!(f, "Partitions {{ [")); let mut ps = self.iter(); if let Some(p) = ps.next() { try!(write!(f, "{:?}", p)); } for p in ps { try!(write!(f, ", {:?}", p)); } write!(f, "] }}") } } impl<'a> IntoIterator for &'a Partitions<'a> { type Item = Partition<'a>; type IntoIter = PartitionIter<'a>; fn into_iter(self) -> Self::IntoIter { self.iter() } } impl<'a> IntoIterator for Partitions<'a> { type Item = Partition<'a>; type IntoIter = PartitionIter<'a>; fn into_iter(self) -> Self::IntoIter { PartitionIter::new(self.state, self.tp) } } /// An interator over a topic's partitions. pub struct PartitionIter<'a> { state: &'a ClientState, iter: TopicPartitionIter<'a>, } <|fim▁hole|> iter: tp.iter(), } } } impl<'a> Iterator for PartitionIter<'a> { type Item = Partition<'a>; #[inline] fn next(&mut self) -> Option<Self::Item> { self.iter.next().map( |(id, p)| Partition::new(self.state, p, id), ) } } /// Metadata about a particular topic partition. /// /// A partition can be seen as either available or not by /// `kafka-rust`. "Available" partitions are partitions with an /// assigned leader broker and can be send messages to or fetched /// messages from. Non-available partitions are ignored by /// `kafka-rust`. Whether or not a partition is currently "available" /// can be determined by testing for `partition.leader().is_some()` or /// more directly through `partition.is_available()`. pub struct Partition<'a> { state: &'a ClientState, partition: &'a TopicPartition, id: i32, } impl<'a> Partition<'a> { fn new(state: &'a ClientState, partition: &'a TopicPartition, id: i32) -> Partition<'a> { Partition { state: state, partition: partition, id: id, } } /// Retrieves the identifier of this topic partition. #[inline] pub fn id(&self) -> i32 { self.id } /// Retrieves the current leader broker of this partition - if /// any. A partition with a leader is said to be "available". #[inline] pub fn leader(&self) -> Option<&'a Broker> { self.partition.broker(self.state) } /// Determines whether this partition is currently "available". /// See `Partition::leader()`. pub fn is_available(&self) -> bool { self.leader().is_some() } } impl<'a> fmt::Debug for Partition<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Partition {{ id: {}, leader: {:?} }}", self.id(), self.leader()) } }<|fim▁end|>
impl<'a> PartitionIter<'a> { fn new(state: &'a ClientState, tp: &'a TopicPartitions) -> Self { PartitionIter { state: state,
<|file_name|>Library.py<|end_file_name|><|fim▁begin|>''' Created on Apr 3, 2012 @author: Dan ''' from Common.Converter import * from Common.Module import * from Common.Symbol import DevicePart from Common.Device import Deviceset class Library(object): __slots__ = ("name", "modules", "symbols", "converter", "deviceparts") def __init__(self, node, name, converter=None): self.name = name if converter is None: converter = Converter()<|fim▁hole|> self.modules = [] self.deviceparts = [] devicesetsLst = [] symbolsHash = {} packages = node.find("packages").findall("package") if packages != None: for package in packages: self.modules.append(Module(package, converter)) devicesets = node.find("devicesets").findall("deviceset") if devicesets != None: for deviceset in devicesets: ds = Deviceset(deviceset, symConverter) devicesetsLst.append(ds) symbols = node.find("symbols").findall("symbol") if symbols != None and len(devicesetsLst) != 0: #strange if not? for symbol in symbols: sn = symbol.get("name") if sn in symbolsHash: print("The symbol with the same name %s already exists!" % sn) else: symbolsHash[sn] = symbol for deviceset in devicesetsLst: #strange if not? #just iterater over all posible device packages for device in deviceset.getDevices(): #we have to create a number of symbols to match diffrent pin configurations #the real name of device is <deviceset> name plus name of <device> #symlink is just a scheme representation of the set of devices or devicessts device.setFullName(deviceset.name) dp = DevicePart(device, symbolsHash, deviceset.getGates(), symConverter) self.deviceparts.append(dp) def writeLibrary(self, modFile=None, symFile=None, docFile=None): if modFile != None: self.writeModFile(modFile) if symFile != None: self.writeSymFile(symFile) if docFile != None: #not used at the moment self.writeDocFile(docFile) def writeModFile(self, modFile): modFile.write("PCBNEW-LibModule-V1 00/00/0000-00:00:00\n") modFile.write("$INDEX\n") for module in self.modules: modFile.write(module.package + "\n") modFile.write("$EndINDEX\n") for module in self.modules: module.write(modFile) modFile.write("$EndLIBRARY") modFile.close() def writeSymFile(self, symFile): symFile.write("EESchema-LIBRARY Version 0.0 00/00/0000-00:00:00\n") for devicepart in self.deviceparts: devicepart.write(symFile) symFile.write("# End Library") def writeDocFile(self, docFile): docFile.write("EESchema-DOCLIB Version 0.0 Date: 00/00/0000 00:00:00\n")<|fim▁end|>
symConverter = SchemConverter()
<|file_name|>test_short_term_loss_prestress_01.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import division '''Test for checking variation of initial prestress force along a post-tensioned member. Data and rough calculation are taken from Example 4.3 of the topic 4 of course "Prestressed Concrete Design (SAB 4323) by Baderul Hisham Ahmad ocw.utm.my Problem statement: Determine the initial prestress force distribution along the beam if the anchorage draw-in is 5 mm. Given the following: • Span = 20m, μ= 0.25 & K = 17 x 10-4 per metre • fpi = 1239 N/ mm2 ; A ps = 2850 mm2 • e at both ends = 0 • e at mid-span = 558 mm • Es = 195 kN/mm2 ''' __author__= "Ana Ortega (AO_O)" __copyright__= "Copyright 2017, AO_O" __license__= "GPL" __version__= "3.0" __email__= "[email protected]" import numpy as np import math from materials.prestressing import prestressed_concrete as presconc from model.geometry import geom_utils #Geometry lBeam=20 #beam span [m] #Parabola eEnds=0 #eccentricity of cables at both ends of the beam eMidspan=-0.558 #eccentricity of cables at midspan [m] angl_Parab_XZ=math.pi/4 #angle between the vertical plane that contains the #parabola and the plane XZ #Material Ep=195e9 #elastic modulus of prestressing steel [Pa] #Prestressing process mu=0.25 #coefficient of friction between the cables and their sheating k=0.0017 #wobble coefficient per meter length of cable [1/m] sigmap0max=1239e6 #Initial stress of cable [Pa] Aps=2850e-6 #Area of cable [m2]<|fim▁hole|>n_points_rough=5 #number of points provided to the interpolation algorithm n_points_fine=101 #number of points interpolated #Anchorage slip deltaL=5e-3 #anchorage draw-in (provided by manufacturer) [m] #Rough results from direct calculation (formula): lp_anch_lhe=419.3 #loss of prestress force at left-hand end anchorage [kN] fl_frc=15.82 #loss of prestress due to friction [kN/m] P_le=3111.9 #prestress force at left end [kN] P_ms=3270.1 #prestress force at midspan [kN] P_re=3214.8 #prestress force at right end [kN] # XC model #Tendon [m] definition, layout and friction losses a,b,c=geom_utils.fit_parabola(x=np.array([0,lBeam/2.0,lBeam]), y=np.array([eEnds,eMidspan,eEnds])) x_parab_rough,y_parab_rough,z_parab_rough=geom_utils.eq_points_parabola(0,lBeam,n_points_rough,a,b,c,angl_Parab_XZ) tendon=presconc.PrestressTendon([]) tendon.roughCoordMtr=np.array([x_parab_rough,y_parab_rough,z_parab_rough]) #Interpolated 3D spline tendon.pntsInterpTendon(n_points_fine,smoothness=1,kgrade=3) # Losses of prestressing due to friction lssFrict=tendon.getLossFriction(coefFric=mu,k=k,sigmaP0_extr1=sigmap0max,sigmaP0_extr2=0.0) # Losses of prestressing due to anchorage slip (loss due to friction must be # previously calculated lssAnch=tendon.getLossAnchor(Ep=Ep,anc_slip_extr1=deltaL,anc_slip_extr2=0.0) Laffected=tendon.projXYcoordZeroAnchLoss[0] # effective length of tendon #affected by the anchorage slip in extremity 1 [m] # Results lssAnch_e1=lssAnch[0] #prestress loss due to anchorage draw-in extremity 1 lssAnch_md=lssAnch[int(len(lssAnch)/2)] #prestress loss due to anchorage draw-in midspan lssAnch_e2=lssAnch[-1] #prestress loss due to anchorage draw-in extremity 2 lssFrict_e1=lssFrict[0] #prestress loss due to friction extremity 1 lssFrict_md=lssFrict[int(len(lssFrict)/2)] #prestress loss due to friction midspan lssFrict_e2=lssFrict[-1] #prestress loss due to friction extremity 2 P_extr1=(sigmap0max-lssAnch_e1-lssFrict_e1)*Aps*1e-3 P_midspan=(sigmap0max-lssAnch_md-lssFrict_md)*Aps*1e-3 
P_extr2=(sigmap0max-lssAnch_e2-lssFrict_e2)*Aps*1e-3 ratio1=abs(P_extr1-P_le)/P_le ratio2=abs(P_midspan-P_ms)/P_ms ratio3=abs(P_extr2-P_re)/P_re import os from miscUtils import LogMessages as lmsg fname= os.path.basename(__file__) if (ratio1<5.e-3 and ratio2<5.e-4 and ratio3<5.e-3): print "test ",fname,": ok." else: lmsg.error(fname+' ERROR.')<|fim▁end|>
# Interpolation
<|file_name|>karma.conf.js<|end_file_name|><|fim▁begin|>// Karma configuration // Generated on Wed Jul 15 2015 09:44:02 GMT+0200 (Romance Daylight Time) module.exports = function(config) { config.set({ // base path that will be used to resolve all patterns (eg. files, exclude) basePath: '', // frameworks to use // available frameworks: https://npmjs.org/browse/keyword/karma-adapter frameworks: ['jasmine'], // list of files / patterns to load in the browser files: [ 'node_modules/angular2/node_modules/zone.js/dist/zone-microtask.js', 'node_modules/angular2/node_modules/zone.js/dist/long-stack-trace-zone.js', 'node_modules/angular2/node_modules/zone.js/dist/jasmine-patch.js', 'node_modules/angular2/node_modules/traceur/bin/traceur-runtime.js', 'node_modules/es6-module-loader/dist/es6-module-loader-sans-promises.src.js', 'node_modules/systemjs/dist/system.src.js', 'node_modules/reflect-metadata/Reflect.js', { pattern: 'test/**/*.js', included: false, watched: true }, { pattern: 'node_modules/angular2/**/*.js', included: false, watched: false }, 'test-main.js', ], // list of files to exclude exclude: [ ], // preprocess matching files before serving them to the browser // available preprocessors: https://npmjs.org/browse/keyword/karma-preprocessor preprocessors: { }, // test results reporter to use // possible values: 'dots', 'progress' // available reporters: https://npmjs.org/browse/keyword/karma-reporter reporters: ['progress'], // web server port port: 9876, // enable / disable colors in the output (reporters and logs) colors: true, // level of logging // possible values: config.LOG_DISABLE || config.LOG_ERROR || config.LOG_WARN || config.LOG_INFO || config.LOG_DEBUG logLevel: config.LOG_INFO, // enable / disable watching file and executing tests whenever any file changes autoWatch: true, <|fim▁hole|> // Continuous Integration mode // if true, Karma captures browsers, runs the tests and exits singleRun: false }) }<|fim▁end|>
// start these browsers // available browser launchers: https://npmjs.org/browse/keyword/karma-launcher browsers: ['PhantomJS'],
<|file_name|>prometheus.py<|end_file_name|><|fim▁begin|>from kvmagent import kvmagent from zstacklib.utils import jsonobject from zstacklib.utils import http from zstacklib.utils import log from zstacklib.utils.bash import * from zstacklib.utils import linux<|fim▁hole|>import os.path import re import time import traceback from prometheus_client.core import GaugeMetricFamily,REGISTRY from prometheus_client import start_http_server logger = log.get_logger(__name__) class PrometheusPlugin(kvmagent.KvmAgent): COLLECTD_PATH = "/prometheus/collectdexporter/start" @kvmagent.replyerror @in_bash def start_collectd_exporter(self, req): cmd = jsonobject.loads(req[http.REQUEST_BODY]) rsp = kvmagent.AgentResponse() eths = bash_o("ls /sys/class/net").split() interfaces = [] for eth in eths: eth = eth.strip(' \t\n\r') if eth == 'lo': continue elif eth.startswith('vnic'): continue elif eth.startswith('outer'): continue elif eth.startswith('br_'): continue elif not eth: continue else: interfaces.append(eth) conf_path = os.path.join(os.path.dirname(cmd.binaryPath), 'collectd.conf') conf = '''Interval {{INTERVAL}} FQDNLookup false LoadPlugin syslog LoadPlugin aggregation LoadPlugin cpu LoadPlugin disk LoadPlugin interface LoadPlugin memory LoadPlugin network LoadPlugin virt <Plugin aggregation> <Aggregation> #Host "unspecified" Plugin "cpu" #PluginInstance "unspecified" Type "cpu" #TypeInstance "unspecified" GroupBy "Host" GroupBy "TypeInstance" CalculateNum false CalculateSum false CalculateAverage true CalculateMinimum false CalculateMaximum false CalculateStddev false </Aggregation> </Plugin> <Plugin cpu> ReportByCpu true ReportByState true ValuesPercentage true </Plugin> <Plugin disk> Disk "/^sd/" Disk "/^hd/" Disk "/^vd/" IgnoreSelected false </Plugin> <Plugin "interface"> {% for i in INTERFACES -%} Interface "{{i}}" {% endfor -%} IgnoreSelected false </Plugin> <Plugin memory> ValuesAbsolute true ValuesPercentage false </Plugin> <Plugin virt> Connection "qemu:///system" RefreshInterval {{INTERVAL}} HostnameFormat name PluginInstanceFormat name </Plugin> <Plugin network> Server "localhost" "25826" </Plugin> ''' tmpt = Template(conf) conf = tmpt.render({ 'INTERVAL': cmd.interval, 'INTERFACES': interfaces, }) need_restart_collectd = False if os.path.exists(conf_path): with open(conf_path, 'r') as fd: old_conf = fd.read() if old_conf != conf: with open(conf_path, 'w') as fd: fd.write(conf) need_restart_collectd = True else: with open(conf_path, 'w') as fd: fd.write(conf) need_restart_collectd = True pid = linux.find_process_by_cmdline(['collectd', conf_path]) if not pid: bash_errorout('collectd -C %s' % conf_path) else: if need_restart_collectd: bash_errorout('kill -9 %s' % pid) bash_errorout('collectd -C %s' % conf_path) pid = linux.find_process_by_cmdline([cmd.binaryPath]) if not pid: EXPORTER_PATH = cmd.binaryPath LOG_FILE = os.path.join(os.path.dirname(EXPORTER_PATH), cmd.binaryPath + '.log') ARGUMENTS = cmd.startupArguments if not ARGUMENTS: ARGUMENTS = "" bash_errorout('chmod +x {{EXPORTER_PATH}}') bash_errorout("nohup {{EXPORTER_PATH}} {{ARGUMENTS}} >{{LOG_FILE}} 2>&1 < /dev/null &\ndisown") return jsonobject.dumps(rsp) def install_colletor(self): class Collector(object): def collect(self): try: ret = [] for c in kvmagent.metric_collectors: ret.extend(c()) return ret except Exception as e: content = traceback.format_exc() err = '%s\n%s\n' % (str(e), content) logger.warn(err) return [] REGISTRY.register(Collector()) def start(self): http_server = kvmagent.get_http_server() 
http_server.register_async_uri(self.COLLECTD_PATH, self.start_collectd_exporter) self.install_colletor() start_http_server(7069) def stop(self): pass<|fim▁end|>
from zstacklib.utils import thread from jinja2 import Template
<|file_name|>gc.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. <|fim▁hole|>use std::collections::{HashMap, HashSet}; use autocxx_parser::IncludeCppConfig; use crate::{conversion::api::Api, types::QualifiedName}; use super::fun::FnPhase; /// This is essentially mark-and-sweep garbage collection of the /// [Api]s that we've discovered. Why do we do this, you might wonder? /// It seems a bit strange given that we pass an explicit allowlist /// to bindgen. /// There are two circumstances under which we want to discard /// some of the APIs we encounter parsing the bindgen. /// 1) We simplify some struct to be non-POD. In this case, we'll /// discard all the fields within it. Those fields can be, and /// in fact often _are_, stuff which we have trouble converting /// e.g. std::string or std::string::value_type or /// my_derived_thing<std::basic_string::value_type> or some /// other permutation. In such cases, we want to discard those /// field types with prejudice. /// 2) block! may be used to ban certain APIs. This often eliminates /// some methods from a given struct/class. In which case, we /// don't care about the other parameter types passed into those /// APIs either. pub(crate) fn filter_apis_by_following_edges_from_allowlist( mut apis: Vec<Api<FnPhase>>, config: &IncludeCppConfig, ) -> Vec<Api<FnPhase>> { let mut todos: Vec<QualifiedName> = apis .iter() .filter(|api| { let tnforal = api.typename_for_allowlist(); config.is_on_allowlist(&tnforal.to_cpp_name()) }) .map(Api::name) .cloned() .collect(); let mut by_typename: HashMap<QualifiedName, Vec<Api<FnPhase>>> = HashMap::new(); for api in apis.drain(..) { let tn = api.name().clone(); by_typename.entry(tn).or_default().push(api); } let mut done = HashSet::new(); let mut output = Vec::new(); while !todos.is_empty() { let todo = todos.remove(0); if done.contains(&todo) { continue; } if let Some(mut these_apis) = by_typename.remove(&todo) { todos.extend(these_apis.iter().flat_map(|api| api.deps())); output.append(&mut these_apis); } // otherwise, probably an intrinsic e.g. uint32_t. done.insert(todo); } output }<|fim▁end|>
<|file_name|>storage.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::dom::bindings::codegen::Bindings::StorageBinding; use crate::dom::bindings::codegen::Bindings::StorageBinding::StorageMethods; use crate::dom::bindings::error::{Error, ErrorResult}; use crate::dom::bindings::inheritance::Castable; use crate::dom::bindings::refcounted::Trusted; use crate::dom::bindings::reflector::{reflect_dom_object, DomObject, Reflector}; use crate::dom::bindings::root::DomRoot; use crate::dom::bindings::str::DOMString; use crate::dom::event::{Event, EventBubbles, EventCancelable}; use crate::dom::storageevent::StorageEvent; use crate::dom::window::Window; use crate::task_source::TaskSource; use dom_struct::dom_struct; use ipc_channel::ipc::IpcSender; use net_traits::storage_thread::{StorageThreadMsg, StorageType}; use net_traits::IpcSend; use profile_traits::ipc; use script_traits::ScriptMsg; use servo_url::ServoUrl; #[dom_struct] pub struct Storage { reflector_: Reflector, storage_type: StorageType, } impl Storage { fn new_inherited(storage_type: StorageType) -> Storage { Storage { reflector_: Reflector::new(), storage_type: storage_type, } } pub fn new(global: &Window, storage_type: StorageType) -> DomRoot<Storage> { reflect_dom_object( Box::new(Storage::new_inherited(storage_type)), global, StorageBinding::Wrap, ) } fn get_url(&self) -> ServoUrl { self.global().get_url()<|fim▁hole|> } } impl StorageMethods for Storage { // https://html.spec.whatwg.org/multipage/#dom-storage-length fn Length(&self) -> u32 { let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); self.get_storage_thread() .send(StorageThreadMsg::Length( sender, self.get_url(), self.storage_type, )) .unwrap(); receiver.recv().unwrap() as u32 } // https://html.spec.whatwg.org/multipage/#dom-storage-key fn Key(&self, index: u32) -> Option<DOMString> { let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); self.get_storage_thread() .send(StorageThreadMsg::Key( sender, self.get_url(), self.storage_type, index, )) .unwrap(); receiver.recv().unwrap().map(DOMString::from) } // https://html.spec.whatwg.org/multipage/#dom-storage-getitem fn GetItem(&self, name: DOMString) -> Option<DOMString> { let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); let name = String::from(name); let msg = StorageThreadMsg::GetItem(sender, self.get_url(), self.storage_type, name); self.get_storage_thread().send(msg).unwrap(); receiver.recv().unwrap().map(DOMString::from) } // https://html.spec.whatwg.org/multipage/#dom-storage-setitem fn SetItem(&self, name: DOMString, value: DOMString) -> ErrorResult { let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); let name = String::from(name); let value = String::from(value); let msg = StorageThreadMsg::SetItem( sender, self.get_url(), self.storage_type, name.clone(), value.clone(), ); self.get_storage_thread().send(msg).unwrap(); match receiver.recv().unwrap() { Err(_) => Err(Error::QuotaExceeded), Ok((changed, old_value)) => { if changed { self.broadcast_change_notification(Some(name), old_value, Some(value)); } Ok(()) }, } } // https://html.spec.whatwg.org/multipage/#dom-storage-removeitem fn RemoveItem(&self, name: DOMString) { let (sender, receiver) = 
ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); let name = String::from(name); let msg = StorageThreadMsg::RemoveItem(sender, self.get_url(), self.storage_type, name.clone()); self.get_storage_thread().send(msg).unwrap(); if let Some(old_value) = receiver.recv().unwrap() { self.broadcast_change_notification(Some(name), Some(old_value), None); } } // https://html.spec.whatwg.org/multipage/#dom-storage-clear fn Clear(&self) { let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); self.get_storage_thread() .send(StorageThreadMsg::Clear( sender, self.get_url(), self.storage_type, )) .unwrap(); if receiver.recv().unwrap() { self.broadcast_change_notification(None, None, None); } } // https://html.spec.whatwg.org/multipage/#the-storage-interface:supported-property-names fn SupportedPropertyNames(&self) -> Vec<DOMString> { let (sender, receiver) = ipc::channel(self.global().time_profiler_chan().clone()).unwrap(); self.get_storage_thread() .send(StorageThreadMsg::Keys( sender, self.get_url(), self.storage_type, )) .unwrap(); receiver .recv() .unwrap() .into_iter() .map(DOMString::from) .collect() } // check-tidy: no specs after this line fn NamedGetter(&self, name: DOMString) -> Option<DOMString> { self.GetItem(name) } fn NamedSetter(&self, name: DOMString, value: DOMString) -> ErrorResult { self.SetItem(name, value) } fn NamedDeleter(&self, name: DOMString) { self.RemoveItem(name); } } impl Storage { /// <https://html.spec.whatwg.org/multipage/#send-a-storage-notification> fn broadcast_change_notification( &self, key: Option<String>, old_value: Option<String>, new_value: Option<String>, ) { let storage = self.storage_type; let url = self.get_url(); let msg = ScriptMsg::BroadcastStorageEvent(storage, url, key, old_value, new_value); self.global() .script_to_constellation_chan() .send(msg) .unwrap(); } /// <https://html.spec.whatwg.org/multipage/#send-a-storage-notification> pub fn queue_storage_event( &self, url: ServoUrl, key: Option<String>, old_value: Option<String>, new_value: Option<String>, ) { let global = self.global(); let this = Trusted::new(self); global .as_window() .task_manager() .dom_manipulation_task_source() .queue( task!(send_storage_notification: move || { let this = this.root(); let global = this.global(); let event = StorageEvent::new( global.as_window(), atom!("storage"), EventBubbles::DoesNotBubble, EventCancelable::NotCancelable, key.map(DOMString::from), old_value.map(DOMString::from), new_value.map(DOMString::from), DOMString::from(url.into_string()), Some(&this), ); event.upcast::<Event>().fire(global.upcast()); }), global.upcast(), ) .unwrap(); } }<|fim▁end|>
} fn get_storage_thread(&self) -> IpcSender<StorageThreadMsg> { self.global().resource_threads().sender()
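One detail of the Storage methods above that is easy to miss: SetItem only fires broadcast_change_notification when the storage thread reports that the value actually changed, and RemoveItem only fires it when an old value existed. A small in-memory Python model of that changed-then-notify rule (a plain dict stands in for the real IPC-backed storage thread):

class MiniStorage:
    def __init__(self, notify):
        self._data = {}
        self._notify = notify        # notify(key, old_value, new_value)

    def set_item(self, key, value):
        old = self._data.get(key)
        if old == value:
            return                   # unchanged: no notification, like SetItem above
        self._data[key] = value
        self._notify(key, old, value)

    def remove_item(self, key):
        if key in self._data:
            old = self._data.pop(key)
            self._notify(key, old, None)

events = []
store = MiniStorage(lambda k, old, new: events.append((k, old, new)))
store.set_item('theme', 'dark')
store.set_item('theme', 'dark')      # no change, no event
store.remove_item('theme')
assert events == [('theme', None, 'dark'), ('theme', 'dark', None)]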
<|file_name|>InternalLoader.js<|end_file_name|><|fim▁begin|>// Copyright 2014 Traceur Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. import {assert} from '../util/assert.js'; import {LoaderCompiler} from '../runtime/LoaderCompiler.js'; import {ExportsList} from '../codegeneration/module/ModuleSymbol.js'; import {Map} from './polyfills/Map.js'; import {isAbsolute, resolveUrl} from '../util/url.js'; import {Options} from '../Options.js'; var NOT_STARTED = 0; var LOADING = 1; var LOADED = 2; var PARSED = 3; var TRANSFORMING = 4 var TRANSFORMED = 5; var COMPLETE = 6; var ERROR = 7; function mapToValues(map) { // We are having issues with cross frame/context symbols so we cannot use // iterators here. // https://github.com/google/traceur-compiler/issues/1152 var array = []; map.forEach((v) => { array.push(v); }); return array; } class LoaderError extends Error { constructor(msg, tree) { super(); this.message = msg; this.tree = tree; this.name = 'LoaderError'; } } /** * Base class representing a piece of code that is to be loaded or evaluated. * Similar to js-loader Load object */ class CodeUnit { /** * @param {LoaderCompiler} loaderCompiler Callbacks for parsing/transforming. * @param {string} normalizedName The normalized name of this dependency. * @param {string} type Either 'script' or 'module'. This determinse how to * parse the code. * @param {number} state */ constructor(loaderCompiler, normalizedName, type, state, name, referrerName, address) { this.promise = new Promise((res, rej) => { this.loaderCompiler = loaderCompiler; this.normalizedName = normalizedName; this.type = type; this.name_ = name; this.referrerName_ = referrerName; this.address = address; this.state_ = state || NOT_STARTED; this.error = null; this.result = null; this.metadata_ = {}; this.dependencies = []; this.resolve = res; this.reject = rej; }); } get state() { return this.state_; } set state(value) { if (value < this.state_) { throw new Error('Invalid state change'); } this.state_ = value; } /** * @return opaque value set and used by loaderCompiler */ get metadata() { return this.metadata_; } set metadata(value) { assert(value); this.metadata_ = value; } nameTrace() { var trace = this.specifiedAs(); if (isAbsolute(this.name_)) { return trace + 'An absolute name.\n'; } if (this.referrerName_) { return trace + this.importedBy() + this.normalizesTo(); } return trace + this.normalizesTo(); } specifiedAs() { return `Specified as ${this.name_}.\n`; } importedBy() { return `Imported by ${this.referrerName_}.\n`; } normalizesTo() { return 'Normalizes to ' + this.normalizedName + '\n'; } } /** * CodeUnit coming from {@code Loader.set}. */ class PreCompiledCodeUnit extends CodeUnit { constructor(loaderCompiler, normalizedName, name, referrerName, address, module) { super(loaderCompiler, normalizedName, 'module', COMPLETE, name, referrerName, address); this.result = module;<|fim▁hole|> /** * CodeUnit coming from {@code Loader.register}. 
*/ class BundledCodeUnit extends CodeUnit { constructor(loaderCompiler, normalizedName, name, referrerName, address, deps, execute) { super(loaderCompiler, normalizedName, 'module', TRANSFORMED, name, referrerName, address); this.deps = deps; this.execute = execute; } getModuleSpecifiers() { return this.deps; } evaluate() { var normalizedNames = this.deps.map((name) => this.loader_.normalize(name)); var module = this.execute.apply(Reflect.global, normalizedNames); System.set(this.normalizedName, module); return module; } } /** * CodeUnit for sharing methods that just call back to loaderCompiler */ class HookedCodeUnit extends CodeUnit { getModuleSpecifiers() { return this.loaderCompiler.getModuleSpecifiers(this); } evaluate() { return this.loaderCompiler.evaluateCodeUnit(this); } } /** * CodeUnit used for {@code Loader.load}. */ class LoadCodeUnit extends HookedCodeUnit { /** * @param {InternalLoader} loader * @param {string} normalizedName */ constructor(loaderCompiler, normalizedName, name, referrerName, address) { super(loaderCompiler, normalizedName, 'module', NOT_STARTED, name, referrerName, address); } } /** * CodeUnit used for {@code Loader.eval} and {@code Loader.module}. */ class EvalCodeUnit extends HookedCodeUnit { /** * @param {LoaderCompiler} loaderCompiler * @param {string} code * @param {string} caller script or module name */ constructor(loaderCompiler, code, type = 'script', normalizedName, referrerName, address) { super(loaderCompiler, normalizedName, type, LOADED, null, referrerName, address); this.source = code; } } var uniqueNameCount = 0; /** * The internal implementation of the code loader. */ export class InternalLoader { /** * @param {loaderCompiler} loaderCompiler */ constructor(loader, loaderCompiler) { assert(loaderCompiler); this.loader_ = loader; this.loaderCompiler = loaderCompiler; this.cache = new Map(); this.urlToKey = Object.create(null); this.sync_ = false; this.sourceMapsByURL_ = Object.create(null); this.sourceMapsByOutputName_ = Object.create(null); } defaultMetadata_(metadata = {}) { let incoming = metadata.traceurOptions; if (incoming && !(incoming instanceof Options)) { var unknown = Options.listUnknownOptions(incoming); if (unknown.length) { console.warn('Unknown metadata.traceurOptions ignored: ' + unknown.join(',')); } } metadata.traceurOptions = incoming || new Options(); return metadata; } defaultModuleMetadata_(metadata = {}) { var metadata = this.defaultMetadata_(metadata); metadata.traceurOptions.script = false; return metadata; } getSourceMap(url) { // The caller may want the sourcemap from input to output or vice versa. return this.sourceMapsByURL_[url] || this.sourceMapsByOutputName_[url]; } load(name, referrerName = this.loader_.baseURL, address, metadata = {}) { metadata = this.defaultMetadata_(metadata); var codeUnit = this.getOrCreateCodeUnit_(name, referrerName, address, metadata); this.load_(codeUnit); return codeUnit.promise.then(() => codeUnit); } load_(codeUnit) { if (codeUnit.state === ERROR) { return codeUnit; } if (codeUnit.state === TRANSFORMED) { this.handleCodeUnitLoaded(codeUnit) } else { if (codeUnit.state !== NOT_STARTED) return codeUnit; codeUnit.state = LOADING; codeUnit.address = this.loader_.locate(codeUnit); this.loader_.fetch(codeUnit).then((text) => { codeUnit.source = text; return codeUnit; }). then((load) => { return this.loader_.translate(load) }). then((source) => { codeUnit.source = source; codeUnit.state = LOADED; this.handleCodeUnitLoaded(codeUnit); return codeUnit; }). 
catch((err) => { try { codeUnit.state = ERROR; codeUnit.error = err; this.handleCodeUnitLoadError(codeUnit); } catch (ex) { console.error('Internal Error ' + (ex.stack || ex)); } }); } return codeUnit; } module(code, referrerName, address, metadata) { var codeUnit = new EvalCodeUnit(this.loaderCompiler, code, 'module', null, referrerName, address); codeUnit.metadata = this.defaultMetadata_(metadata); this.cache.set({}, codeUnit); this.handleCodeUnitLoaded(codeUnit); return codeUnit.promise; } define(normalizedName, code, address, metadata) { var codeUnit = new EvalCodeUnit(this.loaderCompiler, code, 'module', normalizedName, null, address); var key = this.getKey(normalizedName, 'module'); codeUnit.metadata = this.defaultMetadata_(metadata); this.cache.set(key, codeUnit); this.handleCodeUnitLoaded(codeUnit); return codeUnit.promise; } /** * @param {string} code, source to be compiled as 'Script' * @param {string=} name, ModuleSpecifier-like name, not normalized. * @param {string=} referrerName, normalized name of container * @param {string=} address, URL */ script(code, name, referrerName, address, metadata) { var normalizedName = System.normalize(name || '', referrerName, address); var codeUnit = new EvalCodeUnit(this.loaderCompiler, code, 'script', normalizedName, referrerName, address); var key = {}; if (name) key = this.getKey(normalizedName, 'script'); codeUnit.metadata = this.defaultMetadata_(metadata); this.cache.set(key, codeUnit); this.handleCodeUnitLoaded(codeUnit); return codeUnit.promise; } getKey(url, type) { var combined = type + ':' + url; if (combined in this.urlToKey) { return this.urlToKey[combined]; } return this.urlToKey[combined] = {}; } getCodeUnit_(normalizedName, type) { var key = this.getKey(normalizedName, type); var codeUnit = this.cache.get(key); return {key, codeUnit}; } getOrCreateCodeUnit_(name, referrerName, address, metadata) { var normalizedName = System.normalize(name, referrerName, address); // TODO(jjb): embed type in name per es-discuss Yehuda Katz, // eg import 'name,script'; var type = 'module'; if (metadata && metadata.traceurOptions && metadata.traceurOptions.script) type = 'script'; var {key, codeUnit} = this.getCodeUnit_(normalizedName, type); if (!codeUnit) { // All new code units need metadata set. assert(metadata && metadata.traceurOptions); var module = this.loader_.get(normalizedName); if (module) { codeUnit = new PreCompiledCodeUnit(this.loaderCompiler, normalizedName, name, referrerName, address, module); codeUnit.type = 'module'; } else { var bundledModule = this.loader_.bundledModule(name); if (bundledModule) { codeUnit = new BundledCodeUnit(this.loaderCompiler, normalizedName, name, referrerName, address, bundledModule.deps, bundledModule.execute); } else { codeUnit = new LoadCodeUnit(this.loaderCompiler, normalizedName, name, referrerName, address); codeUnit.type = type; } } // We copy the incoming metadata to pass values from the API and to // inherit value from the API call into modules imported by the root. // But we don't want to inherit tree etc. // TODO(jjb): move this into the CodeUnit constructors. 
codeUnit.metadata = { traceurOptions: metadata.traceurOptions, outputName: metadata.outputName, }; this.cache.set(key, codeUnit); } return codeUnit; } areAll(state) { return mapToValues(this.cache).every((codeUnit) => codeUnit.state >= state); } getCodeUnitForModuleSpecifier(name, referrerName) { var normalizedName = this.loader_.normalize(name, referrerName); return this.getCodeUnit_(normalizedName, 'module').codeUnit; } getExportsListForModuleSpecifier(name, referrer) { var codeUnit = this.getCodeUnitForModuleSpecifier(name, referrer); var exportsList = codeUnit.metadata.moduleSymbol; if (!exportsList) { if (codeUnit.result) { exportsList = new ExportsList(codeUnit.normalizedName); exportsList.addExportsFromModule(codeUnit.result); } else { throw new Error( `InternalError: ${name} is not a module, required by ${referrer}`); } } return exportsList; } /** * This is called when a codeUnit is loaded. * @param {CodeUnit} codeUnit */ handleCodeUnitLoaded(codeUnit) { var referrerName = codeUnit.normalizedName; try { var moduleSpecifiers = codeUnit.getModuleSpecifiers(); if (!moduleSpecifiers) { this.abortAll(`No module specifiers in ${referrerName}`); return; } codeUnit.dependencies = moduleSpecifiers.sort().map((name) => { return this.getOrCreateCodeUnit_(name, referrerName, null, this.defaultModuleMetadata_(codeUnit.metadata)); }); } catch (error) { this.rejectOneAndAll(codeUnit, error); return; } codeUnit.dependencies.forEach((dependency) => { this.load_(dependency); }); if (this.areAll(PARSED)) { try { // Currently analyze is only needed for module dependencies. if (codeUnit.type === 'module') this.analyze(); this.transform(); this.evaluate(); } catch (error) { this.rejectOneAndAll(codeUnit, error); } } } rejectOneAndAll(codeUnit, error) { codeUnit.state.ERROR; codeUnit.error = error; codeUnit.reject(error); // TODO(jjb): reject the other codeUnits with a distinct error. this.abortAll(error); } /** * This is called when a code unit failed to load. * @param {CodeUnit} codeUnit */ handleCodeUnitLoadError(codeUnit) { var message = codeUnit.error ? String(codeUnit.error) + '\n' : `Failed to load '${codeUnit.address}'.\n`; message += codeUnit.nameTrace() + this.loader_.nameTrace(codeUnit); this.rejectOneAndAll(codeUnit, new Error(message)); } /** * Aborts all loading code units. */ abortAll(errorMessage) { // Notify all codeUnit listeners (else tests hang til timeout). 
this.cache.forEach((codeUnit) => { if (codeUnit.state !== ERROR) codeUnit.reject(errorMessage); }); } analyze() { this.loaderCompiler.analyzeDependencies(mapToValues(this.cache), this); } transform() { this.transformDependencies_(mapToValues(this.cache)); } transformDependencies_(dependencies, dependentName) { for (var i = 0; i < dependencies.length; i++) { var codeUnit = dependencies[i]; if (codeUnit.state >= TRANSFORMED) { continue; } if (codeUnit.state === TRANSFORMING) { var cir = codeUnit.normalizedName; var cle = dependentName; this.rejectOneAndAll(codeUnit, new Error( `Unsupported circular dependency between ${cir} and ${cle}`)); return; } codeUnit.state = TRANSFORMING; try { this.transformCodeUnit_(codeUnit); } catch(error) { this.rejectOneAndAll(codeUnit, error); return; } } } transformCodeUnit_(codeUnit) { this.transformDependencies_(codeUnit.dependencies, codeUnit.normalizedName); if (codeUnit.state === ERROR) return; this.loaderCompiler.transform(codeUnit); codeUnit.state = TRANSFORMED; this.loaderCompiler.write(codeUnit); var info = codeUnit.metadata.compiler.sourceMapInfo; if (info) { this.sourceMapsByURL_[info.url] = info.map; this.sourceMapsByOutputName_[info.outputName] = info.map; } this.loader_.instantiate(codeUnit); } orderDependencies() { // Order the dependencies. var visited = new Map(); var ordered = []; function orderCodeUnits(codeUnit) { // Cyclic dependency. if (visited.has(codeUnit)) { return; } visited.set(codeUnit, true); codeUnit.dependencies.forEach(orderCodeUnits); ordered.push(codeUnit); } this.cache.forEach(orderCodeUnits); return ordered; } evaluate() { var dependencies = this.orderDependencies(); for (var i = 0; i < dependencies.length; i++) { var codeUnit = dependencies[i]; if (codeUnit.state >= COMPLETE) { continue; } var result; try { result = codeUnit.evaluate(); } catch (ex) { this.rejectOneAndAll(codeUnit, ex); return; } codeUnit.result = result; codeUnit.source = null; } for (var i = 0; i < dependencies.length; i++) { var codeUnit = dependencies[i]; if (codeUnit.state >= COMPLETE) { continue; } codeUnit.state = COMPLETE; codeUnit.resolve(codeUnit.result); } } } export var internals = { CodeUnit, EvalCodeUnit, LoadCodeUnit, LoaderCompiler };<|fim▁end|>
this.resolve(this.result); } }
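The evaluation step in InternalLoader.js leans on orderDependencies, which visits each code unit depth-first, marks it as visited so cycles terminate, and appends it only after its dependencies, so dependencies are evaluated first. A compact Python sketch of that post-order walk (the dict-shaped nodes are invented for illustration):

def order_dependencies(units):
    visited = set()
    ordered = []

    def visit(unit):
        if id(unit) in visited:          # already handled, or a cycle: stop
            return
        visited.add(id(unit))
        for dep in unit.get('dependencies', []):
            visit(dep)
        ordered.append(unit)             # post-order: dependencies land first

    for unit in units:
        visit(unit)
    return ordered

b = {'name': 'b', 'dependencies': []}
a = {'name': 'a', 'dependencies': [b]}
assert [u['name'] for u in order_dependencies([a, b])] == ['b', 'a']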
<|file_name|>errors.rs<|end_file_name|><|fim▁begin|>#![allow(unknown_lints)] use crate::core::{TargetKind, Workspace}; use crate::ops::CompileOptions; use anyhow::Error; use std::fmt; use std::path::PathBuf; use std::process::{ExitStatus, Output}; use std::str; pub type CargoResult<T> = anyhow::Result<T>; // TODO: should delete this trait and just use `with_context` instead pub trait CargoResultExt<T, E> { fn chain_err<F, D>(self, f: F) -> CargoResult<T> where F: FnOnce() -> D, D: fmt::Display + Send + Sync + 'static; } impl<T, E> CargoResultExt<T, E> for Result<T, E> where E: Into<Error>, { fn chain_err<F, D>(self, f: F) -> CargoResult<T> where F: FnOnce() -> D, D: fmt::Display + Send + Sync + 'static, { self.map_err(|e| e.into().context(f())) } } #[derive(Debug)] pub struct HttpNot200 { pub code: u32, pub url: String, } impl fmt::Display for HttpNot200 { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "failed to get 200 response from `{}`, got {}", self.url, self.code ) } } impl std::error::Error for HttpNot200 {} // ============================================================================= // Verbose error /// An error wrapper for errors that should only be displayed with `--verbose`. /// /// This should only be used in rare cases. When emitting this error, you /// should have a normal error higher up the error-cause chain (like "could /// not compile `foo`"), so at least *something* gets printed without /// `--verbose`. pub struct VerboseError { inner: Error, } impl VerboseError { pub fn new(inner: Error) -> VerboseError { VerboseError { inner } } } impl std::error::Error for VerboseError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { self.inner.source() } } impl fmt::Debug for VerboseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } impl fmt::Display for VerboseError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } // ============================================================================= // Internal error /// An unexpected, internal error. /// /// This should only be used for unexpected errors. It prints a message asking /// the user to file a bug report. pub struct InternalError { inner: Error, } impl InternalError { pub fn new(inner: Error) -> InternalError { InternalError { inner } } } impl std::error::Error for InternalError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { self.inner.source() } } impl fmt::Debug for InternalError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } impl fmt::Display for InternalError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } } // ============================================================================= // Manifest error /// Error wrapper related to a particular manifest and providing it's path. /// /// This error adds no displayable info of it's own. pub struct ManifestError { cause: Error, manifest: PathBuf, } impl ManifestError { pub fn new<E: Into<Error>>(cause: E, manifest: PathBuf) -> Self { Self { cause: cause.into(), manifest, } } pub fn manifest_path(&self) -> &PathBuf { &self.manifest } /// Returns an iterator over the `ManifestError` chain of causes. /// /// So if this error was not caused by another `ManifestError` this will be empty. 
pub fn manifest_causes(&self) -> ManifestCauses<'_> { ManifestCauses { current: self } } } impl std::error::Error for ManifestError { fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { self.cause.source() } } impl fmt::Debug for ManifestError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.cause.fmt(f) } } impl fmt::Display for ManifestError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.cause.fmt(f) } } /// An iterator over the `ManifestError` chain of causes. pub struct ManifestCauses<'a> { current: &'a ManifestError, } impl<'a> Iterator for ManifestCauses<'a> { type Item = &'a ManifestError; fn next(&mut self) -> Option<Self::Item> { self.current = self.current.cause.downcast_ref()?; Some(self.current) } } impl<'a> ::std::iter::FusedIterator for ManifestCauses<'a> {} // ============================================================================= // Process errors #[derive(Debug)] pub struct ProcessError { /// A detailed description to show to the user why the process failed. pub desc: String, /// The exit status of the process. /// /// This can be `None` if the process failed to launch (like process not found). pub exit: Option<ExitStatus>, /// The output from the process. /// /// This can be `None` if the process failed to launch, or the output was not captured. pub output: Option<Output>, } impl fmt::Display for ProcessError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.desc.fmt(f) } } impl std::error::Error for ProcessError {} // ============================================================================= // Cargo test errors. /// Error when testcases fail #[derive(Debug)] pub struct CargoTestError { pub test: Test, pub desc: String, pub exit: Option<ExitStatus>, pub causes: Vec<ProcessError>, } impl fmt::Display for CargoTestError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.desc.fmt(f) } } impl std::error::Error for CargoTestError {} #[derive(Debug)] pub enum Test { Multiple, Doc, UnitTest { kind: TargetKind, name: String, pkg_name: String, }, } impl CargoTestError { pub fn new(test: Test, errors: Vec<ProcessError>) -> Self { if errors.is_empty() { panic!("Cannot create CargoTestError from empty Vec") } let desc = errors .iter() .map(|error| error.desc.clone()) .collect::<Vec<String>>() .join("\n"); CargoTestError { test, desc, exit: errors[0].exit, causes: errors, } } pub fn hint(&self, ws: &Workspace<'_>, opts: &CompileOptions) -> String { match self.test { Test::UnitTest { ref kind, ref name, ref pkg_name, } => { let pkg_info = if opts.spec.needs_spec_flag(ws) { format!("-p {} ", pkg_name) } else { String::new() }; match *kind { TargetKind::Bench => { format!("test failed, to rerun pass '{}--bench {}'", pkg_info, name) } TargetKind::Bin => { format!("test failed, to rerun pass '{}--bin {}'", pkg_info, name) } TargetKind::Lib(_) => format!("test failed, to rerun pass '{}--lib'", pkg_info), TargetKind::Test => { format!("test failed, to rerun pass '{}--test {}'", pkg_info, name) } TargetKind::ExampleBin | TargetKind::ExampleLib(_) => { format!("test failed, to rerun pass '{}--example {}", pkg_info, name) } _ => "test failed.".into(), } } Test::Doc => "test failed, to rerun pass '--doc'".into(), _ => "test failed.".into(), } } } // ============================================================================= // CLI errors pub type CliResult = Result<(), CliError>; #[derive(Debug)] /// The CLI error is the error type used at Cargo's CLI-layer. 
/// /// All errors from the lib side of Cargo will get wrapped with this error. /// Other errors (such as command-line argument validation) will create this /// directly. pub struct CliError { /// The error to display. This can be `None` in rare cases to exit with a /// code without displaying a message. For example `cargo run -q` where /// the resulting process exits with a nonzero code (on Windows), or an /// external subcommand that exits nonzero (we assume it printed its own /// message). pub error: Option<anyhow::Error>, /// The process exit code. pub exit_code: i32, } impl CliError { pub fn new(error: anyhow::Error, code: i32) -> CliError { CliError { error: Some(error), exit_code: code, } } pub fn code(code: i32) -> CliError { CliError {<|fim▁hole|> } } impl From<anyhow::Error> for CliError { fn from(err: anyhow::Error) -> CliError { CliError::new(err, 101) } } impl From<clap::Error> for CliError { fn from(err: clap::Error) -> CliError { let code = if err.use_stderr() { 1 } else { 0 }; CliError::new(err.into(), code) } } // ============================================================================= // Construction helpers /// Creates a new process error. /// /// `status` can be `None` if the process did not launch. /// `output` can be `None` if the process did not launch, or output was not captured. pub fn process_error( msg: &str, status: Option<ExitStatus>, output: Option<&Output>, ) -> ProcessError { let exit = match status { Some(s) => status_to_string(s), None => "never executed".to_string(), }; let mut desc = format!("{} ({})", &msg, exit); if let Some(out) = output { match str::from_utf8(&out.stdout) { Ok(s) if !s.trim().is_empty() => { desc.push_str("\n--- stdout\n"); desc.push_str(s); } Ok(..) | Err(..) => {} } match str::from_utf8(&out.stderr) { Ok(s) if !s.trim().is_empty() => { desc.push_str("\n--- stderr\n"); desc.push_str(s); } Ok(..) | Err(..) 
=> {} } } return ProcessError { desc, exit: status, output: output.cloned(), }; #[cfg(unix)] fn status_to_string(status: ExitStatus) -> String { use std::os::unix::process::*; if let Some(signal) = status.signal() { let name = match signal as libc::c_int { libc::SIGABRT => ", SIGABRT: process abort signal", libc::SIGALRM => ", SIGALRM: alarm clock", libc::SIGFPE => ", SIGFPE: erroneous arithmetic operation", libc::SIGHUP => ", SIGHUP: hangup", libc::SIGILL => ", SIGILL: illegal instruction", libc::SIGINT => ", SIGINT: terminal interrupt signal", libc::SIGKILL => ", SIGKILL: kill", libc::SIGPIPE => ", SIGPIPE: write on a pipe with no one to read", libc::SIGQUIT => ", SIGQUIT: terminal quit signal", libc::SIGSEGV => ", SIGSEGV: invalid memory reference", libc::SIGTERM => ", SIGTERM: termination signal", libc::SIGBUS => ", SIGBUS: access to undefined memory", #[cfg(not(target_os = "haiku"))] libc::SIGSYS => ", SIGSYS: bad system call", libc::SIGTRAP => ", SIGTRAP: trace/breakpoint trap", _ => "", }; format!("signal: {}{}", signal, name) } else { status.to_string() } } #[cfg(windows)] fn status_to_string(status: ExitStatus) -> String { use winapi::shared::minwindef::DWORD; use winapi::um::winnt::*; let mut base = status.to_string(); let extra = match status.code().unwrap() as DWORD { STATUS_ACCESS_VIOLATION => "STATUS_ACCESS_VIOLATION", STATUS_IN_PAGE_ERROR => "STATUS_IN_PAGE_ERROR", STATUS_INVALID_HANDLE => "STATUS_INVALID_HANDLE", STATUS_INVALID_PARAMETER => "STATUS_INVALID_PARAMETER", STATUS_NO_MEMORY => "STATUS_NO_MEMORY", STATUS_ILLEGAL_INSTRUCTION => "STATUS_ILLEGAL_INSTRUCTION", STATUS_NONCONTINUABLE_EXCEPTION => "STATUS_NONCONTINUABLE_EXCEPTION", STATUS_INVALID_DISPOSITION => "STATUS_INVALID_DISPOSITION", STATUS_ARRAY_BOUNDS_EXCEEDED => "STATUS_ARRAY_BOUNDS_EXCEEDED", STATUS_FLOAT_DENORMAL_OPERAND => "STATUS_FLOAT_DENORMAL_OPERAND", STATUS_FLOAT_DIVIDE_BY_ZERO => "STATUS_FLOAT_DIVIDE_BY_ZERO", STATUS_FLOAT_INEXACT_RESULT => "STATUS_FLOAT_INEXACT_RESULT", STATUS_FLOAT_INVALID_OPERATION => "STATUS_FLOAT_INVALID_OPERATION", STATUS_FLOAT_OVERFLOW => "STATUS_FLOAT_OVERFLOW", STATUS_FLOAT_STACK_CHECK => "STATUS_FLOAT_STACK_CHECK", STATUS_FLOAT_UNDERFLOW => "STATUS_FLOAT_UNDERFLOW", STATUS_INTEGER_DIVIDE_BY_ZERO => "STATUS_INTEGER_DIVIDE_BY_ZERO", STATUS_INTEGER_OVERFLOW => "STATUS_INTEGER_OVERFLOW", STATUS_PRIVILEGED_INSTRUCTION => "STATUS_PRIVILEGED_INSTRUCTION", STATUS_STACK_OVERFLOW => "STATUS_STACK_OVERFLOW", STATUS_DLL_NOT_FOUND => "STATUS_DLL_NOT_FOUND", STATUS_ORDINAL_NOT_FOUND => "STATUS_ORDINAL_NOT_FOUND", STATUS_ENTRYPOINT_NOT_FOUND => "STATUS_ENTRYPOINT_NOT_FOUND", STATUS_CONTROL_C_EXIT => "STATUS_CONTROL_C_EXIT", STATUS_DLL_INIT_FAILED => "STATUS_DLL_INIT_FAILED", STATUS_FLOAT_MULTIPLE_FAULTS => "STATUS_FLOAT_MULTIPLE_FAULTS", STATUS_FLOAT_MULTIPLE_TRAPS => "STATUS_FLOAT_MULTIPLE_TRAPS", STATUS_REG_NAT_CONSUMPTION => "STATUS_REG_NAT_CONSUMPTION", STATUS_HEAP_CORRUPTION => "STATUS_HEAP_CORRUPTION", STATUS_STACK_BUFFER_OVERRUN => "STATUS_STACK_BUFFER_OVERRUN", STATUS_ASSERTION_FAILURE => "STATUS_ASSERTION_FAILURE", _ => return base, }; base.push_str(", "); base.push_str(extra); base } } pub fn is_simple_exit_code(code: i32) -> bool { // Typical unix exit codes are 0 to 127. // Windows doesn't have anything "typical", and is a // 32-bit number (which appears signed here, but is really // unsigned). However, most of the interesting NTSTATUS // codes are very large. This is just a rough // approximation of which codes are "normal" and which // ones are abnormal termination. 
code >= 0 && code <= 127 } pub fn internal<S: fmt::Display>(error: S) -> anyhow::Error { InternalError::new(anyhow::format_err!("{}", error)).into() }<|fim▁end|>
error: None, exit_code: code, }
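errors.rs builds a ProcessError description by combining the caller's message, the exit status, and any non-empty captured stdout/stderr, and separately classifies exit codes 0-127 as ordinary terminations. A loose Python analogue of those two helpers, using subprocess purely for illustration:

import subprocess

def describe_failure(msg, completed):
    # Mirror the shape of the Rust helper: message, exit status, then captured streams.
    desc = '%s (exit status: %s)' % (msg, completed.returncode)
    if completed.stdout.strip():
        desc += '\n--- stdout\n' + completed.stdout
    if completed.stderr.strip():
        desc += '\n--- stderr\n' + completed.stderr
    return desc

def is_simple_exit_code(code):
    # 0..127 covers ordinary Unix exits; anything else suggests abnormal termination.
    return 0 <= code <= 127

result = subprocess.run(['sh', '-c', 'echo out; echo err >&2; exit 3'],
                        capture_output=True, text=True)
print(describe_failure("process didn't exit successfully", result))
assert is_simple_exit_code(result.returncode)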
<|file_name|>print.go<|end_file_name|><|fim▁begin|>package bitrise import ( "fmt" "strings" "time" log "github.com/Sirupsen/logrus" "github.com/bitrise-io/bitrise/models" "github.com/bitrise-io/go-utils/colorstring" "github.com/bitrise-io/go-utils/stringutil" ) const ( // should not be under ~45 stepRunSummaryBoxWidthInChars = 65 ) // PrintRunningWorkflow ... func PrintRunningWorkflow(title string) { fmt.Println() log.Info(colorstring.Bluef("Running workflow (%s)", title)) fmt.Println() } // PrintRunningStep ... func PrintRunningStep(stepInfo models.StepInfoModel, idx int) { title := stepInfo.ID version := stepInfo.Version if len(version) > 25 { version = "..." + stringutil.MaxLastChars(version, 22) } content := fmt.Sprintf("| (%d) %s (%s) |", idx, title, version) charDiff := len(content) - stepRunSummaryBoxWidthInChars if charDiff < 0 { // shorter than desired - fill with space content = fmt.Sprintf("| (%d) %s (%s)%s |", idx, title, version, strings.Repeat(" ", -charDiff)) } else if charDiff > 0 { // longer than desired - trim title trimmedTitleWidth := len(title) - charDiff - 3 if trimmedTitleWidth < 0 { log.Errorf("Step Version too long, can't present title at all! : %s", version) } else { content = fmt.Sprintf("| (%d) %s... (%s) |", idx, title[0:trimmedTitleWidth], version) } } sep := strings.Repeat("-", len(content)) log.Info(sep) log.Infof(content) log.Info(sep) log.Info("|" + strings.Repeat(" ", stepRunSummaryBoxWidthInChars-2) + "|") } func getTrimmedStepName(stepRunResult models.StepRunResultsModel) string { iconBoxWidth := len(" ") timeBoxWidth := len(" time (s) ") titleBoxWidth := stepRunSummaryBoxWidthInChars - 4 - iconBoxWidth - timeBoxWidth - 1 stepInfo := stepRunResult.StepInfo title := stepInfo.ID version := stepInfo.Version if len(version) > 25 { version = "..." + stringutil.MaxLastChars(version, 22) } titleBox := "" switch stepRunResult.Status { case models.StepRunStatusCodeSuccess, models.StepRunStatusCodeSkipped, models.StepRunStatusCodeSkippedWithRunIf: titleBox = fmt.Sprintf("%s (%s)", title, version) if len(titleBox) > titleBoxWidth { dif := len(titleBox) - titleBoxWidth title = title[:len(title)-dif-3] + "..." titleBox = fmt.Sprintf("%s (%s)", title, version) } break case models.StepRunStatusCodeFailed, models.StepRunStatusCodeFailedSkippable: titleBox = fmt.Sprintf("%s (%s) (exit code: %d)", title, version, stepRunResult.ExitCode) if len(titleBox) > titleBoxWidth { dif := len(titleBox) - titleBoxWidth title = title[:len(title)-dif-3] + "..." 
titleBox = fmt.Sprintf("%s (%s) (exit code: %d)", title, version, stepRunResult.ExitCode) } break default: log.Error("Unkown result code") return "" } return titleBox } func stepNoteCell(stepRunResult models.StepRunResultsModel) string { iconBoxWidth := len(" ") timeBoxWidth := len(" time (s) ") titleBoxWidth := stepRunSummaryBoxWidthInChars - 4 - iconBoxWidth - timeBoxWidth - 2 stepInfo := stepRunResult.StepInfo whitespaceWidth := titleBoxWidth - len(fmt.Sprintf("update available %s -> %s", stepInfo.Version, stepInfo.Latest)) content := colorstring.Yellow(fmt.Sprintf(" Update available: %s -> %s%s", stepInfo.Version, stepInfo.Latest, strings.Repeat(" ", whitespaceWidth))) return fmt.Sprintf("|%s|%s|%s|", strings.Repeat("-", iconBoxWidth), content, strings.Repeat("-", timeBoxWidth)) } func stepResultCell(stepRunResult models.StepRunResultsModel) string { iconBoxWidth := len(" ") timeBoxWidth := len(" time (s) ") titleBoxWidth := stepRunSummaryBoxWidthInChars - 4 - iconBoxWidth - timeBoxWidth - 1 icon := "" title := getTrimmedStepName(stepRunResult) runTimeStr := TimeToFormattedSeconds(stepRunResult.RunTime, " sec") coloringFunc := colorstring.Green switch stepRunResult.Status { case models.StepRunStatusCodeSuccess: icon = "✅" coloringFunc = colorstring.Green break case models.StepRunStatusCodeFailed: icon = "🚫" coloringFunc = colorstring.Red break case models.StepRunStatusCodeFailedSkippable: icon = "⚠️" coloringFunc = colorstring.Yellow break case models.StepRunStatusCodeSkipped, models.StepRunStatusCodeSkippedWithRunIf: icon = "➡" coloringFunc = colorstring.Blue break default: log.Error("Unkown result code") return "" } iconBox := fmt.Sprintf(" %s ", icon) titleWhiteSpaceWidth := titleBoxWidth - len(title) titleBox := fmt.Sprintf(" %s%s", coloringFunc(title), strings.Repeat(" ", titleWhiteSpaceWidth)) timeWhiteSpaceWidth := timeBoxWidth - len(runTimeStr) - 1 timeBox := fmt.Sprintf(" %s%s", runTimeStr, strings.Repeat(" ", timeWhiteSpaceWidth)) return fmt.Sprintf("|%s|%s|%s|", iconBox, titleBox, timeBox) } // PrintStepSummary .. func PrintStepSummary(stepRunResult models.StepRunResultsModel, isLastStepInWorkflow bool) { iconBoxWidth := len(" ") timeBoxWidth := len(" time (s) ") titleBoxWidth := stepRunSummaryBoxWidthInChars - 4 - iconBoxWidth - timeBoxWidth sep := fmt.Sprintf("+%s+%s+%s+", strings.Repeat("-", iconBoxWidth), strings.Repeat("-", titleBoxWidth), strings.Repeat("-", timeBoxWidth)) log.Info("|" + strings.Repeat(" ", stepRunSummaryBoxWidthInChars-2) + "|") log.Info(sep) log.Infof(stepResultCell(stepRunResult)) if stepRunResult.Error != nil && stepRunResult.StepInfo.IsUpdateAvailable() { log.Info(stepNoteCell(stepRunResult)) } log.Info(sep) if !isLastStepInWorkflow { fmt.Println() fmt.Println(strings.Repeat(" ", 42) + "▼") fmt.Println() } } // PrintSummary ... 
func PrintSummary(buildRunResults models.BuildRunResultsModel) { iconBoxWidth := len(" ") timeBoxWidth := len(" time (s) ") titleBoxWidth := stepRunSummaryBoxWidthInChars - 4 - iconBoxWidth - timeBoxWidth fmt.Println() fmt.Println() log.Infof("+%s+", strings.Repeat("-", stepRunSummaryBoxWidthInChars-2)) whitespaceWidth := (stepRunSummaryBoxWidthInChars - 2 - len("bitrise summary")) / 2 log.Infof("|%sbitrise summary%s|", strings.Repeat(" ", whitespaceWidth), strings.Repeat(" ", whitespaceWidth)) log.Infof("+%s+%s+%s+", strings.Repeat("-", iconBoxWidth), strings.Repeat("-", titleBoxWidth), strings.Repeat("-", timeBoxWidth)) whitespaceWidth = stepRunSummaryBoxWidthInChars - len("| | title") - len("| time (s) |") log.Infof("| | title%s| time (s) |", strings.Repeat(" ", whitespaceWidth)) log.Infof("+%s+%s+%s+", strings.Repeat("-", iconBoxWidth), strings.Repeat("-", titleBoxWidth), strings.Repeat("-", timeBoxWidth)) orderedResults := buildRunResults.OrderedResults() tmpTime := time.Time{} for _, stepRunResult := range orderedResults { tmpTime = tmpTime.Add(stepRunResult.RunTime) log.Info(stepResultCell(stepRunResult)) if stepRunResult.Error != nil && stepRunResult.StepInfo.IsUpdateAvailable() { log.Info(stepNoteCell(stepRunResult)) } log.Infof("+%s+%s+%s+", strings.Repeat("-", iconBoxWidth), strings.Repeat("-", titleBoxWidth), strings.Repeat("-", timeBoxWidth)) } runtime := tmpTime.Sub(time.Time{}) runtimeStr := TimeToFormattedSeconds(runtime, " sec") whitespaceWidth = stepRunSummaryBoxWidthInChars - len(fmt.Sprintf("| Total runtime: %s|", runtimeStr)) log.Infof("| Total runtime: %s%s|", runtimeStr, strings.Repeat(" ", whitespaceWidth)) log.Infof("+%s+", strings.Repeat("-", stepRunSummaryBoxWidthInChars-2)) fmt.Println() } // PrintStepStatusList ... func PrintStepStatusList(header string, stepList []models.StepRunResultsModel) { if len(stepList) > 0 {<|fim▁hole|> for _, stepResult := range stepList { stepInfo := stepResult.StepInfo if stepResult.Error != nil { log.Infof(" * Step: (%s) | error: (%v)", stepInfo.ID, stepResult.Error) } else { log.Infof(" * Step: (%s)", stepInfo.ID) } } } }<|fim▁end|>
log.Infof(header)
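The printing helpers above all revolve around the same arithmetic: a 65-character summary box is split into an icon column, a title column, and a time column, and the title is trimmed with an ellipsis or padded with spaces to fill its share. A toy Python version of that cell layout (the icon and time widths here are assumptions, not Bitrise's exact values):

BOX_WIDTH = 65            # mirrors stepRunSummaryBoxWidthInChars
ICON_W = 4                # assumed icon column width
TIME_W = 10               # assumed time column width

def fit_title(title, width):
    if len(title) > width:
        title = title[:width - 3] + '...'       # trim with ellipsis
    return title.ljust(width)                   # pad short titles

def summary_row(icon, title, time_str):
    title_w = BOX_WIDTH - ICON_W - TIME_W - 4   # 4 '|' separators
    return '|%s|%s|%s|' % (icon.center(ICON_W),
                           fit_title(title, title_w),
                           time_str.rjust(TIME_W))

row = summary_row('OK', 'certificate-and-profile-installer@1.8.2', '2.3 sec')
assert len(row) == BOX_WIDTH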
<|file_name|>0007_auto_20151005_1333.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from django.db import migrations def create_switch(apps, schema_editor): """Create the async_order_fulfillment switch if it does not already exist.""" Switch = apps.get_model('waffle', 'Switch') Switch.objects.get_or_create(name='async_order_fulfillment', defaults={'active': False}) def delete_switch(apps, schema_editor): """Delete the async_order_fulfillment switch.""" Switch = apps.get_model('waffle', 'Switch') Switch.objects.filter(name='async_order_fulfillment').delete() class Migration(migrations.Migration): dependencies = [ ('core', '0006_add_service_user'), ('waffle', '0001_initial'), ] operations = [<|fim▁hole|><|fim▁end|>
migrations.RunPython(create_switch, reverse_code=delete_switch), ]
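Read together with its completion line, this record is the standard reversible data-migration pattern: an idempotent forward function built on get_or_create, a delete-based reverse function, and a RunPython operation wiring the two via reverse_code. Condensed below with a hypothetical switch name in place of the real one, to show how the completion slots into the operations list:

from django.db import migrations

def create_switch(apps, schema_editor):
    # Use the historical model from `apps`, not a direct waffle import.
    Switch = apps.get_model('waffle', 'Switch')
    Switch.objects.get_or_create(name='example_switch', defaults={'active': False})

def delete_switch(apps, schema_editor):
    apps.get_model('waffle', 'Switch').objects.filter(name='example_switch').delete()

class Migration(migrations.Migration):
    dependencies = [('waffle', '0001_initial')]
    operations = [
        migrations.RunPython(create_switch, reverse_code=delete_switch),
    ]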
<|file_name|>tokenize.py<|end_file_name|><|fim▁begin|># Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006 Python Software Foundation. # All rights reserved. """Tokenization help for Python programs. generate_tokens(readline) is a generator that breaks a stream of<|fim▁hole|>text into Python tokens. It accepts a readline-like method which is called repeatedly to get the next line of input (or "" for EOF). It generates 5-tuples with these members: the token type (see token.py) the token (a string) the starting (row, column) indices of the token (a 2-tuple of ints) the ending (row, column) indices of the token (a 2-tuple of ints) the original line (string) It is designed to match the working of the Python tokenizer exactly, except that it produces COMMENT tokens for comments and gives type OP for all operators Older entry points tokenize_loop(readline, tokeneater) tokenize(readline, tokeneater=printtoken) are the same, except instead of generating tokens, tokeneater is a callback function to which the 5 fields described above are passed as 5 arguments, each time a new token is found.""" __author__ = 'Ka-Ping Yee <[email protected]>' __credits__ = \ 'GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, Skip Montanaro' import string, re from codecs import BOM_UTF8, lookup from lib2to3.pgen2.token import * from . import token __all__ = [x for x in dir(token) if x[0] != '_'] + ["tokenize", "generate_tokens", "untokenize"] del token try: bytes except NameError: # Support bytes type in Python <= 2.5, so 2to3 turns itself into # valid Python 3 code. bytes = str def group(*choices): return '(' + '|'.join(choices) + ')' def any(*choices): return group(*choices) + '*' def maybe(*choices): return group(*choices) + '?' Whitespace = r'[ \f\t]*' Comment = r'#[^\r\n]*' Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment) Name = r'[a-zA-Z_]\w*' Binnumber = r'0[bB][01]*' Hexnumber = r'0[xX][\da-fA-F]*[lL]?' Octnumber = r'0[oO]?[0-7]*[lL]?' Decnumber = r'[1-9]\d*[lL]?' Intnumber = group(Binnumber, Hexnumber, Octnumber, Decnumber) Exponent = r'[eE][-+]?\d+' Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent) Expfloat = r'\d+' + Exponent Floatnumber = group(Pointfloat, Expfloat) Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]') Number = group(Imagnumber, Floatnumber, Intnumber) # Tail end of ' string. Single = r"[^'\\]*(?:\\.[^'\\]*)*'" # Tail end of " string. Double = r'[^"\\]*(?:\\.[^"\\]*)*"' # Tail end of ''' string. Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''" # Tail end of """ string. Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""' Triple = group("[ubUB]?[rR]?'''", '[ubUB]?[rR]?"""') # Single-line ' or " string. String = group(r"[uU]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'", r'[uU]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"') # Because of leftmost-then-longest match semantics, be sure to put the # longest operators first (e.g., if = came before ==, == would get # recognized as two instances of =). Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=", r"//=?", r"->", r"[+\-*/%&|^=<>]=?", r"~") Bracket = '[][(){}]' Special = group(r'\r?\n', r'[:;.,`@]') Funny = group(Operator, Bracket, Special) PlainToken = group(Number, Funny, String, Name) Token = Ignore + PlainToken # First (or only) line of ' or " string. 
ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" + group("'", r'\\\r?\n'), r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' + group('"', r'\\\r?\n')) PseudoExtras = group(r'\\\r?\n', Comment, Triple) PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name) tokenprog, pseudoprog, single3prog, double3prog = list(map( re.compile, (Token, PseudoToken, Single3, Double3))) endprogs = {"'": re.compile(Single), '"': re.compile(Double), "'''": single3prog, '"""': double3prog, "r'''": single3prog, 'r"""': double3prog, "u'''": single3prog, 'u"""': double3prog, "b'''": single3prog, 'b"""': double3prog, "ur'''": single3prog, 'ur"""': double3prog, "br'''": single3prog, 'br"""': double3prog, "R'''": single3prog, 'R"""': double3prog, "U'''": single3prog, 'U"""': double3prog, "B'''": single3prog, 'B"""': double3prog, "uR'''": single3prog, 'uR"""': double3prog, "Ur'''": single3prog, 'Ur"""': double3prog, "UR'''": single3prog, 'UR"""': double3prog, "bR'''": single3prog, 'bR"""': double3prog, "Br'''": single3prog, 'Br"""': double3prog, "BR'''": single3prog, 'BR"""': double3prog, 'r': None, 'R': None, 'u': None, 'U': None, 'b': None, 'B': None} triple_quoted = {} for t in ("'''", '"""', "r'''", 'r"""', "R'''", 'R"""', "u'''", 'u"""', "U'''", 'U"""', "b'''", 'b"""', "B'''", 'B"""', "ur'''", 'ur"""', "Ur'''", 'Ur"""', "uR'''", 'uR"""', "UR'''", 'UR"""', "br'''", 'br"""', "Br'''", 'Br"""', "bR'''", 'bR"""', "BR'''", 'BR"""',): triple_quoted[t] = t single_quoted = {} for t in ("'", '"', "r'", 'r"', "R'", 'R"', "u'", 'u"', "U'", 'U"', "b'", 'b"', "B'", 'B"', "ur'", 'ur"', "Ur'", 'Ur"', "uR'", 'uR"', "UR'", 'UR"', "br'", 'br"', "Br'", 'Br"', "bR'", 'bR"', "BR'", 'BR"', ): single_quoted[t] = t tabsize = 8 class TokenError(Exception): pass class StopTokenizing(Exception): pass def printtoken(type, token, xxx_todo_changeme, xxx_todo_changeme1, line): # for testing (srow, scol) = xxx_todo_changeme (erow, ecol) = xxx_todo_changeme1 print("%d,%d-%d,%d:\t%s\t%s" % \ (srow, scol, erow, ecol, tok_name[type], repr(token))) def tokenize(readline, tokeneater=printtoken): """ The tokenize() function accepts two parameters: one representing the input stream, and one providing an output mechanism for tokenize(). The first parameter, readline, must be a callable object which provides the same interface as the readline() method of built-in file objects. Each call to the function should return one line of input as a string. The second parameter, tokeneater, must also be a callable object. It is called once for each token, with five arguments, corresponding to the tuples generated by generate_tokens(). 
""" try: tokenize_loop(readline, tokeneater) except StopTokenizing: pass # backwards compatible interface def tokenize_loop(readline, tokeneater): for token_info in generate_tokens(readline): tokeneater(*token_info) class Untokenizer: def __init__(self): self.tokens = [] self.prev_row = 1 self.prev_col = 0 def add_whitespace(self, start): row, col = start assert row <= self.prev_row col_offset = col - self.prev_col if col_offset: self.tokens.append(" " * col_offset) def untokenize(self, iterable): for t in iterable: if len(t) == 2: self.compat(t, iterable) break tok_type, token, start, end, line = t self.add_whitespace(start) self.tokens.append(token) self.prev_row, self.prev_col = end if tok_type in (NEWLINE, NL): self.prev_row += 1 self.prev_col = 0 return "".join(self.tokens) def compat(self, token, iterable): startline = False indents = [] toks_append = self.tokens.append toknum, tokval = token if toknum in (NAME, NUMBER): tokval += ' ' if toknum in (NEWLINE, NL): startline = True for tok in iterable: toknum, tokval = tok[:2] if toknum in (NAME, NUMBER): tokval += ' ' if toknum == INDENT: indents.append(tokval) continue elif toknum == DEDENT: indents.pop() continue elif toknum in (NEWLINE, NL): startline = True elif startline and indents: toks_append(indents[-1]) startline = False toks_append(tokval) cookie_re = re.compile(r'^[ \t\f]*#.*coding[:=][ \t]*([-\w.]+)', re.ASCII) blank_re = re.compile(br'^[ \t\f]*(?:[#\r\n]|$)', re.ASCII) def _get_normal_name(orig_enc): """Imitates get_normal_name in tokenizer.c.""" # Only care about the first 12 characters. enc = orig_enc[:12].lower().replace("_", "-") if enc == "utf-8" or enc.startswith("utf-8-"): return "utf-8" if enc in ("latin-1", "iso-8859-1", "iso-latin-1") or \ enc.startswith(("latin-1-", "iso-8859-1-", "iso-latin-1-")): return "iso-8859-1" return orig_enc def detect_encoding(readline): """ The detect_encoding() function is used to detect the encoding that should be used to decode a Python source file. It requires one argument, readline, in the same way as the tokenize() generator. It will call readline a maximum of twice, and return the encoding used (as a string) and a list of any lines (left as bytes) it has read in. It detects the encoding from the presence of a utf-8 bom or an encoding cookie as specified in pep-0263. If both a bom and a cookie are present, but disagree, a SyntaxError will be raised. If the encoding cookie is an invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found, 'utf-8-sig' is returned. If no encoding is specified, then the default of 'utf-8' will be returned. 
""" bom_found = False encoding = None default = 'utf-8' def read_or_stop(): try: return readline() except StopIteration: return bytes() def find_cookie(line): try: line_string = line.decode('ascii') except UnicodeDecodeError: return None match = cookie_re.match(line_string) if not match: return None encoding = _get_normal_name(match.group(1)) try: codec = lookup(encoding) except LookupError: # This behaviour mimics the Python interpreter raise SyntaxError("unknown encoding: " + encoding) if bom_found: if codec.name != 'utf-8': # This behaviour mimics the Python interpreter raise SyntaxError('encoding problem: utf-8') encoding += '-sig' return encoding first = read_or_stop() if first.startswith(BOM_UTF8): bom_found = True first = first[3:] default = 'utf-8-sig' if not first: return default, [] encoding = find_cookie(first) if encoding: return encoding, [first] if not blank_re.match(first): return default, [first] second = read_or_stop() if not second: return default, [first] encoding = find_cookie(second) if encoding: return encoding, [first, second] return default, [first, second] def untokenize(iterable): """Transform tokens back into Python source code. Each element returned by the iterable must be a token sequence with at least two elements, a token number and token value. If only two tokens are passed, the resulting output is poor. Round-trip invariant for full input: Untokenized source will match input source exactly Round-trip invariant for limited intput: # Output text will tokenize the back to the input t1 = [tok[:2] for tok in generate_tokens(f.readline)] newcode = untokenize(t1) readline = iter(newcode.splitlines(1)).next t2 = [tok[:2] for tokin generate_tokens(readline)] assert t1 == t2 """ ut = Untokenizer() return ut.untokenize(iterable) def generate_tokens(readline): """ The generate_tokens() generator requires one argument, readline, which must be a callable object which provides the same interface as the readline() method of built-in file objects. Each call to the function should return one line of input as a string. Alternately, readline can be a callable function terminating with StopIteration: readline = open(myfile).next # Example of alternate readline The generator produces 5-tuples with these members: the token type; the token string; a 2-tuple (srow, scol) of ints specifying the row and column where the token begins in the source; a 2-tuple (erow, ecol) of ints specifying the row and column where the token ends in the source; and the line on which the token was found. The line passed is the logical line; continuation lines are included. 
""" lnum = parenlev = continued = 0 namechars, numchars = string.ascii_letters + '_', '0123456789' contstr, needcont = '', 0 contline = None indents = [0] while 1: # loop over lines in stream try: line = readline() except StopIteration: line = '' lnum = lnum + 1 pos, max = 0, len(line) if contstr: # continued string if not line: raise TokenError("EOF in multi-line string", strstart) endmatch = endprog.match(line) if endmatch: pos = end = endmatch.end(0) yield (STRING, contstr + line[:end], strstart, (lnum, end), contline + line) contstr, needcont = '', 0 contline = None elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n': yield (ERRORTOKEN, contstr + line, strstart, (lnum, len(line)), contline) contstr = '' contline = None continue else: contstr = contstr + line contline = contline + line continue elif parenlev == 0 and not continued: # new statement if not line: break column = 0 while pos < max: # measure leading whitespace if line[pos] == ' ': column = column + 1 elif line[pos] == '\t': column = (column//tabsize + 1)*tabsize elif line[pos] == '\f': column = 0 else: break pos = pos + 1 if pos == max: break if line[pos] in '#\r\n': # skip comments or blank lines if line[pos] == '#': comment_token = line[pos:].rstrip('\r\n') nl_pos = pos + len(comment_token) yield (COMMENT, comment_token, (lnum, pos), (lnum, pos + len(comment_token)), line) yield (NL, line[nl_pos:], (lnum, nl_pos), (lnum, len(line)), line) else: yield ((NL, COMMENT)[line[pos] == '#'], line[pos:], (lnum, pos), (lnum, len(line)), line) continue if column > indents[-1]: # count indents or dedents indents.append(column) yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line) while column < indents[-1]: if column not in indents: raise IndentationError( "unindent does not match any outer indentation level", ("<tokenize>", lnum, pos, line)) indents = indents[:-1] yield (DEDENT, '', (lnum, pos), (lnum, pos), line) else: # continued statement if not line: raise TokenError("EOF in multi-line statement", (lnum, 0)) continued = 0 while pos < max: pseudomatch = pseudoprog.match(line, pos) if pseudomatch: # scan for tokens start, end = pseudomatch.span(1) spos, epos, pos = (lnum, start), (lnum, end), end token, initial = line[start:end], line[start] if initial in numchars or \ (initial == '.' 
and token != '.'): # ordinary number yield (NUMBER, token, spos, epos, line) elif initial in '\r\n': newline = NEWLINE if parenlev > 0: newline = NL yield (newline, token, spos, epos, line) elif initial == '#': assert not token.endswith("\n") yield (COMMENT, token, spos, epos, line) elif token in triple_quoted: endprog = endprogs[token] endmatch = endprog.match(line, pos) if endmatch: # all on one line pos = endmatch.end(0) token = line[start:pos] yield (STRING, token, spos, (lnum, pos), line) else: strstart = (lnum, start) # multiple lines contstr = line[start:] contline = line break elif initial in single_quoted or \ token[:2] in single_quoted or \ token[:3] in single_quoted: if token[-1] == '\n': # continued string strstart = (lnum, start) endprog = (endprogs[initial] or endprogs[token[1]] or endprogs[token[2]]) contstr, needcont = line[start:], 1 contline = line break else: # ordinary string yield (STRING, token, spos, epos, line) elif initial in namechars: # ordinary name yield (NAME, token, spos, epos, line) elif initial == '\\': # continued stmt # This yield is new; needed for better idempotency: yield (NL, token, spos, (lnum, pos), line) continued = 1 else: if initial in '([{': parenlev = parenlev + 1 elif initial in ')]}': parenlev = parenlev - 1 yield (OP, token, spos, epos, line) else: yield (ERRORTOKEN, line[pos], (lnum, pos), (lnum, pos+1), line) pos = pos + 1 for indent in indents[1:]: # pop remaining indent levels yield (DEDENT, '', (lnum, 0), (lnum, 0), '') yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '') if __name__ == '__main__': # testing import sys if len(sys.argv) > 1: tokenize(open(sys.argv[1]).readline) else: tokenize(sys.stdin.readline)<|fim▁end|>
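The docstrings above spell out a round-trip contract: untokenize() of full 5-tuples reproduces the source exactly, while (type, string) pairs only guarantee that the output re-tokenizes to the same stream. A short driver exercising both invariants, assuming the module above is importable as lib2to3.pgen2.tokenize (its path in CPython's tree):

import io
from lib2to3.pgen2 import tokenize as tok

source = 'x = 1 + 2\nprint(x)\n'

# Full 5-tuples keep positions, so the exact text (whitespace included) comes back.
tokens = list(tok.generate_tokens(io.StringIO(source).readline))
assert tok.untokenize(tokens) == source

# With only (type, string) pairs the spacing is approximate, but re-tokenizing
# the result yields the same (type, string) stream.
pairs = [t[:2] for t in tokens]
rebuilt = tok.untokenize(pairs)
assert [t[:2] for t in tok.generate_tokens(io.StringIO(rebuilt).readline)] == pairs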
<|file_name|>Distance Joint Description.cpp<|end_file_name|><|fim▁begin|>#include "StdAfx.h" #include "Distance Joint Description.h" #include "Spring Description.h" #include "Distance Joint.h" #include <NxDistanceJointDesc.h> using namespace StillDesign::PhysX; DistanceJointDescription::DistanceJointDescription() : JointDescription( new NxDistanceJointDesc() ) { } DistanceJointDescription::DistanceJointDescription( NxDistanceJointDesc* desc ) : JointDescription( desc ) { } float DistanceJointDescription::MinimumDistance::get() { return this->UnmanagedPointer->minDistance; } void DistanceJointDescription::MinimumDistance::set( float value ) { this->UnmanagedPointer->minDistance = value; } float DistanceJointDescription::MaximumDistance::get() { return this->UnmanagedPointer->maxDistance; } void DistanceJointDescription::MaximumDistance::set( float value ) { this->UnmanagedPointer->maxDistance = value; } SpringDescription DistanceJointDescription::Spring::get() { return (SpringDescription)this->UnmanagedPointer->spring; <|fim▁hole|>void DistanceJointDescription::Spring::set( SpringDescription value ) { this->UnmanagedPointer->spring = (NxSpringDesc)value; } DistanceJointFlag DistanceJointDescription::Flags::get() { return (DistanceJointFlag)this->UnmanagedPointer->flags; } void DistanceJointDescription::Flags::set( DistanceJointFlag value ) { this->UnmanagedPointer->flags = (NxDistanceJointFlag)value; } NxDistanceJointDesc* DistanceJointDescription::UnmanagedPointer::get() { return (NxDistanceJointDesc*)JointDescription::UnmanagedPointer; }<|fim▁end|>
}
<|file_name|>DataSet.java<|end_file_name|><|fim▁begin|>package org.anyline.entity; import com.fasterxml.jackson.databind.JsonNode; import org.anyline.util.*; import org.anyline.util.regular.Regular; import org.anyline.util.regular.RegularUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.Serializable; import java.math.BigDecimal; import java.util.*; public class DataSet implements Collection<DataRow>, Serializable { private static final long serialVersionUID = 6443551515441660101L; protected static final Logger log = LoggerFactory.getLogger(DataSet.class); private boolean result = true; // 执行结果 private Exception exception = null; // 异常 private String message = null; // 提示信息 private PageNavi navi = null; // 分页 private List<String> head = null; // 表头 private List<DataRow> rows = null; // 数据 private List<String> primaryKeys = null; // 主键 private String datalink = null; // 数据连接 private String dataSource = null; // 数据源(表|视图|XML定义SQL) private String schema = null; private String table = null; private long createTime = 0; //创建时间 private long expires = -1; //过期时间(毫秒) 从创建时刻计时expires毫秒后过期 private boolean isFromCache = false; //是否来自缓存 private boolean isAsc = false; private boolean isDesc = false; private Map<String, Object> queryParams = new HashMap<String, Object>();//查询条件 /** * 创建索引 * * @param key key * @return return * crateIndex("ID"); * crateIndex("ID:ASC"); */ public DataSet creatIndex(String key) { return this; } public DataSet() { rows = new ArrayList<DataRow>(); createTime = System.currentTimeMillis(); } public DataSet(List<Map<String, Object>> list) { rows = new ArrayList<DataRow>(); if (null == list) return; for (Map<String, Object> map : list) { DataRow row = new DataRow(map); rows.add(row); } } public static DataSet build(Collection<?> list, String ... fields) { return parse(list, fields); } /** * list解析成DataSet * @param list list * @param fields 如果list是二维数据 * fields 下标对应的属性(字段/key)名称 如"ID","CODE","NAME" * 如果不输入则以下标作为DataRow的key 如row.put("0","100").put("1","A01").put("2","张三"); * 如果属性数量超出list长度,取null值存入DataRow * * 如果list是一组数组 * fileds对应条目的属性值 如果不输入 则以条目的属性作DataRow的key 如"USER_ID:id","USER_NM:name" * * @return DataSet */ public static DataSet parse(Collection<?> list, String ... 
fields) { DataSet set = new DataSet(); if (null != list) { for (Object obj : list) { DataRow row = null; if(obj instanceof Collection){ row = DataRow.parseList((Collection)obj, fields); }else { row = DataRow.parse(obj, fields); } set.add(row); } } return set; } public static DataSet parseJson(DataRow.KEY_CASE keyCase, String json) { if (null != json) { try { return parseJson(keyCase, BeanUtil.JSON_MAPPER.readTree(json)); } catch (Exception e) { } } return null; } public static DataSet parseJson(String json) { return parseJson(DataRow.KEY_CASE.CONFIG, json); } public static DataSet parseJson(DataRow.KEY_CASE keyCase, JsonNode json) { DataSet set = new DataSet(); if (null != json) { if (json.isArray()) { Iterator<JsonNode> items = json.iterator(); while (items.hasNext()) { JsonNode item = items.next(); set.add(DataRow.parseJson(keyCase, item)); } } } return set; } public static DataSet parseJson(JsonNode json) { return parseJson(DataRow.KEY_CASE.CONFIG, json); } public DataSet Camel(){ for(DataRow row:rows){ row.Camel(); } return this; } public DataSet camel(){ for(DataRow row:rows){ row.camel(); } return this; } public DataSet setIsNew(boolean bol) { for (DataRow row : rows) { row.setIsNew(bol); } return this; } /** * 移除每个条目中指定的key * * @param keys keys * @return DataSet */ public DataSet remove(String... keys) { for (DataRow row : rows) { for (String key : keys) { row.remove(key); } } return this; } public DataSet trim(){ for(DataRow row:rows){ row.trim(); } return this; } /** * 添加主键 * * @param applyItem 是否应用到集合中的DataRow 默认true * @param pks pks * @return return */ public DataSet addPrimaryKey(boolean applyItem, String... pks) { if (null != pks) { List<String> list = new ArrayList<>(); for (String pk : pks) { list.add(pk); } addPrimaryKey(applyItem, list); } return this; } public DataSet addPrimaryKey(String... pks) { return addPrimaryKey(true, pks); } public DataSet addPrimaryKey(boolean applyItem, Collection<String> pks) { if (null == primaryKeys) { primaryKeys = new ArrayList<>(); } if (null == pks) { return this; } for (String pk : pks) { if (BasicUtil.isEmpty(pk)) { continue; } pk = key(pk); if (!primaryKeys.contains(pk)) { primaryKeys.add(pk); } } if (applyItem) { for (DataRow row : rows) { row.setPrimaryKey(false, primaryKeys); } } return this; } public DataSet addPrimaryKey(Collection<String> pks) { return addPrimaryKey(true, pks); } /** * 设置主键 * * @param applyItem applyItem * @param pks pks * @return return */ public DataSet setPrimaryKey(boolean applyItem, String... pks) { if (null != pks) { List<String> list = new ArrayList<>(); for (String pk : pks) { list.add(pk); } setPrimaryKey(applyItem, list); } return this; } public DataSet setPrimaryKey(String... 
pks) { return setPrimaryKey(true, pks); } public DataSet setPrimaryKey(boolean applyItem, Collection<String> pks) { if (null == pks) { return this; } this.primaryKeys = new ArrayList<>(); addPrimaryKey(applyItem, pks); return this; } public DataSet setPrimaryKey(Collection<String> pks) { return setPrimaryKey(true, pks); } public DataSet set(int index, DataRow item) { rows.set(index, item); return this; } /** * 是否有主键 * * @return return */ public boolean hasPrimaryKeys() { if (null != primaryKeys && primaryKeys.size() > 0) { return true; } else { return false; } } /** * 提取主键 * * @return return */ public List<String> getPrimaryKeys() { if (null == primaryKeys) { primaryKeys = new ArrayList<>(); } return primaryKeys; } /** * 添加表头 * * @param col col * @return return */ public DataSet addHead(String col) { if (null == head) { head = new ArrayList<>(); } if ("ROW_NUMBER".equals(col)) { return this; } if (head.contains(col)) { return this; } head.add(col); return this; } /** * 表头 * * @return return */ public List<String> getHead() { return head; } public int indexOf(Object obj) { return rows.indexOf(obj); } /** * 从begin开始截断到end,方法执行将改变原DataSet长度 * * @param begin 开始位置 * @param end 结束位置 * @return DataSet */ public DataSet truncates(int begin, int end) { if (!rows.isEmpty()) { if (begin < 0) { begin = 0; } if (end >= rows.size()) { end = rows.size() - 1; } if (begin >= rows.size()) { begin = rows.size() - 1; } if (end <= 0) { end = 0; } rows = rows.subList(begin, end); } return this; } /** * 从begin开始截断到最后一个 * * @param begin 开始位置 * @return DataSet */ public DataSet truncates(int begin) { if (begin < 0) { begin = rows.size() + begin; int end = rows.size() - 1; return truncates(begin, end); } else { return truncates(begin, rows.size() - 1); } } /** * 从begin开始截断到最后一个并返回其中第一个DataRow * * @param begin 开始位置 * @return DataRow */ public DataRow truncate(int begin) { return truncate(begin, rows.size() - 1); } /** * 从begin开始截断到end位置并返回其中第一个DataRow * * @param begin 开始位置 * @param end 结束位置 * @return DataRow */ public DataRow truncate(int begin, int end) { truncates(begin, end); if (rows.size() > 0) { return rows.get(0); } else { return null; } } /** * 从begin开始截取到最后一个 * * @param begin 开始位置 * 如果输入负数则取后n个,如果造成数量不足,则取全部 * @return DataSet */ public DataSet cuts(int begin) { if (begin < 0) { begin = rows.size() + begin; int end = rows.size() - 1; return cuts(begin, end); } else { return cuts(begin, rows.size() - 1); } } /** * 从begin开始截取到end位置,方法执行时会创建新的DataSet并不改变原有set长度 * * @param begin 开始位置 * @param end 结束位置 * @return DataSet */ public DataSet cuts(int begin, int end) { DataSet result = new DataSet(); if (rows.isEmpty()) { return result; } if (begin < 0) { begin = 0; } if (end >= rows.size()) { end = rows.size() - 1; } if (begin >= rows.size()) { begin = rows.size() - 1; } if (end <= 0) { end = 0; } for (int i = begin; i <= end; i++) { result.add(rows.get(i)); } return result; } /** * 从begin开始截取到最后一个,并返回其中第一个DataRow * * @param begin 开始位置 * @return DataSet */ public DataRow cut(int begin) { return cut(begin, rows.size() - 1); } /** * 从begin开始截取到end位置,并返回其中第一个DataRow,方法执行时会创建新的DataSet并不改变原有set长度 * * @param begin 开始位置 * @param end 结束位置 * @return DataSet */ public DataRow cut(int begin, int end) { DataSet result = cuts(begin, end); if (result.size() > 0) { return result.getRow(0); } return null; } /** * 记录数量 * * @return return */ public int size() { int result = 0; if (null != rows) result = rows.size(); return result; } public int getSize() { return size(); } /** * 是否出现异常 * * @return return */ public boolean isException() 
{ return null != exception; } public boolean isFromCache() { return isFromCache; } public DataSet setIsFromCache(boolean bol) { this.isFromCache = bol; return this; } /** * 返回数据是否为空 * * @return return */ public boolean isEmpty() { boolean result = true; if (null == rows) { result = true; } else if (rows instanceof Collection) { result = ((Collection<?>) rows).isEmpty(); } return result; } /** * 读取一行数据 * * @param index index * @return return */ public DataRow getRow(int index) { DataRow row = null; if (null != rows && index < rows.size()) { row = rows.get(index); } if (null != row) { row.setContainer(this); } return row; } public boolean exists(String ... params){ DataRow row = getRow(0, params); return row != null; } public DataRow getRow(String... params) { return getRow(0, params); } public DataRow getRow(DataRow params) { return getRow(0, params); } public DataRow getRow(List<String> params) { String[] kvs = BeanUtil.list2array(params); return getRow(0, kvs); } public DataRow getRow(int begin, String... params) { DataSet set = getRows(begin, 1, params); if (set.size() > 0) { return set.getRow(0); } return null; } public DataRow getRow(int begin, DataRow params) { DataSet set = getRows(begin, 1, params); if (set.size() > 0) { return set.getRow(0); } return null; } /** * 根据keys去重 * * @param keys keys * @return DataSet */ public DataSet distinct(String... keys) { DataSet result = new DataSet(); if (null != rows) { int size = rows.size(); for (int i = 0; i < size; i++) { DataRow row = rows.get(i); //查看result中是否已存在 String[] params = packParam(row, keys); if (result.getRow(params) == null) { DataRow tmp = new DataRow(); for (String key : keys) { tmp.put(key, row.get(key)); } result.addRow(tmp); } } } result.cloneProperty(this); return result; } public DataSet distinct(List<String> keys) { DataSet result = new DataSet(); if (null != rows) { for (DataRow row:rows) { //查看result中是否已存在 String[] params = packParam(row, keys); if (result.getRow(params) == null) { DataRow tmp = new DataRow(); for (String key : keys) { tmp.put(key, row.get(key)); } result.addRow(tmp); } } } result.cloneProperty(this); return result; } public Object clone() { DataSet set = new DataSet(); List<DataRow> rows = new ArrayList<DataRow>(); for (DataRow row : this.rows) { rows.add((DataRow) row.clone()); } set.setRows(rows); set.cloneProperty(this); return set; } private DataSet cloneProperty(DataSet from) { return cloneProperty(from, this); } public static DataSet cloneProperty(DataSet from, DataSet to) { if (null != from && null != to) { to.exception = from.exception; to.message = from.message; to.navi = from.navi; to.head = from.head; to.primaryKeys = from.primaryKeys; to.dataSource = from.dataSource; to.datalink = from.datalink; to.schema = from.schema; to.table = from.table; } return to; } /** * 指定key转换成number * @param keys keys * @return DataRow */ public DataSet convertNumber(String ... keys){ if(null != keys) { for(DataRow row:rows){ row.convertNumber(keys); } } return this; } public DataSet convertString(String ... keys){ if(null != keys) { for(DataRow row:rows){ row.convertString(keys); } } return this; } public DataSet skip(boolean skip){ for(DataRow row:rows){ row.skip = skip; } return this; } /** * 筛选符合条件的集合 * 注意如果String类型 1与1.0比较不相等, 可以先调用convertNumber转换一下数据类型 * @param params key1,value1,key2:value2,key3,value3 * "NM:zh%","AGE:&gt;20","NM","%zh%" * @param begin begin * @param qty 最多筛选多少个 0表示不限制 * @return return */ public DataSet getRows(int begin, int qty, String... 
params) { DataSet set = new DataSet(); Map<String, String> kvs = new HashMap<String, String>(); int len = params.length; int i = 0; String srcFlagTag = "srcFlag"; //参数含有{}的 在kvs中根据key值+tag 放入一个新的键值对,如时间格式TIME:{10:10} while (i < len) { String p1 = params[i]; if (BasicUtil.isEmpty(p1)) { i++; continue; } else if (p1.contains(":")) { String ks[] = BeanUtil.parseKeyValue(p1); kvs.put(ks[0], ks[1]); i++; continue; } else { if (i + 1 < len) { String p2 = params[i + 1]; if (BasicUtil.isEmpty(p2) || !p2.contains(":")) { kvs.put(p1, p2); i += 2; continue; } else if (p2.startsWith("{") && p2.endsWith("}")) { p2 = p2.substring(1, p2.length() - 1); kvs.put(p1, p2); kvs.put(p1 + srcFlagTag, "true"); i += 2; continue; } else { String ks[] = BeanUtil.parseKeyValue(p2); kvs.put(ks[0], ks[1]); i += 2; continue; } } } i++; } return getRows(begin, qty, kvs); } public DataSet getRows(int begin, int qty, DataRow kvs) { Map<String,String> map = new HashMap<String,String>(); for(String k:kvs.keySet()){ map.put(k, kvs.getString(k)); } return getRows(begin, qty, map); } public DataSet getRows(int begin, int qty, Map<String, String> kvs) { DataSet set = new DataSet(); String srcFlagTag = "srcFlag"; //参数含有{}的 在kvs中根据key值+tag 放入一个新的键值对 BigDecimal d1; BigDecimal d2; for (DataRow row:rows) { if(row.skip){ continue; } boolean chk = true;//对比结果 for (String k : kvs.keySet()) { boolean srcFlag = false; if (k.endsWith(srcFlagTag)) { continue; } else { String srcFlagValue = kvs.get(k + srcFlagTag); if (BasicUtil.isNotEmpty(srcFlagValue)) { srcFlag = true; }<|fim▁hole|> String v = kvs.get(k); Object value = row.get(k); if(!row.containsKey(k) && null == value){ //注意这里有可能是个复合key chk = false; break; } if (null == v) { if (null != value) { chk = false; break; }else{ continue; } } else { if (null == value) { chk = false; break; } //与SQL.COMPARE_TYPE保持一致 int compare = 10; if (v.startsWith("=")) { compare = 10; v = v.substring(1); } else if (v.startsWith(">")) { compare = 20; v = v.substring(1); } else if (v.startsWith(">=")) { compare = 21; v = v.substring(2); } else if (v.startsWith("<")) { compare = 30; v = v.substring(1); } else if (v.startsWith("<=")) { compare = 31; v = v.substring(2); } else if (v.startsWith("%") && v.endsWith("%")) { compare = 50; v = v.substring(1, v.length() - 1); } else if (v.endsWith("%")) { compare = 51; v = v.substring(0, v.length() - 1); } else if (v.startsWith("%")) { compare = 52; v = v.substring(1); } if(compare <= 31 && value instanceof Number) { try { d1 = new BigDecimal(value.toString()); d2 = new BigDecimal(v); int cr = d1.compareTo(d2); if (compare == 10) { if (cr != 0) { chk = false; break; } } else if (compare == 20) { if (cr <= 0) { chk = false; break; } } else if (compare == 21) { if (cr < 0) { chk = false; break; } } else if (compare == 30) { if (cr >= 0) { chk = false; break; } } else if (compare == 31) { if (cr > 0) { chk = false; break; } } }catch (NumberFormatException e){ chk = false; break; } } String str = value + ""; str = str.toLowerCase(); v = v.toLowerCase(); if (srcFlag) { v = "{" + v + "}"; } if (compare == 10) { if (!v.equals(str)) { chk = false; break; } } else if (compare == 50) { if (!str.contains(v)) { chk = false; break; } } else if (compare == 51) { if (!str.startsWith(v)) { chk = false; break; } } else if (compare == 52) { if (!str.endsWith(v)) { chk = false; break; } } } }//end for kvs if (chk) { set.add(row); if (qty > 0 && set.size() >= qty) { break; } } }//end for rows set.cloneProperty(this); return set; } public DataSet getRows(int begin, String... 
params) { return getRows(begin, -1, params); } public DataSet getRows(String... params) { return getRows(0, params); } public DataSet getRows(DataSet set, String key) { String kvs[] = new String[set.size()]; int i = 0; for (DataRow row : set) { String value = row.getString(key); if (BasicUtil.isNotEmpty(value)) { kvs[i++] = key + ":" + value; } } return getRows(kvs); } public DataSet getRows(DataRow row, String... keys) { List<String> list = new ArrayList<>(); int i = 0; for (String key : keys) { String value = row.getString(key); if (BasicUtil.isNotEmpty(value)) { list.add(key + ":" + value); } } String[] kvs = BeanUtil.list2array(list); return getRows(kvs); } /** * 数字格式化 * * @param format format * @param cols cols * @return return */ public DataSet formatNumber(String format, String... cols) { if (null == cols || BasicUtil.isEmpty(format)) { return this; } int size = size(); for (int i = 0; i < size; i++) { DataRow row = getRow(i); row.formatNumber(format, cols); } return this; } public DataSet numberFormat(String target, String key, String format){ for(DataRow row: rows){ numberFormat(target, key, format); } return this; } public DataSet numberFormat(String key, String format){ return numberFormat(key, key, format); } /** * 日期格式化 * * @param format format * @param cols cols * @return return */ public DataSet formatDate(String format, String... cols) { if (null == cols || BasicUtil.isEmpty(format)) { return this; } int size = size(); for (int i = 0; i < size; i++) { DataRow row = getRow(i); row.formatDate(format, cols); } return this; } public DataSet dateFormat(String target, String key, String format){ for(DataRow row: rows){ dateFormat(target, key, format); } return this; } public DataSet dateFormat(String key, String format){ return dateFormat(key, key, format); } /** * 提取符合指定属性值的集合 * * @param begin begin * @param end end * @param key key * @param value value * @return return */ public DataSet filter(int begin, int end, String key, String value) { DataSet set = new DataSet(); String tmpValue; int size = size(); if (begin < 0) { begin = 0; } for (int i = begin; i < size && i <= end; i++) { tmpValue = getString(i, key, ""); if ((null == value && null == tmpValue) || (null != value && value.equals(tmpValue))) { set.add(getRow(i)); } } set.cloneProperty(this); return set; } public DataSet getRows(int fr, int to) { DataSet set = new DataSet(); int size = this.size(); if (fr < 0) { fr = 0; } for (int i = fr; i < size && i <= to; i++) { set.addRow(getRow(i)); } return set; } /** * 合计 * @param begin 开始 * @param end 结束 * @param key key * @return BigDecimal */ public BigDecimal sum(int begin, int end, String key) { BigDecimal result = BigDecimal.ZERO; int size = rows.size(); if (begin <= 0) { begin = 0; } for (int i = begin; i < size && i <= end; i++) { BigDecimal tmp = getDecimal(i, key, 0); if (null != tmp) { result = result.add(getDecimal(i, key, 0)); } } return result; } public BigDecimal sum(String key) { BigDecimal result = BigDecimal.ZERO; result = sum(0, size() - 1, key); return result; } /** * 多列合计 * @param result 保存合计结果 * @param keys keys * @return DataRow */ public DataRow sums(DataRow result, String... keys) { if(null == result){ result = new DataRow(); } if (size() > 0) { if (null != keys) { for (String key : keys) { result.put(key, sum(key)); } } else { List<String> numberKeys = getRow(0).numberKeys(); for (String key : numberKeys) { result.put(key, sum(key)); } } } return result; } public DataRow sums(String... 
keys) { return sums(new DataRow(), keys); } /** * 多列平均值 * * @param result 保存合计结果 * @param keys keys * @return DataRow */ public DataRow avgs(DataRow result, String... keys) { if(null == result){ result = new DataRow(); } if (size() > 0) { if (null != keys) { for (String key : keys) { result.put(key, avg(key)); } } else { List<String> numberKeys = getRow(0).numberKeys(); for (String key : numberKeys) { result.put(key, avg(key)); } } } return result; } public DataRow avgs(String... keys) { return avgs(new DataRow(), keys); } /** * 多列平均值 * @param result 保存合计结果 * @param scale scale * @param round round * @param keys keys * @return DataRow */ public DataRow avgs(DataRow result, int scale, int round, String... keys) { if(null == result){ result = new DataRow(); } if (size() > 0) { if (null != keys) { for (String key : keys) { result.put(key, avg(key, scale, round)); } } else { List<String> numberKeys = getRow(0).numberKeys(); for (String key : numberKeys) { result.put(key, avg(key, scale, round)); } } } return result; } public DataRow avgs(int scale, int round, String... keys) { return avgs(new DataRow(), scale, round, keys); } /** * 最大值 * * @param top 多少行 * @param key key * @return return */ public BigDecimal maxDecimal(int top, String key) { BigDecimal result = null; int size = rows.size(); if (size > top) { size = top; } for (int i = 0; i < size; i++) { BigDecimal tmp = getDecimal(i, key, 0); if (null != tmp && (null == result || tmp.compareTo(result) > 0)) { result = tmp; } } return result; } public BigDecimal maxDecimal(String key) { return maxDecimal(size(), key); } public int maxInt(int top, String key) { BigDecimal result = maxDecimal(top, key); if (null == result) { return 0; } return result.intValue(); } public int maxInt(String key) { return maxInt(size(), key); } public double maxDouble(int top, String key) { BigDecimal result = maxDecimal(top, key); if (null == result) { return 0; } return result.doubleValue(); } public double maxDouble(String key) { return maxDouble(size(), key); } // public BigDecimal max(int top, String key){ // BigDecimal result = maxDecimal(top, key); // return result; // } // public BigDecimal max(String key){ // return maxDecimal(size(), key); // } /** * 最小值 * * @param top 多少行 * @param key key * @return return */ public BigDecimal minDecimal(int top, String key) { BigDecimal result = null; int size = rows.size(); if (size > top) { size = top; } for (int i = 0; i < size; i++) { BigDecimal tmp = getDecimal(i, key, 0); if (null != tmp && (null == result || tmp.compareTo(result) < 0)) { result = tmp; } } return result; } public BigDecimal minDecimal(String key) { return minDecimal(size(), key); } public int minInt(int top, String key) { BigDecimal result = minDecimal(top, key); if (null == result) { return 0; } return result.intValue(); } public int minInt(String key) { return minInt(size(), key); } public double minDouble(int top, String key) { BigDecimal result = minDecimal(top, key); if (null == result) { return 0; } return result.doubleValue(); } public double minDouble(String key) { return minDouble(size(), key); } // public BigDecimal min(int top, String key){ // BigDecimal result = minDecimal(top, key); // return result; // } // public BigDecimal min(String key){ // return minDecimal(size(), key); // } /** * key对应的value最大的一行 * * @param key key * @return return */ public DataRow max(String key) { int size = size(); if (size == 0) { return null; } DataRow row = null; if (isAsc) { row = getRow(size - 1); } else if (isDesc) { row = getRow(0); } else { 
asc(key); row = getRow(size - 1); } return row; } public DataRow min(String key) { int size = size(); if (size == 0) { return null; } DataRow row = null; if (isAsc) { row = getRow(0); } else if (isDesc) { row = getRow(size - 1); } else { asc(key); row = getRow(0); } return row; } /** * 平均值 空数据不参与加法但参与除法 * * @param top 多少行 * @param key key * @param scale scale * @param round round * @return return */ public BigDecimal avg(int top, String key, int scale, int round) { BigDecimal result = BigDecimal.ZERO; int size = rows.size(); if (size > top) { size = top; } int count = 0; for (int i = 0; i < size; i++) { BigDecimal tmp = getDecimal(i, key, 0); if (null != tmp) { result = result.add(tmp); } count++; } if (count > 0) { result = result.divide(new BigDecimal(count), scale, round); } return result; } public BigDecimal avg(String key, int scale, int round) { BigDecimal result = avg(size(), key, scale ,round); return result; } public BigDecimal avg(String key) { BigDecimal result = avg(size(), key, 2, BigDecimal.ROUND_HALF_UP); return result; } public DataSet addRow(DataRow row) { if (null != row) { rows.add(row); } return this; } public DataSet addRow(int idx, DataRow row) { if (null != row) { rows.add(idx, row); } return this; } /** * 合并key例的值 以connector连接 * * @param key key * @param connector connector * @return return v1,v2,v3 */ public String concat(String key, String connector) { return BasicUtil.concat(getStrings(key), connector); } public String concatNvl(String key, String connector) { return BasicUtil.concat(getNvlStrings(key), connector); } /** * 合并key例的值 以connector连接(不取null值) * * @param key key * @param connector connector * @return return v1,v2,v3 */ public String concatWithoutNull(String key, String connector) { return BasicUtil.concat(getStringsWithoutNull(key), connector); } /** * 合并key例的值 以connector连接(不取空值) * * @param key key * @param connector connector * @return return v1,v2,v3 */ public String concatWithoutEmpty(String key, String connector) { return BasicUtil.concat(getStringsWithoutEmpty(key), connector); } public String concatNvl(String key) { return BasicUtil.concat(getNvlStrings(key), ","); } public String concatWithoutNull(String key) { return BasicUtil.concat(getStringsWithoutNull(key), ","); } public String concatWithoutEmpty(String key) { return BasicUtil.concat(getStringsWithoutEmpty(key), ","); } public String concat(String key) { return BasicUtil.concat(getStrings(key), ","); } /** * 提取单列值 * * @param key key * @return return */ public List<Object> fetchValues(String key) { List<Object> result = new ArrayList<Object>(); for (int i = 0; i < size(); i++) { result.add(get(i, key)); } return result; } /** * 取单列不重复的值 * * @param key key * @return return */ public List<String> fetchDistinctValue(String key) { List<String> result = new ArrayList<>(); for (int i = 0; i < size(); i++) { String value = getString(i, key, ""); if (result.contains(value)) { continue; } result.add(value); } return result; } public List<String> fetchDistinctValues(String key) { return fetchDistinctValue(key); } /** * 分页 * * @param link link * @return return */ public String displayNavi(String link) { String result = ""; if (null != navi) { result = navi.getHtml(); } return result; } public String navi(String link) { return displayNavi(link); } public String displayNavi() { return displayNavi(null); } public String navi() { return displayNavi(null); } public DataSet put(int idx, String key, Object value) { DataRow row = getRow(idx); if (null != row) { row.put(key, value); } return this; } public 
DataSet removes(String... keys) { for (DataRow row : rows) { row.removes(keys); } return this; } /** * String * * @param index index * @param key key * @return String * @throws Exception Exception */ public String getString(int index, String key) throws Exception { return getRow(index).getString(key); } public String getString(int index, String key, String def) { try { return getString(index, key); } catch (Exception e) { return def; } } public String getString(String key) throws Exception { return getString(0, key); } public String getString(String key, String def) { return getString(0, key, def); } public Object get(int index, String key) { DataRow row = getRow(index); if (null != row) { return row.get(key); } return null; } public List<Object> gets(String key) { List<Object> list = new ArrayList<Object>(); for (DataRow row : rows) { list.add(row.getString(key)); } return list; } public List<DataSet> getSets(String key) { List<DataSet> list = new ArrayList<DataSet>(); for (DataRow row : rows) { DataSet set = row.getSet(key); if (null != set) { list.add(set); } } return list; } public List<String> getStrings(String key) { List<String> result = new ArrayList<>(); for (DataRow row : rows) { result.add(row.getString(key)); } return result; } public List<Integer> getInts(String key) throws Exception { List<Integer> result = new ArrayList<Integer>(); for (DataRow row : rows) { result.add(row.getInt(key)); } return result; } public List<Object> getObjects(String key) { List<Object> result = new ArrayList<Object>(); for (DataRow row : rows) { result.add(row.get(key)); } return result; } public List<String> getDistinctStrings(String key) { return fetchDistinctValue(key); } public List<String> getNvlStrings(String key) { List<String> result = new ArrayList<>(); List<Object> list = fetchValues(key); for (Object val : list) { if (null != val) { result.add(val.toString()); } else { result.add(""); } } return result; } public List<String> getStringsWithoutEmpty(String key) { List<String> result = new ArrayList<>(); List<Object> list = fetchValues(key); for (Object val : list) { if (BasicUtil.isNotEmpty(val)) { result.add(val.toString()); } } return result; } public List<String> getStringsWithoutNull(String key) { List<String> result = new ArrayList<>(); List<Object> list = fetchValues(key); for (Object val : list) { if (null != val) { result.add(val.toString()); } } return result; } public BigDecimal getDecimal(int idx, String key) throws Exception { return getRow(idx).getDecimal(key); } public BigDecimal getDecimal(int idx, String key, double def) { return getDecimal(idx, key, new BigDecimal(def)); } public BigDecimal getDecimal(int idx, String key, BigDecimal def) { try { BigDecimal val = getDecimal(idx, key); if (null == val) { return def; } return val; } catch (Exception e) { return def; } } /** * 抽取指定列生成新的DataSet 新的DataSet只包括指定列的值与分页信息,不包含其他附加信息(如来源表) * @param keys keys * @return DataSet */ public DataSet extract(String ... 
keys){ DataSet result = new DataSet(); for(DataRow row:rows){ DataRow item = row.extract(keys); result.add(item); } result.navi = this.navi; return result; } public DataSet extract(List<String> keys){ DataSet result = new DataSet(); for(DataRow row:rows){ DataRow item = row.extract(keys); result.add(item); } result.navi = this.navi; return result; } /** * html格式(未实现) * * @param index index * @param key key * @return return * @throws Exception Exception */ public String getHtmlString(int index, String key) throws Exception { return getString(index, key); } public String getHtmlString(int index, String key, String def) { return getString(index, key, def); } public String getHtmlString(String key) throws Exception { return getHtmlString(0, key); } /** * escape String * * @param index index * @param key key * @return return * @throws Exception Exception */ public String getEscapeString(int index, String key) throws Exception { return EscapeUtil.escape(getString(index, key)).toString(); } public String getEscapeString(int index, String key, String def) { try { return getEscapeString(index, key); } catch (Exception e) { return EscapeUtil.escape(def).toString(); } } public String getDoubleEscapeString(int index, String key) throws Exception { return EscapeUtil.doubleEscape(getString(index, key)); } public String getDoubleEscapeString(int index, String key, String def) { try { return getDoubleEscapeString(index, key); } catch (Exception e) { return EscapeUtil.doubleEscape(def); } } public String getEscapeString(String key) throws Exception { return getEscapeString(0, key); } public String getDoubleEscapeString(String key) throws Exception { return getDoubleEscapeString(0, key); } /** * int * * @param index index * @param key key * @return return * @throws Exception Exception */ public int getInt(int index, String key) throws Exception { return getRow(index).getInt(key); } public int getInt(int index, String key, int def) { try { return getInt(index, key); } catch (Exception e) { return def; } } public int getInt(String key) throws Exception { return getInt(0, key); } public int getInt(String key, int def) { return getInt(0, key, def); } /** * double * * @param index index * @param key key * @return return * @throws Exception Exception */ public double getDouble(int index, String key) throws Exception { return getRow(index).getDouble(key); } public double getDouble(int index, String key, double def) { try { return getDouble(index, key); } catch (Exception e) { return def; } } public double getDouble(String key) throws Exception { return getDouble(0, key); } public double getDouble(String key, double def) { return getDouble(0, key, def); } /** * 在key列基础上 +value,如果原来没有key列则默认0并put到target * @param target 计算结果key * @param key key * @param value value * @return this */ public DataSet add(String target, String key, int value){ for(DataRow row:rows){ row.add(target, key, value); } return this; } public DataSet add(String target, String key, double value){ for(DataRow row:rows){ row.add(target, key, value); } return this; } public DataSet add(String target, String key, short value){ for(DataRow row:rows){ row.add(target, key, value); } return this; } public DataSet add(String target, String key, float value){ for(DataRow row:rows){ row.add(target, key, value); } return this; } public DataSet add(String target, String key, BigDecimal value){ for(DataRow row:rows){ row.add(target, key, value); } return this; } public DataSet add(String key, int value){ return add(key, key, value); } public DataSet add(String 
key, double value){ return add(key, key, value); } public DataSet add(String key, short value){ return add(key, key, value); } public DataSet add(String key, float value){ return add(key, key, value); } public DataSet add(String key, BigDecimal value){ return add(key, key, value); } public DataSet subtract(String target, String key, int value){ for(DataRow row:rows){ row.subtract(target, key, value); } return this; } public DataSet subtract(String target, String key, double value){ for(DataRow row:rows){ row.subtract(target, key, value); } return this; } public DataSet subtract(String target, String key, short value){ for(DataRow row:rows){ row.subtract(target, key, value); } return this; } public DataSet subtract(String target, String key, float value){ for(DataRow row:rows){ row.subtract(target, key, value); } return this; } public DataSet subtract(String target, String key, BigDecimal value){ for(DataRow row:rows){ row.subtract(target, key, value); } return this; } public DataSet subtract(String key, int value){ return subtract(key, key, value); } public DataSet subtract(String key, double value){ return subtract(key, key, value); } public DataSet subtract(String key, short value){ return subtract(key, key, value); } public DataSet subtract(String key, float value){ return subtract(key, key, value); } public DataSet subtract(String key, BigDecimal value){ return subtract(key, key, value); } public DataSet multiply(String target, String key, int value){ for(DataRow row:rows){ row.multiply(target, key, value); } return this; } public DataSet multiply(String target, String key, double value){ for(DataRow row:rows){ row.multiply(target, key, value); } return this; } public DataSet multiply(String target, String key, short value){ for(DataRow row:rows){ row.multiply(target, key, value); } return this; } public DataSet multiply(String target, String key, float value){ for(DataRow row:rows){ row.multiply(target, key, value); } return this; } public DataSet multiply(String target, String key, BigDecimal value){ for(DataRow row:rows){ row.multiply(target, key, value); } return this; } public DataSet multiply(String key, int value){ return multiply(key,key,value); } public DataSet multiply(String key, double value){ return multiply(key,key,value); } public DataSet multiply(String key, short value){ return multiply(key,key,value); } public DataSet multiply(String key, float value){ return multiply(key,key,value); } public DataSet multiply(String key, BigDecimal value){ return multiply(key,key,value); } public DataSet divide(String target, String key, int value){ for(DataRow row:rows){ row.divide(target, key, value); } return this; } public DataSet divide(String target, String key, double value){ for(DataRow row:rows){ row.divide(target, key, value); } return this; } public DataSet divide(String target, String key, short value){ for(DataRow row:rows){ row.divide(target, key, value); } return this; } public DataSet divide(String target, String key, float value){ for(DataRow row:rows){ row.divide(target, key, value); } return this; } public DataSet divide(String target, String key, BigDecimal value, int mode){ for(DataRow row:rows){ row.divide(target, key, value, mode); } return this; } public DataSet divide(String key, int value){ return divide(key,key, value); } public DataSet divide(String key, double value){ return divide(key,key, value); } public DataSet divide(String key, short value){ return divide(key,key, value); } public DataSet divide(String key, float value){ return divide(key,key, 
value); } public DataSet divide(String key, BigDecimal value, int mode){ return divide(key,key, value, mode); } public DataSet round(String target, String key, int scale, int mode){ for (DataRow row:rows){ row.round(target, key, scale, mode); } return this; } public DataSet round(String key, int scale, int mode){ return round(key, key, scale, mode); } /** * DataSet拆分成size部分 * @param page 拆成多少部分 * @return list */ public List<DataSet> split(int page){ List<DataSet> list = new ArrayList<>(); int size = this.size(); int vol = size / page;//每页多少行 for(int i=0; i<page; i++){ int fr = i*vol; int to = (i+1)*vol-1; if(i == page-1){ to = size-1; } DataSet set = this.cuts(fr, to); list.add(set); } return list; } /** * rows 列表中的数据格式化成json格式 不同与toJSON * map.put("type", "list"); * map.put("result", result); * map.put("message", message); * map.put("rows", rows); * map.put("success", result); * map.put("navi", navi); */ public String toString() { Map<String, Object> map = new HashMap<String, Object>(); map.put("type", "list"); map.put("result", result); map.put("message", message); map.put("rows", rows); map.put("success", result); if(null != navi){ Map<String,Object> navi_ = new HashMap<String,Object>(); navi_.put("page", navi.getCurPage()); navi_.put("pages", navi.getTotalPage()); navi_.put("rows", navi.getTotalRow()); navi_.put("vol", navi.getPageRows()); map.put("navi", navi_); } return BeanUtil.map2json(map); } /** * rows 列表中的数据格式化成json格式 不同与toString * * @return return */ public String toJson() { return BeanUtil.object2json(this); } public String getJson() { return toJSON(); } public String toJSON() { return toJson(); } /** * 根据指定列生成map * * @param key ID,{ID}_{NM} * @return return */ public Map<String, DataRow> toMap(String key) { Map<String, DataRow> maps = new HashMap<String, DataRow>(); for (DataRow row : rows) { maps.put(row.getString(key), row); } return maps; } /** * 子类 * * @param idx idx * @return return */ public Object getChildren(int idx) { DataRow row = getRow(idx); if (null != row) { return row.getChildren(); } return null; } public Object getChildren() { return getChildren(0); } public DataSet setChildren(int idx, Object children) { DataRow row = getRow(idx); if (null != row) { row.setChildren(children); } return this; } public DataSet setChildren(Object children) { setChildren(0, children); return this; } /** * 父类 * * @param idx idx * @return return */ public Object getParent(int idx) { DataRow row = getRow(idx); if (null != row) { return row.getParent(); } return null; } public Object getParent() { return getParent(0); } public DataSet setParent(int idx, Object parent) { DataRow row = getRow(idx); if (null != row) { row.setParent(parent); } return this; } public DataSet setParent(Object parent) { setParent(0, parent); return this; } /** * 转换成对象 * * @param <T> T * @param index index * @param clazz clazz * @return return */ public <T> T entity(int index, Class<T> clazz) { DataRow row = getRow(index); if (null != row) { return row.entity(clazz); } return null; } /** * 转换成对象集合 * * @param <T> T * @param clazz clazz * @return return */ public <T> List<T> entity(Class<T> clazz) { List<T> list = new ArrayList<T>(); if (null != rows) { for (DataRow row : rows) { list.add(row.entity(clazz)); } } return list; } public <T> T entity(Class<T> clazz, int idx) { DataRow row = getRow(idx); if (null != row) { return row.entity(clazz); } return null; } public DataSet setDataSource(String dataSource) { if (null == dataSource) { return this; } this.dataSource = dataSource; if (dataSource.contains(".") && 
!dataSource.contains(":")) { schema = dataSource.substring(0, dataSource.indexOf(".")); table = dataSource.substring(dataSource.indexOf(".") + 1); } for (DataRow row : rows) { if (BasicUtil.isEmpty(row.getDataSource())) { row.setDataSource(dataSource); } } return this; } /** * 合并 * @param set DataSet * @param keys 根据keys去重 * @return DataSet */ public DataSet union(DataSet set, String... keys) { DataSet result = new DataSet(); if (null != rows) { int size = rows.size(); for (int i = 0; i < size; i++) { result.add(rows.get(i)); } } if (null == keys || keys.length == 0) { keys = new String[1]; keys[0] = ConfigTable.getString("DEFAULT_PRIMARY_KEY"); } int size = set.size(); for (int i = 0; i < size; i++) { DataRow item = set.getRow(i); if (!result.contains(item, keys)) { result.add(item); } } return result; } /** * 合并合并不去重 * * @param set set * @return return */ public DataSet unionAll(DataSet set) { DataSet result = new DataSet(); if (null != rows) { int size = rows.size(); for (int i = 0; i < size; i++) { result.add(rows.get(i)); } } int size = set.size(); for (int i = 0; i < size; i++) { DataRow item = set.getRow(i); result.add(item); } return result; } /** * 是否包含这一行 * * @param row row * @param keys keys * @return return */ public boolean contains(DataRow row, String... keys) { if (null == rows || rows.size() == 0 || null == row) { return false; } if (null == keys || keys.length == 0) { keys = new String[1]; keys[0] = ConfigTable.getString("DEFAULT_PRIMARY_KEY", "ID"); } String params[] = packParam(row, keys); return exists(params); } public String[] packParam(DataRow row, String... keys) { if (null == keys || null == row) { return null; } String params[] = new String[keys.length * 2]; int idx = 0; for (String key : keys) { if (null == key) { continue; } String ks[] = BeanUtil.parseKeyValue(key); params[idx++] = ks[0]; params[idx++] = row.getString(ks[1]); } return params; } /** * 根据数据与属性列表 封装kvs * ["ID","1","CODE","A01"] * @param row 数据 DataRow * @param keys 属性 ID,CODE * @return kvs */ public String[] packParam(DataRow row, List<String> keys) { if (null == keys || null == row) { return null; } String params[] = new String[keys.size() * 2]; int idx = 0; for (String key : keys) { if (null == key) { continue; } String ks[] = BeanUtil.parseKeyValue(key); params[idx++] = ks[0]; params[idx++] = row.getString(ks[1]); } return params; } /** * 从items中按相应的key提取数据 存入 * dispatch("children",items, "DEPAT_CD") * dispatchs("children",items, "CD:BASE_CD") * * @param field 默认"ITEMS" * @param unique 是否只分配一次(同一个条目不能分配到多个组中) * @param recursion 是否递归 * @param items items * @param keys keys ID:DEPT_ID或ID * @return return */ public DataSet dispatchs(String field, boolean unique, boolean recursion, DataSet items, String... keys) { if(null == keys || keys.length == 0){ throw new RuntimeException("未指定对应关系"); } if (null == items) { return this; } if (BasicUtil.isEmpty(field)) { field = "ITEMS"; } for (DataRow row : rows) { if (null == row.get(field)) { String[] kvs = packParam(row, reverseKey(keys)); DataSet set = items.getRows(kvs); if (recursion) { set.dispatchs(field, unique, recursion, items, keys); } if(unique) { set.skip(true); } row.put(field, set); } } items.skip(false); return this; } public DataSet dispatchs(boolean unique, boolean recursion, DataSet items, String... keys) { return dispatchs("ITEMS", unique, recursion, items, keys); } public DataSet dispatchs(String field, DataSet items, String... keys) { return dispatchs(field,false, false, items, keys); } public DataSet dispatchs(DataSet items, String... 
keys) { return dispatchs("ITEMS", items, keys); } public DataSet dispatchs(boolean unique, boolean recursion, String... keys) { return dispatchs("ITEMS", unique, recursion, this, keys); } public DataSet dispatchs(String field, boolean unique, boolean recursion, String... keys) { return dispatchs(field, unique, recursion, this, keys); } public DataSet dispatch(String field, boolean unique, boolean recursion, DataSet items, String... keys) { if(null == keys || keys.length == 0){ throw new RuntimeException("未指定对应关系"); } if (null == items) { return this; } if (BasicUtil.isEmpty(field)) { field = "ITEM"; } for (DataRow row : rows) { if (null == row.get(field)) { String[] params = packParam(row, reverseKey(keys)); DataRow result = items.getRow(params); if(unique){ result.skip = true; } row.put(field, result); } } items.skip(false); return this; } public DataSet dispatch(String field, DataSet items, String... keys) { return dispatch(field, false, false, items, keys); } public DataSet dispatch(DataSet items, String... keys) { return dispatch("ITEM", items, keys); } public DataSet dispatch(boolean unique, boolean recursion, String... keys) { return dispatch("ITEM", unique, recursion, this, keys); } public DataSet dispatch(String field, boolean unique, boolean recursion, String... keys) { return dispatch(field, unique, recursion, this, keys); } /** * 直接调用dispatchs * @param field 默认"ITEMS" * @param unique 是否只分配一次(同一个条目不能分配到多个组中) * @param recursion 是否递归 * @param items items * @param keys keys ID:DEPT_ID或ID * @return return */ @Deprecated public DataSet dispatchItems(String field, boolean unique, boolean recursion, DataSet items, String... keys) { return dispatchs(field, unique, recursion, items, keys); } @Deprecated public DataSet dispatchItems(boolean unique, boolean recursion, DataSet items, String... keys) { return dispatchs( unique, recursion, items, keys); } @Deprecated public DataSet dispatchItems(String field, DataSet items, String... keys) { return dispatchs(field, items, keys); } @Deprecated public DataSet dispatchItems(DataSet items, String... keys) { return dispatchs(items, keys); } @Deprecated public DataSet dispatchItems(boolean unique, boolean recursion, String... keys) { return dispatchs( unique, recursion, keys); } @Deprecated public DataSet dispatchItems(String field, boolean unique, boolean recursion, String... keys) { return dispatchs(field, unique, recursion, keys); } @Deprecated public DataSet dispatchItem(String field, boolean unique, boolean recursion, DataSet items, String... keys) { return dispatch(field, unique, recursion, items, keys); } @Deprecated public DataSet dispatchItem(String field, DataSet items, String... keys) { return dispatch(field, items, keys); } @Deprecated public DataSet dispatchItem(DataSet items, String... keys) { return dispatch(items, keys); } @Deprecated public DataSet dispatchItem(boolean unique, boolean recursion, String... keys) { return dispatch(unique, recursion, keys); } @Deprecated public DataSet dispatchItem(String field, boolean unique, boolean recursion, String... keys) { return dispatch(field, unique, recursion, keys); } /** * 根据keys列建立关联,并将关联出来的结果拼接到集合的条目上,如果有重复则覆盖条目 * * @param items 被查询的集合 * @param keys 关联条件列 * @return return */ public DataSet join(DataSet items, String... 
keys) { if (null == items || null == keys || keys.length == 0) { return this; } for (DataRow row : rows) { String[] params = packParam(row, reverseKey(keys)); DataRow result = items.getRow(params); if (null != result) { row.copy(result, result.keys()); } } return this; } public DataSet toLowerKey() { for (DataRow row : rows) { row.toLowerKey(); } return this; } public DataSet toUpperKey() { for (DataRow row : rows) { row.toUpperKey(); } return this; } /** * 按keys分组 * * @param keys keys * @return return */ public DataSet group(String... keys) { DataSet result = distinct(keys); result.dispatchs(true,false, this, keys); return result; } public DataSet or(DataSet set, String... keys) { return this.union(set, keys); } public DataSet getRows(Map<String, String> kvs) { return getRows(0, -1, kvs); } /** * 多个集合的交集 * * @param distinct 是否根据keys抽取不重复的集合 * @param sets 集合 * @param keys 判断依据 * @return DataSet */ public static DataSet intersection(boolean distinct, List<DataSet> sets, String... keys) { DataSet result = null; if (null != sets && sets.size() > 0) { for (DataSet set : sets) { if (null == result) { result = set; } else { result = result.intersection(distinct, set, keys); } } } if (null == result) { result = new DataSet(); } return result; } public static DataSet intersection(List<DataSet> sets, String... keys) { return intersection(false, sets, keys); } /** * 交集 * * @param distinct 是否根据keys抽取不重复的集合(根据keys去重) * @param set set * @param keys 根据keys列比较是否相等,如果列名不一致"ID:USER_ID",ID表示当前DataSet的列,USER_ID表示参数中DataSet的列 * @return return */ public DataSet intersection(boolean distinct, DataSet set, String... keys) { DataSet result = new DataSet(); if (null == set) { return result; } for (DataRow row : rows) { String[] kv = reverseKey(keys); if (set.contains(row, kv)) { //符合交集 if(!result.contains(row, kv)){//result中没有 result.add((DataRow) row.clone()); }else { if(!distinct){//result中有但不要求distinct result.add((DataRow) row.clone()); } } } } return result; } public DataSet intersection(DataSet set, String... keys) { return intersection(false, set, keys); } public DataSet and(boolean distinct, DataSet set, String... keys) { return intersection(distinct, set, keys); } public DataSet and(DataSet set, String... keys) { return intersection(false, set, keys); } /** * 补集 * 在this中,但不在set中 * this作为超集 set作为子集 * * @param distinct 是否根据keys抽取不重复的集合 * @param set set * @param keys keys * @return return */ public DataSet complement(boolean distinct, DataSet set, String... keys) { DataSet result = new DataSet(); for (DataRow row : rows) { String[] kv = reverseKey(keys); if (null == set || !set.contains(row, kv)) { if (!distinct || !result.contains(row, kv)) { result.add((DataRow) row.clone()); } } } return result; } public DataSet complement(DataSet set, String... keys) { return complement(false, set, keys); } /** * 差集 * 从当前集合中删除set中存在的row,生成新的DataSet并不修改当前对象 * this中有 set中没有的 * * @param distinct 是否根据keys抽取不重复的集合 * @param set set * @param keys CD,"CD:WORK_CD" * @return return */ public DataSet difference(boolean distinct, DataSet set, String... keys) { DataSet result = new DataSet(); for (DataRow row : rows) { String[] kv = reverseKey(keys); if (null == set || !set.contains(row, kv)) { if (!distinct || !result.contains(row, kv)) { result.add((DataRow) row.clone()); } } } return result; } public DataSet difference(DataSet set, String... 
keys) { return difference(false, set, keys); } /** * 颠倒kv-vk * * @param keys kv * @return String[] */ private String[] reverseKey(String[] keys) { if (null == keys) { return new String[0]; } int size = keys.length; String result[] = new String[size]; for (int i = 0; i < size; i++) { String key = keys[i]; if (BasicUtil.isNotEmpty(key) && key.contains(":")) { String ks[] = BeanUtil.parseKeyValue(key); key = ks[1] + ":" + ks[0]; } result[i] = key; } return result; } /** * 清除指定列全为空的行,如果不指定keys,则清除所有列都为空的行 * * @param keys keys * @return DataSet */ public DataSet removeEmptyRow(String... keys) { int size = this.size(); for (int i = size - 1; i >= 0; i--) { DataRow row = getRow(i); if (null == keys || keys.length == 0) { if (row.isEmpty()) { this.remove(row); } } else { boolean isEmpty = true; for (String key : keys) { if (row.isNotEmpty(key)) { isEmpty = false; break; } } if (isEmpty) { this.remove(row); } } } return this; } public DataSet changeKey(String key, String target, boolean remove) { for(DataRow row:rows){ row.changeKey(key, target, remove); } return this; } public DataSet changeKey(String key, String target) { return changeKey(key, target, true); } /** * 删除rows中的columns列 * * @param columns 检测的列,如果不输入则检测所有列 * @return DataSet */ public DataSet removeColumn(String... columns) { if (null != columns) { for (String column : columns) { for (DataRow row : rows) { row.remove(column); } } } return this; } /** * 删除rows中值为空(null|'')的列 * * @param columns 检测的列,如果不输入则检测所有列 * @return DataSet */ public DataSet removeEmptyColumn(String... columns) { for (DataRow row : rows) { row.removeEmpty(columns); } return this; } /** * NULL &gt; "" * * @return DataSet */ public DataSet nvl() { for (DataRow row : rows) { row.nvl(); } return this; } /* ********************************************** 实现接口 *********************************************************** */ public boolean add(DataRow e) { return rows.add((DataRow) e); } @SuppressWarnings({"rawtypes", "unchecked"}) public boolean addAll(Collection c) { return rows.addAll(c); } public void clear() { rows.clear(); } public boolean contains(Object o) { return rows.contains(o); } public boolean containsAll(Collection<?> c) { return rows.containsAll(c); } public Iterator<DataRow> iterator() { return rows.iterator(); } public boolean remove(Object o) { return rows.remove(o); } public boolean removeAll(Collection<?> c) { return rows.removeAll(c); } public boolean retainAll(Collection<?> c) { return rows.retainAll(c); } public Object[] toArray() { return rows.toArray(); } @SuppressWarnings("unchecked") public Object[] toArray(Object[] a) { return rows.toArray(a); } public String getSchema() { return schema; } public DataSet setSchema(String schema) { this.schema = schema; return this; } public String getTable() { return table; } public DataSet setTable(String table) { if (null != table && table.contains(".")) { String[] tbs = table.split("\\."); this.table = tbs[1]; this.schema = tbs[0]; } else { this.table = table; } return this; } /** * 验证是否过期 * 根据当前时间与创建时间对比 * 过期返回 true * * @param millisecond 过期时间(毫秒) millisecond 过期时间(毫秒) * @return boolean */ public boolean isExpire(int millisecond) { if (System.currentTimeMillis() - createTime > millisecond) { return true; } return false; } public boolean isExpire(long millisecond) { if (System.currentTimeMillis() - createTime > millisecond) { return true; } return false; } public boolean isExpire() { if (getExpires() == -1) { return false; } if (System.currentTimeMillis() - createTime > getExpires()) { return true; } return 
false; } public long getCreateTime() { return createTime; } public List<DataRow> getRows() { return rows; } /************************** getter setter ***************************************/ /** * 过期时间(毫秒) * * @return long */ public long getExpires() { return expires; } public DataSet setExpires(long millisecond) { this.expires = millisecond; return this; } public DataSet setExpires(int millisecond) { this.expires = millisecond; return this; } public boolean isResult() { return result; } public boolean isSuccess() { return result; } public DataSet setResult(boolean result) { this.result = result; return this; } public Exception getException() { return exception; } public DataSet setException(Exception exception) { this.exception = exception; return this; } public String getMessage() { return message; } public DataSet setMessage(String message) { this.message = message; return this; } public PageNavi getNavi() { return navi; } public DataSet setNavi(PageNavi navi) { this.navi = navi; return this; } public DataSet setRows(List<DataRow> rows) { this.rows = rows; return this; } public String getDataSource() { String ds = table; if (BasicUtil.isNotEmpty(ds) && BasicUtil.isNotEmpty(schema)) { ds = schema + "." + ds; } if (BasicUtil.isEmpty(ds)) { ds = dataSource; } return ds; } public DataSet order(final String... keys) { return asc(keys); } public DataSet put(String key, Object value, boolean pk, boolean override) { for (DataRow row : rows) { row.put(key, value, pk, override); } return this; } public DataSet put(String key, Object value, boolean pk) { for (DataRow row : rows) { row.put(key, value, pk); } return this; } public DataSet put(String key, Object value) { for (DataRow row : rows) { row.put(key, value); } return this; } /** * 行转列 * 表结构(编号, 姓名, 年度, 科目, 分数, 等级) * @param pks 唯一标识key(如编号,姓名) * @param classKeys 分类key(如年度,科目) * @param valueKeys 取值key(如分数,等级),如果不指定key则将整行作为value * @return * 如果指定key * 返回结构 [ * {编号:01,姓名:张三,2010-数学-分数:100}, * {编号:01,姓名:张三,2010-数学-等级:A}, * {编号:01,姓名:张三,2010-物理-分数:100} * ] * 如果只有一个valueKey则返回[ * {编号:01,姓名:张三,2010-数学:100}, * {编号:01,姓名:张三,2010-物理:90} * ] * 不指定valuekey则返回 [ * {编号:01,姓名:张三,2010-数学:{分数:100,等级:A}}, * {编号:01,姓名:张三,2010-物理:{分数:100,等级:A}} * ] */ public DataSet pivot(List<String> pks, List<String> classKeys, List<String> valueKeys) { DataSet result = distinct(pks); DataSet classValues = distinct(classKeys); //[{年度:2010,科目:数学},{年度:2010,科目:物理},{年度:2011,科目:数学}] for (DataRow row : result) { for (DataRow classValue : classValues) { DataRow params = new DataRow(); params.copy(row, pks).copy(classValue); DataRow valueRow = getRow(params); if(null != valueRow){ valueRow.skip = true; } String finalKey = concatValue(classValue,"-");//2010-数学 if(null != valueKeys && valueKeys.size() > 0){ if(valueKeys.size() == 1){ if (null != valueRow) { row.put(finalKey, valueRow.get(valueKeys.get(0))); } else { row.put(finalKey, null); } }else { for (String valueKey : valueKeys) { //{2010-数学-分数:100;2010-数学-等级:A} if (null != valueRow) { row.put(finalKey + "-" + valueKey, valueRow.get(valueKey)); } else { row.put(finalKey + "-" + valueKey, null); } } } }else{ if (null != valueRow){ row.put(finalKey, valueRow); }else{ row.put(finalKey, null); } } } } skip(false); return result; } public DataSet pivot(String[] pks, String[] classKeys, String[] valueKeys) { return pivot(Arrays.asList(pks),Arrays.asList(classKeys),Arrays.asList(valueKeys)); } /** * 行转列 * @param pk 唯一标识key(如姓名)多个key以,分隔如(编号,姓名) * @param classKey 分类key(如科目)多个key以,分隔如(科目,年度) * @param valueKey 取值key(如分数)多个key以,分隔如(分数,等级) * 
@return * 表结构(姓名,科目,分数) * 返回结构 [{姓名:张三,数学:100,物理:90,英语:80},{姓名:李四,数学:100,物理:90,英语:80}] */ public DataSet pivot(String pk, String classKey, String valueKey) { List<String> pks = new ArrayList<>(Arrays.asList(pk.trim().split(","))); List<String> classKeys = new ArrayList<>(Arrays.asList(classKey.trim().split(","))); List<String> valueKeys = new ArrayList<>(Arrays.asList(valueKey.trim().split(","))); return pivot(pks, classKeys, valueKeys); } public DataSet pivot(String pk, String classKey) { List<String> pks = new ArrayList<>(Arrays.asList(pk.trim().split(","))); List<String> classKeys = new ArrayList<>(Arrays.asList(classKey.trim().split(","))); List<String> valueKeys = new ArrayList<>(); return pivot(pks, classKeys, valueKeys); } public DataSet pivot(List<String> pks, List<String> classKeys, String ... valueKeys) { List<String> list = new ArrayList<>(); if(null != valueKeys){ for(String item:valueKeys){ list.add(item); } } return pivot(pks, classKeys, valueKeys); } private String concatValue(DataRow row, String split){ StringBuilder builder = new StringBuilder(); List<String> keys = row.keys(); for(String key:keys){ if(builder.length() > 0){ builder.append(split); } builder.append(row.getString(key)); } return builder.toString(); } private String[] kvs(DataRow row){ List<String> keys = row.keys(); int size = keys.size(); String[] kvs = new String[size*2]; for(int i=0; i<size; i++){ String k = keys.get(i); String v = row.getStringNvl(k); kvs[i*2] = k; kvs[i*2+1] = v; } return kvs; } /** * 排序 * * @param keys keys * @return DataSet */ public DataSet asc(final String... keys) { Collections.sort(rows, new Comparator<DataRow>() { public int compare(DataRow r1, DataRow r2) { int result = 0; for (String key : keys) { Object v1 = r1.get(key); Object v2 = r2.get(key); if (null == v1) { if (null == v2) { continue; } return -1; } else { if (null == v2) { return 1; } } if (BasicUtil.isNumber(v1) && BasicUtil.isNumber(v2)) { BigDecimal num1 = new BigDecimal(v1.toString()); BigDecimal num2 = new BigDecimal(v2.toString()); result = num1.compareTo(num2); } else if (v1 instanceof Date && v2 instanceof Date) { Date date1 = (Date)v1; Date date2 = (Date)v2; result = date1.compareTo(date2); } else { result = v1.toString().compareTo(v2.toString()); } if (result != 0) { return result; } } return 0; } }); isAsc = true; isDesc = false; return this; } public DataSet desc(final String... keys) { Collections.sort(rows, new Comparator<DataRow>() { public int compare(DataRow r1, DataRow r2) { int result = 0; for (String key : keys) { Object v1 = r1.get(key); Object v2 = r2.get(key); if (null == v1) { if (null == v2) { continue; } return 1; } else { if (null == v2) { return -1; } } if (BasicUtil.isNumber(v1) && BasicUtil.isNumber(v2)) { BigDecimal val1 = new BigDecimal(v1.toString()); BigDecimal val2 = new BigDecimal(v2.toString()); result = val2.compareTo(val1); } else if (v1 instanceof Date && v2 instanceof Date) { Date date1 = (Date)v1; Date date2 = (Date)v2; result = date2.compareTo(date1); } else { result = v2.toString().compareTo(v1.toString()); } if (result != 0) { return result; } } return 0; } }); isAsc = false; isDesc = true; return this; } public DataSet addAllUpdateColumns() { for (DataRow row : rows) { row.addAllUpdateColumns(); } return this; } public DataSet clearUpdateColumns() { for (DataRow row : rows) { row.clearUpdateColumns(); } return this; } public DataSet removeNull(String... 
keys) { for (DataRow row : rows) { row.removeNull(keys); } return this; } private static String key(String key) { if (null != key && ConfigTable.IS_UPPER_KEY) { key = key.toUpperCase(); } return key; } /** * 替换所有NULL值 * * @param value value * @return return */ public DataSet replaceNull(String value) { for (DataRow row : rows) { row.replaceNull(value); } return this; } /** * 替换所有空值 * * @param value value * @return return */ public DataSet replaceEmpty(String value) { for (DataRow row : rows) { row.replaceEmpty(value); } return this; } /** * 替换所有NULL值 * * @param key key * @param value value * @return return */ public DataSet replaceNull(String key, String value) { for (DataRow row : rows) { row.replaceNull(key, value); } return this; } /** * 替换所有空值 * * @param key key * @param value value * @return return */ public DataSet replaceEmpty(String key, String value) { for (DataRow row : rows) { row.replaceEmpty(key, value); } return this; } public DataSet replace(String key, String oldChar, String newChar) { if (null == key || null == oldChar || null == newChar) { return this; } for (DataRow row : rows) { row.replace(key, oldChar, newChar); } return this; } public DataSet replace(String oldChar, String newChar) { for (DataRow row : rows) { row.replace(oldChar, newChar); } return this; } /* ************************* 类sql操作 ************************************** */ /** * 随机取一行 * @return DataRow */ public DataRow random() { DataRow row = null; int size = size(); if (size > 0) { row = getRow(BasicUtil.getRandomNumber(0, size - 1)); } return row; } /** * 随机取qty行 * @param qty 行数 * @return DataSet */ public DataSet randoms(int qty) { DataSet set = new DataSet(); int size = size(); if (qty < 0) { qty = 0; } if (qty > size) { qty = size; } for (int i = 0; i < qty; i++) { while (true) { int idx = BasicUtil.getRandomNumber(0, size - 1); DataRow row = set.getRow(idx); if (!set.contains(row)) { set.add(row); break; } } } set.cloneProperty(this); return set; } /** * 随机取min到max行 * @param min min * @param max max * @return DataSet */ public DataSet randoms(int min, int max) { int qty = BasicUtil.getRandomNumber(min, max); return randoms(qty); } public DataSet unique(String... keys) { return distinct(keys); } /** * 根据正则提取集合 * @param key key * @param regex 正则 * @param mode 匹配方式 * @return DataSet */ public DataSet regex(String key, String regex, Regular.MATCH_MODE mode) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : this) { tmpValue = row.getString(key); if (RegularUtil.match(tmpValue, regex, mode)) { set.add(row); } } set.cloneProperty(this); return set; } public DataSet regex(String key, String regex) { return regex(key, regex, Regular.MATCH_MODE.MATCH); } public boolean checkRequired(String... 
keys) { for (DataRow row : rows) { if (!row.checkRequired(keys)) { return false; } } return true; } public Map<String, Object> getQueryParams() { return queryParams; } public DataSet setQueryParams(Map<String, Object> params) { this.queryParams = params; return this; } public Object getQueryParam(String key) { return queryParams.get(key); } public DataSet addQueryParam(String key, Object param) { queryParams.put(key, param); return this; } public String getDatalink() { return datalink; } public void setDatalink(String datalink) { this.datalink = datalink; } public class Select implements Serializable { private static final long serialVersionUID = 1L; private boolean ignoreCase = true; //是否忽略大小写 /** * 是否忽略NULL 如果设置成true 在执行equal notEqual like contains进 null与null比较返回false * 左右出现NULL时直接返回false * true会导致一行数据 equal notEqual都筛选不到 */ private boolean ignoreNull = true; public DataSet setIgnoreCase(boolean bol) { this.ignoreCase = bol; return DataSet.this; } public DataSet setIgnoreNull(boolean bol) { this.ignoreNull = bol; return DataSet.this; } /** * 筛选key=value的子集 * * @param key key * @param value value * @return DataSet */ public DataSet equals(String key, String value) { return equals(DataSet.this, key, value); } private DataSet equals(DataSet src, String key, String value) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == value) { continue; } } else { if (null == tmpValue && null == value) { set.add(row); continue; } } if (null != tmpValue) { boolean chk = false; if (ignoreCase) { chk = tmpValue.equalsIgnoreCase(value); } else { chk = tmpValue.equals(value); } if (chk) { set.add(row); } } } set.cloneProperty(src); return set; } /** * 筛选key != value的子集 * * @param key key * @param value value * @return DataSet */ public DataSet notEquals(String key, String value) { return notEquals(DataSet.this, key, value); } private DataSet notEquals(DataSet src, String key, String value) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == value) { continue; } } else { if (null == tmpValue && null == value) { set.add(row); continue; } } if (null != tmpValue) { boolean chk = false; if (ignoreCase) { chk = !tmpValue.equalsIgnoreCase(value); } else { chk = !tmpValue.equals(value); } if (chk) { set.add(row); } } } set.cloneProperty(src); return set; } /** * 筛选key列的值是否包含value的子集 * * @param key key * @param value value * @return DataSet */ public DataSet contains(String key, String value) { return contains(DataSet.this, key, value); } private DataSet contains(DataSet src, String key, String value) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == value) { continue; } } else { if (null == tmpValue && null == value) { set.add(row); continue; } } if (null != tmpValue) { if (null == value) { continue; } if (ignoreCase) { tmpValue = tmpValue.toLowerCase(); value = value.toLowerCase(); } if (tmpValue.contains(value)) { set.add(row); } } } set.cloneProperty(src); return set; } /** * 筛选key列的值like pattern的子集,pattern遵循sql通配符的规则,%表示任意个字符,_表示一个字符 * * @param key 列 * @param pattern 表达式 * @return DataSet */ public DataSet like(String key, String pattern) { return like(DataSet.this, key, pattern); } private DataSet like(DataSet src, String key, String pattern) { DataSet set = new DataSet(); if (null != pattern) { pattern = 
pattern.replace("!", "^").replace("_", "\\s|\\S").replace("%", "(\\s|\\S)*"); } String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == pattern) { continue; } } else { if (null == tmpValue && null == pattern) { set.add(row); continue; } } if (null != tmpValue) { if (null == pattern) { continue; } if (ignoreCase) { tmpValue = tmpValue.toLowerCase(); pattern = pattern.toLowerCase(); } if (RegularUtil.match(tmpValue, pattern, Regular.MATCH_MODE.MATCH)) { set.add(row); } } } set.cloneProperty(src); return set; } public DataSet notLike(String key, String pattern) { return notLike(DataSet.this, key, pattern); } private DataSet notLike(DataSet src, String key, String pattern) { DataSet set = new DataSet(); if (null == pattern) { return set; } pattern = pattern.replace("!", "^").replace("_", "\\s|\\S").replace("%", "(\\s|\\S)*"); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == pattern) { continue; } } else { if (null == tmpValue && null == pattern) { set.add(row); continue; } } if (null != tmpValue) { if (null == pattern) { continue; } if (ignoreCase) { tmpValue = tmpValue.toLowerCase(); pattern = pattern.toLowerCase(); } if (!RegularUtil.match(tmpValue, pattern, Regular.MATCH_MODE.MATCH)) { set.add(row); } } } set.cloneProperty(src); return set; } public DataSet startWith(String key, String prefix) { return startWith(DataSet.this, key, prefix); } private DataSet startWith(DataSet src, String key, String prefix) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == prefix) { continue; } } else { if (null == tmpValue && null == prefix) { set.add(row); continue; } } if (null != tmpValue) { if (null == prefix) { continue; } if (ignoreCase) { tmpValue = tmpValue.toLowerCase(); prefix = prefix.toLowerCase(); } if (tmpValue.startsWith(prefix)) { set.add(row); } } } set.cloneProperty(src); return set; } public DataSet endWith(String key, String suffix) { return endWith(DataSet.this, key, suffix); } private DataSet endWith(DataSet src, String key, String suffix) { DataSet set = new DataSet(); String tmpValue; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull) { if (null == tmpValue || null == suffix) { continue; } } else { if (null == tmpValue && null == suffix) { set.add(row); continue; } } if (null != tmpValue) { if (null == suffix) { continue; } if (ignoreCase) { tmpValue = tmpValue.toLowerCase(); suffix = suffix.toLowerCase(); } if (tmpValue.endsWith(suffix)) { set.add(row); } } } set.cloneProperty(src); return set; } public <T> DataSet in(String key, T... values) { return in(DataSet.this, key, BeanUtil.array2list(values)); } public <T> DataSet in(String key, Collection<T> values) { return in(DataSet.this, key, values); } private <T> DataSet in(DataSet src, String key, Collection<T> values) { DataSet set = new DataSet(); for (DataRow row : src) { if (BasicUtil.containsString(ignoreNull, ignoreCase, values, row.getString(key))) { set.add(row); } } set.cloneProperty(src); return set; } public <T> DataSet notIn(String key, T... 
values) { return notIn(DataSet.this, key, BeanUtil.array2list(values)); } public <T> DataSet notIn(String key, Collection<T> values) { return notIn(DataSet.this, key, values); } private <T> DataSet notIn(DataSet src, String key, Collection<T> values) { DataSet set = new DataSet(); if (null != values) { String tmpValue = null; for (DataRow row : src) { tmpValue = row.getString(key); if (ignoreNull && null == tmpValue) { continue; } if (!BasicUtil.containsString(ignoreNull, ignoreCase, values, tmpValue)) { set.add(row); } } } set.cloneProperty(src); return set; } public DataSet isNull(String... keys) { return isNull(DataSet.this, keys); } private DataSet isNull(DataSet src, String... keys) { DataSet set = src; if (null != keys) { for (String key : keys) { set = isNull(set, key); } } return set; } private DataSet isNull(DataSet src, String key) { DataSet set = new DataSet(); for(DataRow row:src){ if(null == row.get(key)){ set.add(row); } } return set; } public DataSet isNotNull(String... keys) { return isNotNull(DataSet.this, keys); } private DataSet isNotNull(DataSet src, String... keys) { DataSet set = src; if (null != keys) { for (String key : keys) { set = isNotNull(set, key); } } return set; } private DataSet isNotNull(DataSet src, String key) { DataSet set = new DataSet(); for(DataRow row:src){ if(null != row.get(key)){ set.add(row); } } return set; } public DataSet notNull(String... keys) { return isNotNull(keys); } public DataSet isEmpty(String... keys) { return isEmpty(DataSet.this, keys); } private DataSet isEmpty(DataSet src, String... keys) { DataSet set = src; if (null != keys) { for (String key : keys) { set = isEmpty(set, key); } } return set; } private DataSet isEmpty(DataSet src, String key) { DataSet set = new DataSet(); for(DataRow row:src){ if(row.isEmpty(key)){ set.add(row); } } return set; } public DataSet empty(String... keys) { return isEmpty(keys); } public DataSet isNotEmpty(String... keys) { return isNotEmpty(DataSet.this, keys); } private DataSet isNotEmpty(DataSet src, String... keys) { DataSet set = src; if (null != keys) { for (String key : keys) { set = isNotEmpty(set, key); } } return set; } private DataSet isNotEmpty(DataSet src, String key) { DataSet set = new DataSet(); for(DataRow row:src){ if(row.isNotEmpty(key)){ set.add(row); } } return set; } public DataSet notEmpty(String... 
keys) { return isNotEmpty(keys); } public <T> DataSet less(String key, T value) { return less(DataSet.this, key, value); } private <T> DataSet less(DataSet src, String key, T value) { DataSet set = new DataSet(); if (null == value) { return set; } if (BasicUtil.isNumber(value)) { BigDecimal number = new BigDecimal(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getDecimal(key, 0).compareTo(number) < 0) { set.add(row); } } } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) { Date date = DateUtil.parse(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.isNotEmpty(key) && DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) < 0) { set.add(row); } } } else { for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getString(key).compareTo(value.toString()) < 0) { set.add(row); } } } set.cloneProperty(src); return set; } public <T> DataSet lessEqual(String key, T value) { return lessEqual(DataSet.this, key, value); } private <T> DataSet lessEqual(DataSet src, String key, T value) { DataSet set = new DataSet(); if (null == value) { return set; } if (BasicUtil.isNumber(value)) { BigDecimal number = new BigDecimal(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getDecimal(key, 0).compareTo(number) <= 0) { set.add(row); } } } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) { Date date = DateUtil.parse(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.isNotEmpty(key) && DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) <= 0) { set.add(row); } } } else { for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getString(key).compareTo(value.toString()) >= 0) { set.add(row); } } } set.cloneProperty(src); return set; } public <T> DataSet greater(String key, T value) { return greater(DataSet.this, key, value); } private <T> DataSet greater(DataSet src, String key, T value) { DataSet set = new DataSet(); if (null == value) { return set; } if (BasicUtil.isNumber(value)) { BigDecimal number = new BigDecimal(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getDecimal(key, 0).compareTo(number) > 0) { set.add(row); } } } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) { Date date = DateUtil.parse(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.isNotEmpty(key) && DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) > 0) { set.add(row); } } } else { for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getString(key).compareTo(value.toString()) > 0) { set.add(row); } } } set.cloneProperty(src); return set; } public <T> DataSet greaterEqual(String key, T value) { return greaterEqual(DataSet.this, key, value); } private <T> DataSet greaterEqual(DataSet src, String key, T value) { DataSet set = new DataSet(); if (null == value) { return set; } if (BasicUtil.isNumber(value)) { BigDecimal number = new BigDecimal(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getDecimal(key, 0).compareTo(number) >= 0) { set.add(row); } } } else if (BasicUtil.isDate(value) || BasicUtil.isDateTime(value)) { Date date = DateUtil.parse(value.toString()); for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.isNotEmpty(key) 
&& DateUtil.diff(DateUtil.DATE_PART_MILLISECOND, date, row.getDate(key, new Date())) >= 0) { set.add(row); } } } else { for (DataRow row : src) { if (null == row.get(key)) { continue; } if (row.getString(key).compareTo(value.toString()) >= 0) { set.add(row); } } } set.cloneProperty(src); return set; } public <T> DataSet between(String key, T min, T max) { return between(DataSet.this, key, min, max); } private <T> DataSet between(DataSet src, String key, T min, T max) { DataSet set = greaterEqual(src, key, min); set = lessEqual(set, key, max); return set; } } public Select select = new Select(); }<|fim▁end|>
}
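The `DataSet` row above documents its `pivot` method only through inline (Chinese-language) comments: given long-format rows keyed by an identifier, a classification key and a value key, it produces one wide row per identifier with one column per classification value. Below is a minimal, self-contained Java sketch of that row-to-column idea using plain collections; it is an illustration only and does not use the real `DataSet`/`DataRow` API, whose full definitions are not part of this excerpt.

```java
import java.util.*;

/** Illustrates the row-to-column (pivot) transform described in the DataSet comments. */
public class PivotSketch {
    public static void main(String[] args) {
        // Long-format rows: (name, subject, score), mirroring the pivot example in the comments.
        List<Map<String, Object>> rows = List.of(
                Map.of("name", "Zhang San", "subject", "math", "score", 100),
                Map.of("name", "Zhang San", "subject", "physics", "score", 90),
                Map.of("name", "Li Si", "subject", "math", "score", 100),
                Map.of("name", "Li Si", "subject", "physics", "score", 80));

        // Roughly pivot(pk = "name", classKey = "subject", valueKey = "score").
        Map<String, Map<String, Object>> wide = new LinkedHashMap<>();
        for (Map<String, Object> row : rows) {
            String pk = (String) row.get("name");
            String cls = (String) row.get("subject");
            wide.computeIfAbsent(pk, k -> new LinkedHashMap<>())
                .put(cls, row.get("score"));
        }

        // Prints {Zhang San={math=100, physics=90}, Li Si={math=100, physics=80}}
        System.out.println(wide);
    }
}
```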
<|file_name|>ar1TestScript.py<|end_file_name|><|fim▁begin|>from pybrain.rl.environments.timeseries.maximizereturntask import DifferentialSharpeRatioTask<|fim▁hole|> from matplotlib import pyplot """ This script aims to create a trading model that trades on a simple AR(1) process """ env=AR1Environment(2000) task=DifferentialSharpeRatioTask(env) learner = Q_LinFA(2,1) agent = LinearFA_Agent(learner) exp = ContinuousExperiment(task,agent) from decimal import Decimal ts=env.ts.tolist() exp.doInteractionsAndLearn(1999) actionHist=env.actionHistory pyplot.plot(ts[0]) pyplot.plot(actionHist) pyplot.show() #snp_rets=env.importSnP().tolist()[0] #print(snp_rets.tolist()[0]) #pyplot.plot(snp_rets) #pyplot.show() #cumret= cumsum(multiply(ts,actionHist)) #exp.doInteractions(200)<|fim▁end|>
from pybrain.rl.environments.timeseries.timeseries import AR1Environment, SnPEnvironment from pybrain.rl.learners.valuebased.linearfa import Q_LinFA from pybrain.rl.agents.linearfa import LinearFA_Agent from pybrain.rl.experiments import ContinuousExperiment
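The `ar1TestScript.py` pair above trains a linear Q-learning agent against an `AR1Environment`, i.e. a first-order autoregressive series x_t = phi * x_{t-1} + eps_t. A short Java sketch of generating such a series follows; phi and the noise scale are assumed values, since the real environment's parameters are not shown in this excerpt.

```java
import java.util.Random;

/** Generates a toy AR(1) series x_t = phi * x_{t-1} + eps_t (illustration only). */
public class Ar1Sketch {
    public static double[] generate(int n, double phi, double noiseStd, long seed) {
        Random rng = new Random(seed);
        double[] x = new double[n];
        for (int t = 1; t < n; t++) {
            x[t] = phi * x[t - 1] + noiseStd * rng.nextGaussian();
        }
        return x;
    }

    public static void main(String[] args) {
        // 2000 steps, mirroring AR1Environment(2000) in the script; phi/noise are assumed values.
        double[] series = generate(2000, 0.9, 1.0, 42L);
        System.out.printf("first values: %.3f %.3f %.3f%n", series[0], series[1], series[2]);
    }
}
```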
<|file_name|>vm_env.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The go-ethereum Authors // This file is part of the go-ethereum library. // // The go-ethereum library is free software: you can redistribute it and/or modify // it under the terms of the GNU Lesser General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // The go-ethereum library is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Lesser General Public License for more details. // // You should have received a copy of the GNU Lesser General Public License // along with the go-ethereum library. If not, see <http://www.gnu.org/licenses/>. package light import ( "math/big" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/core/vm" "github.com/ethereum/go-ethereum/crypto" "github.com/ethereum/go-ethereum/params" "golang.org/x/net/context" ) // VMEnv is the light client version of the vm execution environment. // Unlike other structures, VMEnv holds a context that is applied by state // retrieval requests through the entire execution. If any state operation // returns an error, the execution fails. type VMEnv struct { vm.Environment ctx context.Context chainConfig *params.ChainConfig evm *vm.EVM state *VMState header *types.Header msg core.Message depth int chain *LightChain err error } // NewEnv creates a new execution environment based on an ODR capable light state func NewEnv(ctx context.Context, state *LightState, chainConfig *params.ChainConfig, chain *LightChain, msg core.Message, header *types.Header, cfg vm.Config) *VMEnv { env := &VMEnv{ chainConfig: chainConfig, chain: chain, header: header, msg: msg, } env.state = &VMState{ctx: ctx, state: state, env: env} env.evm = vm.New(env, cfg) return env } func (self *VMEnv) ChainConfig() *params.ChainConfig { return self.chainConfig } func (self *VMEnv) Vm() vm.Vm { return self.evm } func (self *VMEnv) Origin() common.Address { return self.msg.From() } func (self *VMEnv) BlockNumber() *big.Int { return self.header.Number } func (self *VMEnv) Coinbase() common.Address { return self.header.Coinbase } func (self *VMEnv) Time() *big.Int { return self.header.Time } func (self *VMEnv) Difficulty() *big.Int { return self.header.Difficulty } func (self *VMEnv) GasLimit() *big.Int { return self.header.GasLimit } func (self *VMEnv) Db() vm.Database { return self.state } func (self *VMEnv) Depth() int { return self.depth } func (self *VMEnv) SetDepth(i int) { self.depth = i } func (self *VMEnv) GetHash(n uint64) common.Hash { for header := self.chain.GetHeader(self.header.ParentHash, self.header.Number.Uint64()-1); header != nil; header = self.chain.GetHeader(header.ParentHash, header.Number.Uint64()-1) { if header.Number.Uint64() == n { return header.Hash() } } return common.Hash{} } func (self *VMEnv) AddLog(log *vm.Log) { //self.state.AddLog(log) } func (self *VMEnv) CanTransfer(from common.Address, balance *big.Int) bool { return self.state.GetBalance(from).Cmp(balance) >= 0 } func (self *VMEnv) SnapshotDatabase() int { return self.state.SnapshotDatabase() } func (self *VMEnv) RevertToSnapshot(idx int) { self.state.RevertToSnapshot(idx) } func (self *VMEnv) Transfer(from, to vm.Account, amount *big.Int) { core.Transfer(from, to, amount) } func (self 
*VMEnv) Call(me vm.ContractRef, addr common.Address, data []byte, gas, price, value *big.Int) ([]byte, error) { return core.Call(self, me, addr, data, gas, price, value) } func (self *VMEnv) CallCode(me vm.ContractRef, addr common.Address, data []byte, gas, price, value *big.Int) ([]byte, error) { return core.CallCode(self, me, addr, data, gas, price, value) } func (self *VMEnv) DelegateCall(me vm.ContractRef, addr common.Address, data []byte, gas, price *big.Int) ([]byte, error) { return core.DelegateCall(self, me, addr, data, gas, price) } func (self *VMEnv) Create(me vm.ContractRef, data []byte, gas, price, value *big.Int) ([]byte, common.Address, error) { return core.Create(self, me, data, gas, price, value) } // Error returns the error (if any) that happened during execution. func (self *VMEnv) Error() error { return self.err } // VMState is a wrapper for the light state that holds the actual context and // passes it to any state operation that requires it. type VMState struct { vm.Database ctx context.Context state *LightState snapshots []*LightState env *VMEnv } // errHandler handles and stores any state error that happens during execution. func (s *VMState) errHandler(err error) { if err != nil && s.env.err == nil { s.env.err = err } }<|fim▁hole|> return len(self.snapshots) - 1 } func (self *VMState) RevertToSnapshot(idx int) { self.state.Set(self.snapshots[idx]) self.snapshots = self.snapshots[:idx] } // GetAccount returns the account object of the given account or nil if the // account does not exist func (s *VMState) GetAccount(addr common.Address) vm.Account { so, err := s.state.GetStateObject(s.ctx, addr) s.errHandler(err) if err != nil { // return a dummy state object to avoid panics so = s.state.newStateObject(addr) } return so } // CreateAccount creates creates a new account object and takes ownership. 
func (s *VMState) CreateAccount(addr common.Address) vm.Account { so, err := s.state.CreateStateObject(s.ctx, addr) s.errHandler(err) if err != nil { // return a dummy state object to avoid panics so = s.state.newStateObject(addr) } return so } // AddBalance adds the given amount to the balance of the specified account func (s *VMState) AddBalance(addr common.Address, amount *big.Int) { err := s.state.AddBalance(s.ctx, addr, amount) s.errHandler(err) } // GetBalance retrieves the balance from the given address or 0 if the account does // not exist func (s *VMState) GetBalance(addr common.Address) *big.Int { res, err := s.state.GetBalance(s.ctx, addr) s.errHandler(err) return res } // GetNonce returns the nonce at the given address or 0 if the account does // not exist func (s *VMState) GetNonce(addr common.Address) uint64 { res, err := s.state.GetNonce(s.ctx, addr) s.errHandler(err) return res } // SetNonce sets the nonce of the specified account func (s *VMState) SetNonce(addr common.Address, nonce uint64) { err := s.state.SetNonce(s.ctx, addr, nonce) s.errHandler(err) } // GetCode returns the contract code at the given address or nil if the account // does not exist func (s *VMState) GetCode(addr common.Address) []byte { res, err := s.state.GetCode(s.ctx, addr) s.errHandler(err) return res } // GetCodeHash returns the contract code hash at the given address func (s *VMState) GetCodeHash(addr common.Address) common.Hash { res, err := s.state.GetCode(s.ctx, addr) s.errHandler(err) return crypto.Keccak256Hash(res) } // GetCodeSize returns the contract code size at the given address func (s *VMState) GetCodeSize(addr common.Address) int { res, err := s.state.GetCode(s.ctx, addr) s.errHandler(err) return len(res) } // SetCode sets the contract code at the specified account func (s *VMState) SetCode(addr common.Address, code []byte) { err := s.state.SetCode(s.ctx, addr, code) s.errHandler(err) } // AddRefund adds an amount to the refund value collected during a vm execution func (s *VMState) AddRefund(gas *big.Int) { s.state.AddRefund(gas) } // GetRefund returns the refund value collected during a vm execution func (s *VMState) GetRefund() *big.Int { return s.state.GetRefund() } // GetState returns the contract storage value at storage address b from the // contract address a or common.Hash{} if the account does not exist func (s *VMState) GetState(a common.Address, b common.Hash) common.Hash { res, err := s.state.GetState(s.ctx, a, b) s.errHandler(err) return res } // SetState sets the storage value at storage address key of the account addr func (s *VMState) SetState(addr common.Address, key common.Hash, value common.Hash) { err := s.state.SetState(s.ctx, addr, key, value) s.errHandler(err) } // Suicide marks an account to be removed and clears its balance func (s *VMState) Suicide(addr common.Address) bool { res, err := s.state.Suicide(s.ctx, addr) s.errHandler(err) return res } // Exist returns true if an account exists at the given address func (s *VMState) Exist(addr common.Address) bool { res, err := s.state.HasAccount(s.ctx, addr) s.errHandler(err) return res } // Empty returns true if the account at the given address is considered empty func (s *VMState) Empty(addr common.Address) bool { so, err := s.state.GetStateObject(s.ctx, addr) s.errHandler(err) return so == nil || so.empty() } // HasSuicided returns true if the given account has been marked for deletion // or false if the account does not exist func (s *VMState) HasSuicided(addr common.Address) bool { res, err := 
s.state.HasSuicided(s.ctx, addr) s.errHandler(err) return res }<|fim▁end|>
func (self *VMState) SnapshotDatabase() int { self.snapshots = append(self.snapshots, self.state.Copy())
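The `vm_env.go` pair above wraps the light-client state so that every lookup funnels through `errHandler`, which records only the first failure and lets execution continue with fallback values; the error is surfaced once at the end via `Error()`. The same pattern is sketched below in Java with illustrative types, not the go-ethereum API.

```java
import java.util.Optional;

/** Sketch of the "record the first error, keep returning fallbacks" wrapper used by VMState. */
public class FirstErrorRecorder {
    private Exception firstError;

    /** Remember only the first failure; later calls keep the original cause. */
    private void record(Exception e) {
        if (e != null && firstError == null) {
            firstError = e;
        }
    }

    /** Runs a lookup; on failure records the error and returns the fallback instead of throwing. */
    public <T> T getOr(Lookup<T> lookup, T fallback) {
        try {
            return lookup.get();
        } catch (Exception e) {
            record(e);
            return fallback;
        }
    }

    public Optional<Exception> error() {
        return Optional.ofNullable(firstError);
    }

    @FunctionalInterface
    public interface Lookup<T> {
        T get() throws Exception;
    }

    public static void main(String[] args) {
        FirstErrorRecorder state = new FirstErrorRecorder();
        long balance = state.getOr(() -> { throw new Exception("retrieval failed"); }, 0L);
        // Execution continued with the fallback; the first error is reported once at the end.
        System.out.println(balance + " / " + state.error().map(Exception::getMessage).orElse("ok"));
    }
}
```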
<|file_name|>crawl.py<|end_file_name|><|fim▁begin|>__author__ = 'vg' import io import dircache import sys, os <|fim▁hole|>#default path base_dir = os.path.dirname('C:\\c-list\\country\\icu\\') s = dircache.listdir(base_dir) #creating country code list our_codes = io.open('our_codes.txt', mode='r',encoding='utf8').readlines() code_list = [] for k in our_codes: code_list.append(k.rstrip('\n')) #iso links lang_codes = io.open('lang_code.txt', mode='r', encoding='utf8').readlines() lang = {} for l in lang_codes: lang[l[:2]] = l[3:5] #lang[l[3:5]] = l[:2] print lang print len(lang) #lang_to_lang.txt lang_to_lang = io.open('lang_to_lang.txt', mode='r', encoding='utf8').readlines() lang_to = {} lang_to1 = {} for l in lang_to_lang: lang_to1[l[:2]] = l[3:5] lang_to[l[3:5]] = l[:2] print lang_to print lang_to1 print len(lang_to) print len(lang_to1) #parsing files dir_folder = io.open('all_our_countries.txt', mode='w', encoding='utf8') full_codes = io.open('full_list_lang_to_lang.txt', mode='w', encoding='utf8') for i in s: folder = io.open(base_dir+"\\"+i+'\country.txt', mode='r', encoding='utf8') for f in folder: f1 = f.index('(') #start index in line f2 = f.index(')') #last index in line f3 = f[f1+1:f2] # Code of county if len(i) == 2 and len(f3) == 2 and f3.lower() in code_list: f4 = f.replace(f[f1-1:f2+1], '') dir_folder.writelines(f3+'\t'+i+'\t'+f4) print f3 if f3.lower() in lang.keys() and i in lang_to.values(): full_codes.writelines(f3+'\t'+lang_to1[i]+'\t'+f4) folder.close() dir_folder.close()<|fim▁end|>
<|file_name|>totalstatistics.py<|end_file_name|><|fim▁begin|># Copyright 2008-2015 Nokia Networks # Copyright 2016- Robot Framework Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from .stats import TotalStat from .visitor import SuiteVisitor class TotalStatistics(object): """Container for total statistics.""" def __init__(self): #: Instance of :class:`~robot.model.stats.TotalStat` for critical tests. self.critical = TotalStat('Critical Tests') #: Instance of :class:`~robot.model.stats.TotalStat` for all the tests. self.all = TotalStat('All Tests') def visit(self, visitor): visitor.visit_total_statistics(self) def __iter__(self): return iter([self.critical, self.all]) @property def message(self): """String representation of the statistics. For example:: 2 critical tests, 1 passed, 1 failed 2 tests total, 1 passed, 1 failed """ ctotal, cend, cpass, cfail = self._get_counts(self.critical) atotal, aend, apass, afail = self._get_counts(self.all) return ('%d critical test%s, %d passed, %d failed\n' '%d test%s total, %d passed, %d failed' % (ctotal, cend, cpass, cfail, atotal, aend, apass, afail)) def _get_counts(self, stat): ending = 's' if stat.total != 1 else '' return stat.total, ending, stat.passed, stat.failed class TotalStatisticsBuilder(SuiteVisitor): def __init__(self, suite=None): self.stats = TotalStatistics() if suite: suite.visit(self)<|fim▁hole|> self.stats.critical.add_test(test) def visit_test(self, test): self.add_test(test) def visit_keyword(self, kw): pass<|fim▁end|>
def add_test(self, test): self.stats.all.add_test(test) if test.critical:
<|file_name|>quic_packet_reader.cc<|end_file_name|><|fim▁begin|>// Copyright 2015 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "net/tools/quic/quic_packet_reader.h" #include <errno.h> #ifndef __APPLE__ // This is a GNU header that is not present in /usr/include on MacOS #include <features.h> #endif #include <string.h> #include <sys/epoll.h> #include "base/logging.h" #include "net/base/ip_endpoint.h" #include "net/quic/quic_flags.h" #include "net/tools/quic/quic_dispatcher.h" #include "net/tools/quic/quic_socket_utils.h" #define MMSG_MORE 0 #ifndef SO_RXQ_OVFL #define SO_RXQ_OVFL 40 #endif namespace net { namespace tools { QuicPacketReader::QuicPacketReader() { Initialize(); } void QuicPacketReader::Initialize() { // Zero initialize uninitialized memory. memset(cbuf_, 0, arraysize(cbuf_)); memset(buf_, 0, arraysize(buf_)); memset(raw_address_, 0, sizeof(raw_address_)); memset(mmsg_hdr_, 0, sizeof(mmsg_hdr_)); for (int i = 0; i < kNumPacketsPerReadMmsgCall; ++i) { iov_[i].iov_base = buf_ + (2 * kMaxPacketSize * i); iov_[i].iov_len = 2 * kMaxPacketSize; msghdr* hdr = &mmsg_hdr_[i].msg_hdr; hdr->msg_name = &raw_address_[i]; hdr->msg_namelen = sizeof(sockaddr_storage); hdr->msg_iov = &iov_[i]; hdr->msg_iovlen = 1; hdr->msg_control = cbuf_ + kSpaceForOverflowAndIp * i; hdr->msg_controllen = kSpaceForOverflowAndIp; } } QuicPacketReader::~QuicPacketReader() { } bool QuicPacketReader::ReadAndDispatchPackets( int fd, int port, ProcessPacketInterface* processor, QuicPacketCount* packets_dropped) { #if MMSG_MORE // Re-set the length fields in case recvmmsg has changed them. for (int i = 0; i < kNumPacketsPerReadMmsgCall; ++i) { iov_[i].iov_len = 2 * kMaxPacketSize; mmsg_hdr_[i].msg_len = 0; msghdr* hdr = &mmsg_hdr_[i].msg_hdr; hdr->msg_namelen = sizeof(sockaddr_storage); hdr->msg_iovlen = 1; hdr->msg_controllen = kSpaceForOverflowAndIp; } int packets_read = recvmmsg(fd, mmsg_hdr_, kNumPacketsPerReadMmsgCall, 0, nullptr); if (packets_read <= 0) {<|fim▁hole|> if (mmsg_hdr_[i].msg_len == 0) { continue; } IPEndPoint client_address = IPEndPoint(raw_address_[i]); IPAddressNumber server_ip = QuicSocketUtils::GetAddressFromMsghdr(&mmsg_hdr_[i].msg_hdr); if (!IsInitializedAddress(server_ip)) { LOG(DFATAL) << "Unable to get server address."; continue; } QuicEncryptedPacket packet(reinterpret_cast<char*>(iov_[i].iov_base), mmsg_hdr_[i].msg_len, false); IPEndPoint server_address(server_ip, port); processor->ProcessPacket(server_address, client_address, packet); } if (packets_dropped != nullptr) { QuicSocketUtils::GetOverflowFromMsghdr(&mmsg_hdr_[0].msg_hdr, packets_dropped); } if (FLAGS_quic_read_packets_full_recvmmsg) { // We may not have read all of the packets available on the socket. return packets_read == kNumPacketsPerReadMmsgCall; } else { return true; } #else LOG(FATAL) << "Unsupported"; return false; #endif } /* static */ bool QuicPacketReader::ReadAndDispatchSinglePacket( int fd, int port, ProcessPacketInterface* processor, QuicPacketCount* packets_dropped) { // Allocate some extra space so we can send an error if the packet is larger // than kMaxPacketSize. char buf[2 * kMaxPacketSize]; IPEndPoint client_address; IPAddressNumber server_ip; int bytes_read = QuicSocketUtils::ReadPacket( fd, buf, arraysize(buf), packets_dropped, &server_ip, &client_address); if (bytes_read < 0) { return false; // ReadPacket failed. 
} QuicEncryptedPacket packet(buf, bytes_read, false); IPEndPoint server_address(server_ip, port); processor->ProcessPacket(server_address, client_address, packet); // The socket read was successful, so return true even if packet dispatch // failed. return true; } } // namespace tools } // namespace net<|fim▁end|>
return false; // recvmmsg failed. } for (int i = 0; i < packets_read; ++i) {
<|file_name|>f.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*-<|fim▁hole|> pass assert f(1, 2) == 2 assert f(2, 6) == 3 assert f(3, 14) == 14<|fim▁end|>
def f(k, n):
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! An actor-based remote devtools server implementation. Only tested with //! nightly Firefox versions at time of writing. Largely based on //! reverse-engineering of Firefox chrome devtool logs and reading of //! [code](http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/). #![crate_name = "devtools"] #![crate_type = "rlib"] #![feature(int_uint, box_syntax, io, old_io, core, rustc_private)] #![feature(collections, std_misc)] #![allow(non_snake_case)] #[macro_use] extern crate log; extern crate collections; extern crate core; extern crate devtools_traits; extern crate "rustc-serialize" as rustc_serialize; extern crate msg; extern crate time; extern crate util; use actor::{Actor, ActorRegistry}; use actors::console::ConsoleActor; use actors::inspector::InspectorActor; use actors::root::RootActor; use actors::tab::TabActor; use protocol::JsonPacketStream; use devtools_traits::{ConsoleMessage, DevtoolsControlMsg}; use devtools_traits::{DevtoolsPageInfo, DevtoolScriptControlMsg}; use msg::constellation_msg::PipelineId; use util::task::spawn_named; use std::borrow::ToOwned; use std::cell::RefCell; use std::collections::HashMap; use std::sync::mpsc::{channel, Receiver, Sender}; use std::sync::mpsc::TryRecvError::{Disconnected, Empty}; use std::old_io::{TcpListener, TcpStream}; use std::old_io::{Acceptor, Listener, TimedOut}; use std::sync::{Arc, Mutex}; use time::precise_time_ns; mod actor; /// Corresponds to http://mxr.mozilla.org/mozilla-central/source/toolkit/devtools/server/actors/<|fim▁hole|> pub mod console; pub mod inspector; pub mod root; pub mod tab; } mod protocol; #[derive(RustcEncodable)] struct ConsoleAPICall { from: String, __type__: String, message: ConsoleMsg, } #[derive(RustcEncodable)] struct ConsoleMsg { level: String, timeStamp: u64, arguments: Vec<String>, filename: String, lineNumber: u32, columnNumber: u32, } /// Spin up a devtools server that listens for connections on the specified port. pub fn start_server(port: u16) -> Sender<DevtoolsControlMsg> { let (sender, receiver) = channel(); spawn_named("Devtools".to_owned(), move || { run_server(receiver, port) }); sender } static POLL_TIMEOUT: u64 = 300; fn run_server(receiver: Receiver<DevtoolsControlMsg>, port: u16) { let listener = TcpListener::bind(&*format!("{}:{}", "127.0.0.1", port)); // bind the listener to the specified address let mut acceptor = listener.listen().unwrap(); acceptor.set_timeout(Some(POLL_TIMEOUT)); let mut registry = ActorRegistry::new(); let root = box RootActor { tabs: vec!(), }; registry.register(root); registry.find::<RootActor>("root"); let actors = Arc::new(Mutex::new(registry)); let mut accepted_connections: Vec<TcpStream> = Vec::new(); let mut actor_pipelines: HashMap<PipelineId, String> = HashMap::new(); /// Process the input from a single devtools client until EOF. 
fn handle_client(actors: Arc<Mutex<ActorRegistry>>, mut stream: TcpStream) { println!("connection established to {}", stream.peer_name().unwrap()); { let actors = actors.lock().unwrap(); let msg = actors.find::<RootActor>("root").encodable(); stream.write_json_packet(&msg); } 'outer: loop { match stream.read_json_packet() { Ok(json_packet) => { let mut actors = actors.lock().unwrap(); match actors.handle_message(json_packet.as_object().unwrap(), &mut stream) { Ok(()) => {}, Err(()) => { println!("error: devtools actor stopped responding"); let _ = stream.close_read(); let _ = stream.close_write(); break 'outer } } } Err(e) => { println!("error: {}", e.desc); break 'outer } } } } // We need separate actor representations for each script global that exists; // clients can theoretically connect to multiple globals simultaneously. // TODO: move this into the root or tab modules? fn handle_new_global(actors: Arc<Mutex<ActorRegistry>>, pipeline: PipelineId, sender: Sender<DevtoolScriptControlMsg>, actor_pipelines: &mut HashMap<PipelineId, String>, page_info: DevtoolsPageInfo) { let mut actors = actors.lock().unwrap(); //TODO: move all this actor creation into a constructor method on TabActor let (tab, console, inspector) = { let console = ConsoleActor { name: actors.new_name("console"), script_chan: sender.clone(), pipeline: pipeline, streams: RefCell::new(Vec::new()), }; let inspector = InspectorActor { name: actors.new_name("inspector"), walker: RefCell::new(None), pageStyle: RefCell::new(None), highlighter: RefCell::new(None), script_chan: sender, pipeline: pipeline, }; let DevtoolsPageInfo { title, url } = page_info; let tab = TabActor { name: actors.new_name("tab"), title: title, url: url.serialize(), console: console.name(), inspector: inspector.name(), }; let root = actors.find_mut::<RootActor>("root"); root.tabs.push(tab.name.clone()); (tab, console, inspector) }; actor_pipelines.insert(pipeline, tab.name.clone()); actors.register(box tab); actors.register(box console); actors.register(box inspector); } fn handle_console_message(actors: Arc<Mutex<ActorRegistry>>, id: PipelineId, console_message: ConsoleMessage, actor_pipelines: &HashMap<PipelineId, String>) { let console_actor_name = find_console_actor(actors.clone(), id, actor_pipelines); let actors = actors.lock().unwrap(); let console_actor = actors.find::<ConsoleActor>(&console_actor_name); match console_message { ConsoleMessage::LogMessage(message, filename, lineNumber, columnNumber) => { let msg = ConsoleAPICall { from: console_actor.name.clone(), __type__: "consoleAPICall".to_string(), message: ConsoleMsg { level: "log".to_string(), timeStamp: precise_time_ns(), arguments: vec!(message), filename: filename, lineNumber: lineNumber, columnNumber: columnNumber, }, }; for stream in console_actor.streams.borrow_mut().iter_mut() { stream.write_json_packet(&msg); } } } } fn find_console_actor(actors: Arc<Mutex<ActorRegistry>>, id: PipelineId, actor_pipelines: &HashMap<PipelineId, String>) -> String { let actors = actors.lock().unwrap(); let ref tab_actor_name = (*actor_pipelines)[id]; let tab_actor = actors.find::<TabActor>(tab_actor_name); let console_actor_name = tab_actor.console.clone(); return console_actor_name; } //TODO: figure out some system that allows us to watch for new connections, // shut down existing ones at arbitrary times, and also watch for messages // from multiple script tasks simultaneously. 
Polling for new connections // for 300ms and then checking the receiver is not a good compromise // (and makes Servo hang on exit if there's an open connection, no less). // accept connections and process them, spawning a new tasks for each one loop { match acceptor.accept() { Err(ref e) if e.kind == TimedOut => { match receiver.try_recv() { Ok(DevtoolsControlMsg::ServerExitMsg) | Err(Disconnected) => break, Ok(DevtoolsControlMsg::NewGlobal(id, sender, pageinfo)) => handle_new_global(actors.clone(), id,sender, &mut actor_pipelines, pageinfo), Ok(DevtoolsControlMsg::SendConsoleMessage(id, console_message)) => handle_console_message(actors.clone(), id, console_message, &actor_pipelines), Err(Empty) => acceptor.set_timeout(Some(POLL_TIMEOUT)), } } Err(_e) => { /* connection failed */ } Ok(stream) => { let actors = actors.clone(); accepted_connections.push(stream.clone()); spawn_named("DevtoolsClientHandler".to_owned(), move || { // connection succeeded handle_client(actors, stream.clone()) }) } } } for connection in accepted_connections.iter_mut() { let _read = connection.close_read(); let _write = connection.close_write(); } }<|fim▁end|>
mod actors {
<|file_name|>data_import.py<|end_file_name|><|fim▁begin|># ~*~ encoding: utf-8 ~*~ from pymongo import MongoClient from pandas import read_csv from datetime import date mongodb = MongoClient('192.168.178.82', 9999) db = mongodb['dev'] drug_collection = db['drug'] drugs = read_csv('~/Dokumente/bfarm_lieferenpass_meldung.csv', delimiter=';', encoding='iso8859_2').to_dict() drugs.pop('Id', None) drugs.pop('aktuelle Bescheidart', None) drugs.pop('Meldungsart', None) drugs.pop('aktuelle Bescheidart', None) data = dict() for x in range(drugs['Verkehrsfähig'].__len__()): """ if drugs['Ende Engpass'][x] == '-': data['end'] = None else: day, month, year = drugs['Ende Engpass'][x].split('.') data['end'] = date(int(year), int(month), int(day)).__str__() if drugs['Beginn Engpass'][x] == '-': data['initial_report'] = None else: day, month, year = drugs['Beginn Engpass'][x].split('.') data['initial_report'] = date(int(year), int(month), int(day)).__str__() if drugs['Datum der letzten Meldung'][x] == '-': data['last_report'] = None else: day, month, year = drugs['Datum der letzten Meldung'][x].split('.') data['last_report'] = date(int(year), int(month), int(day)).__str__() """ data['substance'] = drugs['Wirkstoffe'][x].replace(' ', '').split(';') data['enr'] = int(drugs['Enr'][x]) data['marketability'] = True if drugs['Verkehrsfähig'][x] == 'ja' else False data['atc_code'] = drugs['ATC-Code'][x] data['pzn'] = int(drugs['PZN'][x].split(' ')[0].replace(';', '')) if drugs['PZN'][x] != '-' else None data['drug_title'] = drugs['Arzneimittelbezeichnung'][x] data['hospital'] = True if drugs['Krankenhausrelevant'][x] == 'ja' else False <|fim▁hole|><|fim▁end|>
drug_collection.update_one({'enr': data['enr']}, {'$set': data}, upsert=True)
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>#[cfg_attr(rustfmt, rustfmt_skip)] #[allow(unknown_lints)] #[allow(clippy::all)] mod grammar { // During the build step, `build.rs` will output the generated parser to `OUT_DIR` to avoid // adding it to the source directory, so we just directly include the generated parser here. // // Even with `.gitignore` and the `exclude` in the `Cargo.toml`, the generated parser can still // end up in the source directory. This could happen when `cargo build` builds the file out of // the Cargo cache (`$HOME/.cargo/registrysrc`), and the build script would then put its output // in that cached source directory because of https://github.com/lalrpop/lalrpop/issues/280. // Later runs of `cargo vendor` then copy the source from that directory, including the // generated file. include!(concat!(env!("OUT_DIR"), "/parser/grammar.rs")); } /// Contains all structures related to the AST for the WebIDL grammar. pub mod ast; /// Contains the visitor trait needed to traverse the AST and helper walk functions. pub mod visitor; pub use lalrpop_util::ParseError; use lexer::{LexicalError, Token}; /// The result that is returned when an input string is parsed. If the parse succeeds, the `Ok` /// result will be a vector of definitions representing the AST. If the parse fails, the `Err` will<|fim▁hole|>/// be either an error from the lexer or the parser. pub type ParseResult = Result<ast::AST, ParseError<usize, Token, LexicalError>>; /// Parses a given input string and returns an AST. /// /// # Example /// /// ``` /// use webidl::*; /// use webidl::ast::*; /// /// let result = parse_string("[Attribute] interface Node { };"); /// /// assert_eq!(result, /// Ok(vec![Definition::Interface(Interface::NonPartial(NonPartialInterface { /// extended_attributes: vec![ /// ExtendedAttribute::NoArguments( /// Other::Identifier("Attribute".to_string()))], /// inherits: None, /// members: vec![], /// name: "Node".to_string() /// }))])); /// ``` pub fn parse_string(input: &str) -> ParseResult { grammar::DefinitionsParser::new().parse(::Lexer::new(input)) }<|fim▁end|>
<|file_name|>070 Text Justification.py<|end_file_name|><|fim▁begin|>""" Given an array of words and a length L, format the text such that each line has exactly L characters and is fully (left and right) justified. You should pack your words in a greedy approach; that is, pack as many words as you can in each line. Pad extra spaces ' ' when necessary so that each line has exactly L characters. Extra spaces between words should be distributed as evenly as possible. If the number of spaces on a line do not divide evenly between words, the empty slots on the left will be assigned more spaces than the slots on the right. <|fim▁hole|>For the last line of text, it should be left justified and no extra space is inserted between words. For example, words: ["This", "is", "an", "example", "of", "text", "justification."] L: 16. Return the formatted lines as: [ "This is an", "example of text", "justification. " ] Note: Each word is guaranteed not to exceed L in length. click to show corner cases. Corner Cases: A line other than the last line might contain only one word. What should you do in this case? In this case, that line should be left-justified. """ __author__ = 'Danyang' class Solution: def fullJustify(self, words, L): """ :param words: a list of str :param L: int :return: a list of str """ result = [] self.break_line(words, L, result) return self.distribute_space(L, result) def break_line(self, words, L, result): if not words: return cur_length = -1 lst = [] i = 0 while i<len(words): word = words[i] cur_length += 1 # space in left justified cur_length += len(word) if cur_length>L: break lst.append(word) i += 1 result.append(lst) self.break_line(words[i:], L, result) def distribute_space(self, L, result): new_result = [] for ind, line in enumerate(result): word_cnt = len(line) str_builder = [] space_cnt = L-sum(len(word) for word in line) hole_cnt = word_cnt-1 if ind<len(result)-1: if hole_cnt>0: space = space_cnt/hole_cnt remain = space_cnt%hole_cnt for word in line[:-1]: str_builder.append(word) str_builder.append(" "*space) if remain>0: str_builder.append(" ") remain -= 1 str_builder.append(line[-1]) else: str_builder.append(line[-1]) str_builder.append(" "*space_cnt) else: # last line, special handling str_builder = [" ".join(line)] str_builder.append(" "*(space_cnt-hole_cnt)) new_result.append("".join(str_builder)) return new_result if __name__=="__main__": print Solution().fullJustify(["This", "is", "an", "example", "of", "text", "justification."], 16) print Solution().fullJustify(["What","must","be","shall","be."], 12)<|fim▁end|>
<|file_name|>ASiCSCAdESLevelLTTest.java<|end_file_name|><|fim▁begin|>/** * DSS - Digital Signature Services * Copyright (C) 2015 European Commission, provided under the CEF programme * * This file is part of the "DSS - Digital Signature Services" project. * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA */ package eu.europa.esig.dss.asic.cades.signature.asics; import eu.europa.esig.dss.asic.cades.ASiCWithCAdESContainerExtractor; import eu.europa.esig.dss.asic.cades.ASiCWithCAdESSignatureParameters; import eu.europa.esig.dss.asic.cades.ASiCWithCAdESTimestampParameters; import eu.europa.esig.dss.asic.cades.signature.ASiCWithCAdESService; import eu.europa.esig.dss.asic.common.ASiCContent; import eu.europa.esig.dss.diagnostic.DiagnosticData; import eu.europa.esig.dss.diagnostic.SignatureWrapper; import eu.europa.esig.dss.diagnostic.jaxb.XmlDigestMatcher; import eu.europa.esig.dss.enumerations.ASiCContainerType; import eu.europa.esig.dss.enumerations.SignatureLevel; import eu.europa.esig.dss.model.DSSDocument; import eu.europa.esig.dss.model.InMemoryDocument; import eu.europa.esig.dss.signature.DocumentSignatureService; import eu.europa.esig.dss.utils.Utils; import eu.europa.esig.dss.validation.SignedDocumentValidator; import eu.europa.esig.dss.validation.reports.Reports; import org.junit.jupiter.api.BeforeEach; import java.util.Arrays; import java.util.Date; import java.util.List; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; public class ASiCSCAdESLevelLTTest extends AbstractASiCSCAdESTestSignature { private DocumentSignatureService<ASiCWithCAdESSignatureParameters, ASiCWithCAdESTimestampParameters> service; private ASiCWithCAdESSignatureParameters signatureParameters; private DSSDocument documentToSign; @BeforeEach public void init() throws Exception { documentToSign = new InMemoryDocument("Hello World !".getBytes(), "test.text"); signatureParameters = new ASiCWithCAdESSignatureParameters(); signatureParameters.bLevel().setSigningDate(new Date()); signatureParameters.setSigningCertificate(getSigningCert()); signatureParameters.setCertificateChain(getCertificateChain()); signatureParameters.setSignatureLevel(SignatureLevel.CAdES_BASELINE_LT); signatureParameters.aSiC().setContainerType(ASiCContainerType.ASiC_S); service = new ASiCWithCAdESService(getCompleteCertificateVerifier()); service.setTspSource(getGoodTsa()); } @Override protected void onDocumentSigned(byte[] byteArray) { super.onDocumentSigned(byteArray); ASiCWithCAdESContainerExtractor containerExtractor = new ASiCWithCAdESContainerExtractor(new InMemoryDocument(byteArray)); ASiCContent result = containerExtractor.extract(); List<DSSDocument> signatureDocuments = result.getSignatureDocuments(); 
assertTrue(Utils.isCollectionNotEmpty(signatureDocuments)); for (DSSDocument signatureDocument : signatureDocuments) { // validate with no detached content DiagnosticData diagnosticData = validateDocument(signatureDocument); SignatureWrapper signature = diagnosticData.getSignatureById(diagnosticData.getFirstSignatureId()); List<XmlDigestMatcher> digestMatchers = signature.getDigestMatchers(); assertEquals(1, digestMatchers.size()); assertFalse(digestMatchers.get(0).isDataFound()); assertFalse(digestMatchers.get(0).isDataIntact()); // with detached content diagnosticData = validateDocument(signatureDocument, Arrays.asList(getSignedData(result))); signature = diagnosticData.getSignatureById(diagnosticData.getFirstSignatureId()); digestMatchers = signature.getDigestMatchers(); assertEquals(1, digestMatchers.size()); assertTrue(digestMatchers.get(0).isDataFound()); assertTrue(digestMatchers.get(0).isDataIntact()); } } private DiagnosticData validateDocument(DSSDocument signatureDocument) { return validateDocument(signatureDocument, null); } private DiagnosticData validateDocument(DSSDocument signatureDocument, List<DSSDocument> detachedContents) { SignedDocumentValidator validator = SignedDocumentValidator.fromDocument(signatureDocument); validator.setCertificateVerifier(getOfflineCertificateVerifier()); if (Utils.isCollectionNotEmpty(detachedContents)) { validator.setDetachedContents(detachedContents); } Reports reports = validator.validateDocument(); return reports.getDiagnosticData(); } @Override protected DocumentSignatureService<ASiCWithCAdESSignatureParameters, ASiCWithCAdESTimestampParameters> getService() { return service; } @Override protected ASiCWithCAdESSignatureParameters getSignatureParameters() { return signatureParameters; } @Override protected DSSDocument getDocumentToSign() { return documentToSign; }<|fim▁hole|> return GOOD_USER; } }<|fim▁end|>
@Override protected String getSigningAlias() {
<|file_name|>characters.js<|end_file_name|><|fim▁begin|>export default { A: [[1,0],[2,0],[3,0],[0,1],[4,1],[0,2],[1,2],[2,2],[3,2],[4,2],[0,3],[4,3],[0,4],[4,4]], B: [[0,0],[1,0],[2,0],[3,0],[0,1],[4,1],[0,2],[1,2],[2,2],[3,2],[0,3],[4,3],[0,4],[1,4],[2,4],[3,4]], C: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[0,2],[0,3],[0,4],[1,4],[2,4],[3,4],[4,4]], D: [[0,0],[1,0],[2,0],[3,0],[0,1],[4,1],[0,2],[4,2],[0,3],[4,3],[0,4],[1,4],[2,4],[3,4]], E: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[0,2],[1,2],[2,2],[0,3],[0,4],[1,4],[2,4],[3,4],[4,4]], F: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[0,2],[1,2],[2,2],[3,2],[0,3],[0,4]], G: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[0,2],[2,2],[3,2],[4,2],[0,3],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], H: [[0,0],[4,0],[0,1],[4,1],[0,2],[1,2],[2,2],[3,2],[4,2],[0,3],[4,3],[0,4],[4,4]], I: [[0,0],[1,0],[2,0],[3,0],[4,0],[2,1],[2,2],[2,3],[0,4],[1,4],[2,4],[3,4],[4,4]], J: [[4,0],[4,1],[4,2],[0,3],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], K: [[0,0],[4,0],[0,1],[3,1],[0,2],[1,2],[2,2],[0,3],[3,3],[0,4],[4,4]],<|fim▁hole|> O: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[4,1],[0,2],[4,2],[0,3],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], P: [[0,0],[1,0],[2,0],[3,0],[0,1],[4,1],[0,2],[1,2],[2,2],[3,2],[4,2],[0,3],[0,4]], Q: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[4,1],[0,2],[4,2],[0,3],[3,3],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], R: [[0,0],[1,0],[2,0],[3,0],[0,1],[4,1],[0,2],[1,2],[2,2],[3,2],[4,2],[0,3],[3,3],[0,4],[4,4]], S: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[0,2],[1,2],[2,2],[3,2],[4,2],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], T: [[0,0],[1,0],[2,0],[3,0],[4,0],[2,1],[2,2],[2,3],[2,4]], U: [[0,0],[4,0],[0,1],[4,1],[0,2],[4,2],[0,3],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], V: [[0,0],[4,0],[0,1],[4,1],[0,2],[4,2],[1,3],[3,3],[2,4]], W: [[0,0],[4,0],[0,1],[4,1],[0,2],[2,2],[4,2],[0,3],[1,3],[3,3],[4,3],[0,4],[4,4]], X: [[0,0],[4,0],[1,1],[3,1],[2,2],[1,3],[3,3],[0,4],[4,4]], Y: [[0,0],[4,0],[0,1],[4,1],[0,2],[1,2],[2,2],[3,2],[4,2],[2,3],[2,4]], Z: [[0,0],[1,0],[2,0],[3,0],[4,0],[3,1],[2,2],[1,3],[0,4],[1,4],[2,4],[3,4],[4,4]], Å: [[2,0],[0,1],[1,1],[2,1],[3,1],[4,1],[0,2],[4,2],[0,3],[1,3],[2,3],[3,3],[4,3],[0,4],[4,4]], Ä: [[1,0],[3,0],[0,1],[1,1],[2,1],[3,1],[4,1],[0,2],[4,2],[0,3],[1,3],[2,3],[3,3],[4,3],[0,4],[4,4]], Ö: [[1,0],[3,0],[0,1],[1,1],[2,1],[3,1],[4,1],[0,2],[4,2],[0,3],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], 0: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[4,1],[0,2],[4,2],[0,3],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], 1: [[1,0],[2,0],[3,0],[3,1],[3,2],[3,3],[3,4]], 2: [[0,0],[1,0],[2,0],[3,0],[4,0],[4,1],[0,2],[1,2],[2,2],[3,2],[4,2],[0,3],[0,4],[1,4],[2,4],[3,4],[4,4]], 3: [[0,0],[1,0],[2,0],[3,0],[4,0],[4,1],[2,2],[3,2],[4,2],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], 4: [[0,0],[4,0],[0,1],[4,1],[0,2],[1,2],[2,2],[3,2],[4,2],[4,3],[4,4]], 5: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[0,2],[1,2],[2,2],[3,2],[4,2],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], 6: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[0,2],[1,2],[2,2],[3,2],[4,2],[0,3],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], 7: [[0,0],[1,0],[2,0],[3,0],[4,0],[4,1],[4,2],[4,3],[4,4]], 8: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[4,1],[0,2],[1,2],[2,2],[3,2],[4,2],[0,3],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], 9: [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[4,1],[0,2],[1,2],[2,2],[3,2],[4,2],[4,3],[0,4],[1,4],[2,4],[3,4],[4,4]], '\@': [[0,0],[1,0],[2,0],[3,0],[4,0],[0,1],[4,1],[0,2],[2,2],[3,2],[4,2],[0,3],[2,3],[4,3],[0,4],[2,4],[3,4],[4,4]], '\#': [[1,0],[3,0],[0,1],[1,1],[2,1],[3,1],[4,1],[1,2],[3,2],[0,3],[1,3],[2,3],[3,3],[4,3],[1,4],[3,4]], '\?': 
[[0,0],[1,0],[2,0],[3,0],[4,0],[4,1],[2,2],[3,2],[4,2],[2,4]], '\%': [[0,0],[1,0],[4,0],[0,1],[1,1],[3,1],[2,2],[1,3],[3,3],[4,3],[0,4],[3,4],[4,4]], '\/': [[4,0],[3,1],[2,2],[1,3],[0,4]], '\+': [[2,0],[2,1],[0,2],[1,2],[2,2],[3,2],[4,2],[2,3],[2,4]], '\-': [[1,2],[2,2],[3,2]], '\_': [[0,4],[1,4],[2,4],[3,4],[4,4]], '\=': [[0,1],[1,1],[2,1],[3,1],[4,1],[0,3],[1,3],[2,3],[3,3],[4,3]], '\*': [[0,1],[2,1],[4,1],[1,2],[2,2],[3,2],[0,3],[2,3],[4,3]], '\'': [[2,0],[2,1]], '\"': [[1,0],[3,0],[1,1],[3,1]], '\(': [[2,0],[1,1],[1,2],[1,3],[2,4]], '\)': [[2,0],[3,1],[3,2],[3,3],[2,4]], '\.': [[2,4]], '\,': [[3,3],[2,3]], '\;': [[2,1],[2,3],[2,4]], '\:': [[2,1],[2,4]], '\!': [[2,0],[2,1],[2,2],[2,4]], '\{': [[2,0],[3,0],[2,1],[1,2],[2,2],[2,3],[2,4],[3,4]], '\}': [[1,0],[2,0],[2,1],[2,2],[3,2],[2,3],[1,4],[2,4]], '\]': [[1,0],[2,0],[2,1],[2,2],[2,3],[1,4],[2,4]], '\[': [[2,0],[3,0],[2,1],[2,2],[2,3],[2,4],[3,4]], '\^': [[2,0],[1,1],[3,1]], '\<': [[3,0],[2,1],[1,2],[2,3],[3,4]], '\>': [[1,0],[2,1],[3,2],[2,3],[1,4]] }<|fim▁end|>
L: [[0,0],[0,1],[0,2],[0,3],[0,4],[1,4],[2,4],[3,4],[4,4]], M: [[0,0],[4,0],[0,1],[1,1],[3,1],[4,1],[0,2],[2,2],[4,2],[0,3],[4,3],[0,4],[4,4]], N: [[0,0],[4,0],[0,1],[1,1],[4,1],[0,2],[2,2],[4,2],[0,3],[3,3],[4,3],[0,4],[4,4]],
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from media_tree.contrib.cms_plugins.media_tree_image.models import MediaTreeImage from media_tree.contrib.cms_plugins.helpers import PluginLink from media_tree.models import FileNode from media_tree.contrib.views.detail.image import ImageNodeDetailView from django.utils.translation import ugettext_lazy as _ from cms.utils.page_resolver import get_page_from_path<|fim▁hole|>from django.http import Http404 class ImagePluginDetailView(ImageNodeDetailView): return_url = None def get_object(self, *args, **kwargs): obj = super(ImagePluginDetailView, self).get_object(*args, **kwargs) if obj: allowed = False # validate that the object is actually published using the plugin... for plugin in MediaTreeImage.objects.filter(node=obj): # ...and on a publicly accessible page. # TODO: Iterating all plugins and getting each page # is a bit inefficient. page = get_page_from_path(plugin.page.get_path()) if page: allowed = True break if not allowed: raise Http404 return obj def get_context_data(self, *args, **kwargs): context_data = super(ImagePluginDetailView, self).get_context_data( *args, **kwargs) if self.return_url: page = get_page_from_path(self.return_url.strip('/')) if page: context_data.update({ 'link': PluginLink(url=page.get_absolute_url(), text=_('Back to %s') % page.get_title()) }) return context_data def get(self, request, *args, **kwargs): self.return_url = request.GET.get('return_url', None) return super(ImagePluginDetailView, self).get(request, *args, **kwargs)<|fim▁end|>
<|file_name|>Pane.js<|end_file_name|><|fim▁begin|>/* ************************************************************************ qooxdoo - the new era of web development http://qooxdoo.org Copyright: 2004-2008 1&1 Internet AG, Germany, http://www.1und1.de License: LGPL: http://www.gnu.org/licenses/lgpl.html EPL: http://www.eclipse.org/org/documents/epl-v10.php See the LICENSE file in the project's top-level directory for details. Authors: * Fabian Jakobs (fjakobs) ************************************************************************ */ qx.Class.define("qx.test.ui.virtual.Pane", { extend : qx.test.ui.LayoutTestCase, members : { setUp : function() { this.base(arguments); this.defaultWidth = 30; this.defaultHeight = 10; this.rowCount = 1000; this.colCount = 200; var pane = new qx.ui.virtual.core.Pane( this.rowCount, this.colCount, this.defaultHeight, this.defaultWidth ); this.getRoot().add(pane); this.pane = pane; }, tearDown : function() { this.pane.destroy(); this.base(arguments); this.flush(); }, assertUpdateArgs : function(rowIndex, colIndex, rowSizes, colSizes, args, msg) { this.assertEquals(rowIndex, args[0], msg); this.assertEquals(colIndex, args[1], msg); this.assertArrayEquals(rowSizes, args[2], msg); this.assertArrayEquals(colSizes, args[3], msg); }, assertScrollArgs : function(rowIndex, colIndex, rowSizes, colSizes, args, msg) { this.assertEquals(rowIndex, args[0], msg); this.assertEquals(colIndex, args[1], msg); this.assertArrayEquals(rowSizes, args[2], msg); this.assertArrayEquals(colSizes, args[3], msg); }, assertScroll : function(scrollTop, scrollLeft, pane, msg) { var layerContainer = this.pane._getChildren()[0]; this.assertEquals(-scrollTop, layerContainer.getBounds().top, msg); this.assertEquals(-scrollLeft, layerContainer.getBounds().left, msg); }, testConstructor : function() { this.assertNotUndefined(this.pane); }, testScrollProperties : function() { this.flush(); this.pane.setScrollY(30); this.assertEquals(30, this.pane.getScrollY()); this.pane.setScrollX(40); this.assertEquals(40, this.pane.getScrollX()); }, testGetScrollSize : function() { var size = this.pane.getScrollSize(); this.assertEquals(this.defaultWidth * this.colCount, size.width); this.assertEquals(this.defaultHeight * this.rowCount, size.height); var rowConfig = this.pane.getRowConfig(); rowConfig.setDefaultItemSize(50); rowConfig.setItemCount(123); rowConfig.setItemSize(10, 30); this.assertEquals(50 * 123 - 20, this.pane.getScrollSize().height); }, testLayerAdd : function() { var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.assertEquals(layer, this.pane.getLayers()[0]); }, testUpdateEvent : function() { var called = 0; var pane = new qx.ui.virtual.core.Pane(100, 20, 10, 50); pane.addListener("update", function() { called ++}, this); pane.set({ width: 300, height: 200 }); // no update after creation this.getRoot().add(pane); this.assertEquals(0, called, "Expect no update after creation"); // one update after appear this.flush(); this.assertEquals(1, called, "Expect one update after appear"); // one update after pane resize called = 0; pane.setWidth(400); this.flush(); this.assertEquals(1, called, "Expect one update after pane resize"); // one update after data resize called = 0; pane.getRowConfig().setItemCount(200); this.flush(); this.assertEquals(1, called, "Expect one update after data resize"); // one update after data and pane resize called = 0; pane.getRowConfig().setItemCount(300); pane.setWidth(500); this.flush(); this.assertEquals(2, called, "Expect two updates 
after data and pane resize"); pane.destroy(); }, testFullUpdate : function() { var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.set({ width: 100, height: 50 }); this.flush(); layer.calls = []; this.pane.fullUpdate(); this.flush(); this.assertEquals(2, layer.calls.length); this.assertEquals("fullUpdate", layer.calls[0][0]); var args = layer.calls[0][1]; this.assertUpdateArgs(0, 0, [10, 10, 10, 10, 10], [30, 30, 30, 30], args); this.assertScroll(0, 0, this.pane); this.pane.setScrollY(4); layer.calls = []; this.pane.fullUpdate(); this.flush(); var args = layer.calls[0][1]; this.assertUpdateArgs(0, 0, [10, 10, 10, 10, 10, 10], [30, 30, 30, 30], args); this.assertScroll(4, 0, this.pane); this.pane.setScrollY(10); layer.calls = []; this.pane.fullUpdate(); this.flush(); var args = layer.calls[0][1]; this.assertUpdateArgs(1, 0, [10, 10, 10, 10, 10], [30, 30, 30, 30], args); this.assertScroll(0, 0, this.pane); this.pane.setScrollY(16); layer.calls = []; this.pane.fullUpdate(); this.flush(); var args = layer.calls[0][1]; this.assertUpdateArgs(1, 0, [10, 10, 10, 10, 10, 10], [30, 30, 30, 30], args); this.assertScroll(6, 0, this.pane); this.pane.setScrollY(0); this.flush(); this.pane.setScrollX(4); layer.calls = []; this.pane.fullUpdate(); this.flush(); var args = layer.calls[0][1]; this.assertUpdateArgs(0, 0, [10, 10, 10, 10, 10], [30, 30, 30, 30], args); this.assertScroll(0, 4, this.pane); this.pane.setScrollX(30); layer.calls = []; this.pane.fullUpdate(); this.flush(); var args = layer.calls[0][1]; this.assertUpdateArgs(0, 1, [10, 10, 10, 10, 10], [30, 30, 30, 30], args); this.assertScroll(0, 0, this.pane); this.pane.setScrollX(36); layer.calls = []; this.pane.fullUpdate(); this.flush(); var args = layer.calls[0][1]; this.assertUpdateArgs(0, 1, [10, 10, 10, 10, 10], [30, 30, 30, 30], args); this.assertScroll(0, 6, this.pane); }, testNoRows : function() { var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); layer.calls = []; this.pane.setWidth(100); this.pane.getColumnConfig().setDefaultItemSize(100); this.pane.getColumnConfig().setItemCount(1); this.pane.getRowConfig().setItemCount(0); this.flush(); var args = layer.calls[0][1]; this.assertUpdateArgs(0, 0, [], [100], args); this.assertScroll(0, 0, this.pane); // resize layer.calls = []; this.pane.setWidth(30); this.pane.getColumnConfig().setDefaultItemSize(30); this.flush(); var args = layer.calls[0][1]; this.assertUpdateArgs(0, 0, [], [30], args); this.assertScroll(0, 0, this.pane); }, testNoColumns : function() { var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); layer.calls = []; this.pane.setHeight(100); this.pane.getRowConfig().setDefaultItemSize(100); this.pane.getRowConfig().setItemCount(1); this.pane.getColumnConfig().setItemCount(0); this.flush(); var args = layer.calls[0][1]; this.assertUpdateArgs(0, 0, [100], [], args); this.assertScroll(0, 0, this.pane); // resize layer.calls = []; this.pane.setHeight(30); this.pane.getRowConfig().setDefaultItemSize(30); this.flush(); var args = layer.calls[0][1]; this.assertUpdateArgs(0, 0, [30], [], args); this.assertScroll(0, 0, this.pane); }, testPrefetchYAtTop : function() { var layerHeight = 400; var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.set({width: 300, height: layerHeight}); this.flush(); // scroll top is 0 and prefetch above this.pane.prefetchY(100, 200, 0, 0); this.flush(); this.assertEquals(layerHeight, layer.getBounds().height); 
this.assertScroll(0, 0, this.pane); }, testPrefetchYLimitedAtTop : function() { var layerHeight = 400; var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.set({width: 300, height: layerHeight}); this.flush(); // scroll top is 100 and prefetch above 200 this.pane.setScrollY(100); this.pane.prefetchY(100, 200, 0, 0); this.flush(); this.assertEquals(layerHeight+100, layer.getBounds().height); this.assertScroll(100, 0, this.pane); }, testPrefetchYAtBottom : function() { var layerHeight = 400; var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.set({width: 300, height: layerHeight}); this.flush(); // scroll top to bottom and prefetch below 200 this.pane.setScrollY(this.pane.getScrollMaxY()); this.pane.prefetchY(0, 0, 100, 200); this.flush(); this.assertEquals(layerHeight, layer.getBounds().height); this.assertScroll(0, 0, this.pane); }, testPrefetchYLimitedAtBottom : function() { var layerHeight = 400; var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.set({width: 300, height: layerHeight}); this.flush(); // scroll top to bottom and prefetch below 200 this.pane.setScrollY(this.pane.getScrollMaxY()-100); this.pane.prefetchY(0, 0, 100, 200); this.flush(); this.assertEquals(layerHeight+100, layer.getBounds().height); this.assertScroll(0, 0, this.pane); }, testPrefetchYInMiddle : function() { var layerHeight = 400; var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.set({width: 300, height: layerHeight}); this.flush(); // scroll top is 500 and prefetch above 200 this.pane.setScrollY(500); this.pane.prefetchY(100, 200, 100, 200); this.flush(); this.assertEquals(layerHeight+400, layer.getBounds().height); this.assertScroll(200, 0, this.pane); // already prefetched 200 pixel above. Scrolling up 20px and prefetching // again should not change the layers this.pane.setScrollY(480); this.pane.prefetchY(100, 200, 100, 200); this.flush(); this.assertEquals(layerHeight+400, layer.getBounds().height); this.assertScroll(180, 0, this.pane); // scroll more than minAbove up. Prefetching should update the layers this.pane.setScrollY(390); this.pane.prefetchY(100, 200, 100, 200); this.flush(); this.assertEquals(layerHeight+400, layer.getBounds().height); this.assertScroll(200, 0, this.pane); // already prefetched 200 pixel below. Scrolling down 20px and prefetching // again should not change the layers this.pane.setScrollY(410); this.pane.prefetchY(100, 200, 100, 200); this.flush(); this.assertEquals(layerHeight+400, layer.getBounds().height); this.assertScroll(220, 0, this.pane); // scroll more than minBelow down. 
Prefetching should update the layers this.pane.setScrollY(520); this.pane.prefetchY(100, 200, 100, 200); this.flush(); this.assertEquals(layerHeight+400, layer.getBounds().height); this.assertScroll(200, 0, this.pane); }, testPrefetchXAtLeft : function() { var layerWidth = 300; var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.set({width: layerWidth, height: 400}); this.flush(); // scroll left is 0 and prefetch left this.pane.prefetchX(100, 200, 0, 0); this.flush(); this.assertEquals(layerWidth, layer.getBounds().width); this.assertScroll(0, 0, this.pane); }, testPrefetchXLimitedAtLeft : function() { var layerWidth = 300; var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.getColumnConfig().setDefaultItemSize(10); this.pane.set({width: layerWidth, height: 400}); this.flush(); // scroll top is 100 and prefetch above 200 this.pane.setScrollX(100); this.pane.prefetchX(100, 200, 0, 0); this.flush(); this.assertEquals(layerWidth+100, layer.getBounds().width); this.assertScroll(0, 100, this.pane); }, testPrefetchXAtBottom : function() { var layerWidth = 300; var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.getColumnConfig().setDefaultItemSize(10); this.pane.set({width: layerWidth, height: 400}); this.flush(); // scroll left to right and prefetch right 200 this.pane.setScrollX(this.pane.getScrollMaxX()); this.pane.prefetchX(0, 0, 100, 200); this.flush(); this.assertEquals(layerWidth, layer.getBounds().width); this.assertScroll(0, 0, this.pane); }, testPrefetchXLimitedAtBottom : function() { var layerWidth = 300; var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.getColumnConfig().setDefaultItemSize(10); this.pane.set({width: layerWidth, height: 400}); this.flush(); // scroll left to right-100 and prefetch right 200 this.pane.setScrollX(this.pane.getScrollMaxX()-100); this.pane.prefetchX(0, 0, 100, 200); this.flush(); this.assertEquals(layerWidth+100, layer.getBounds().width); this.assertScroll(0, 0, this.pane); }, testPrefetchXInMiddle : function() { var layerWidth = 300; var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.set({width: layerWidth, height: 400}); this.pane.getColumnConfig().setDefaultItemSize(10); this.flush(); // scroll left is 500 and prefetch left 200 this.pane.setScrollX(500); this.pane.prefetchX(100, 200, 100, 200); this.flush(); this.assertEquals(layerWidth+400, layer.getBounds().width); this.assertScroll(0, 200, this.pane); // already prefetched 200 pixel at the left. Scrolling left 20px and prefetching // again should not change the layers this.pane.setScrollX(480); this.pane.prefetchX(100, 200, 100, 200); this.flush(); this.assertEquals(layerWidth+400, layer.getBounds().width); this.assertScroll(0, 180, this.pane); // scroll more than minLeft left. Prefetching should update the layers<|fim▁hole|> this.pane.prefetchX(100, 200, 100, 200); this.flush(); this.assertEquals(layerWidth+400, layer.getBounds().width); this.assertScroll(0, 200, this.pane); // already prefetched 200 pixel right. Scrolling right 20px and prefetching // again should not change the layers this.pane.setScrollX(410); this.pane.prefetchX(100, 200, 100, 200); this.flush(); this.assertEquals(layerWidth+400, layer.getBounds().width); this.assertScroll(0, 220, this.pane); // scroll more than minRight right. 
Prefetching should update the layers this.pane.setScrollX(520); this.pane.prefetchX(100, 200, 100, 200); this.flush(); this.assertEquals(layerWidth+400, layer.getBounds().width); this.assertScroll(0, 200, this.pane); }, testUpdateLayerWindow : function() { var layer = new qx.test.ui.virtual.layer.LayerMock(); this.pane.addLayer(layer); this.pane.set({ width: 100, height: 50 }); this.flush(); layer.calls = []; this.pane.setScrollY(4); this.flush(); this.assertEquals("updateLayerWindow", layer.calls[0][0]); var args = layer.calls[0][1]; this.assertScrollArgs(0, 0, [10, 10, 10, 10, 10, 10], [30, 30, 30, 30], args); this.assertScroll(4, 0, this.pane); layer.calls = []; this.pane.setScrollY(5); this.flush(); this.assertEquals(0, layer.calls.length); this.assertScroll(5, 0, this.pane); layer.calls = []; this.pane.setScrollY(10); this.flush(); this.assertEquals(0, layer.calls.length); this.assertScroll(10, 0, this.pane); layer.calls = []; this.pane.setScrollY(16); this.flush(); this.assertEquals("updateLayerWindow", layer.calls[0][0]); var args = layer.calls[0][1]; this.assertScrollArgs(1, 0, [10, 10, 10, 10, 10, 10], [30, 30, 30, 30], args); this.assertScroll(6, 0, this.pane); this.pane.setScrollY(0); this.flush(); layer.calls = []; this.pane.setScrollX(4); this.flush(); this.assertEquals(0, layer.calls.length); this.assertScroll(0, 4, this.pane); layer.calls = []; this.pane.setScrollX(30); this.flush(); this.assertEquals("updateLayerWindow", layer.calls[0][0]); var args = layer.calls[0][1]; this.assertScrollArgs(0, 1, [10, 10, 10, 10, 10], [30, 30, 30, 30], args); this.assertScroll(0, 0, this.pane); layer.calls = []; this.pane.setScrollX(36); this.flush(); this.assertEquals(0, layer.calls.length); this.assertScroll(0, 6, this.pane); }, testSrollRowIntoView : function() { this.pane.set({ width : 400, height : 305 }); this.pane.getColumnConfig().setItemCount(1); var layer = new qx.test.ui.virtual.layer.LayerSimple(); this.pane.addLayer(layer); this.flush(); this.pane.scrollRowIntoView(100) this.flush(); var children = layer.getContentElement().getDomElement().childNodes; this.assertScroll(5, 0, this.pane); this.assertEquals("70 / 0", children[0].innerHTML) this.assertEquals("100 / 0", children[children.length-1].innerHTML); }, testSrollColumnIntoView : function() { this.pane.set({ width : 405, height : 305 }); this.pane.getRowConfig().setItemCount(1); var layer = new qx.test.ui.virtual.layer.LayerSimple(); this.pane.addLayer(layer); this.flush(); this.pane.scrollColumnIntoView(100) this.flush(); var children = layer.getContentElement().getDomElement().childNodes; this.assertScroll(0, 15, this.pane); this.assertEquals("0 / 87", children[0].innerHTML) this.assertEquals("0 / 100", children[children.length-1].innerHTML) }, _testSrollRowIntoViewEdgeCase : function() { this.pane.set({ width : 400, height : 305 }); this.pane.getColumnConfig().setItemCount(1); var layer = new qx.test.ui.virtual.layer.LayerSimple(); this.pane.addLayer(layer); this.flush(); this.pane.scrollRowIntoView(2000) this.flush(); var children = layer.getContentElement().getDomElement().childNodes; this.assertScroll(5, 0, this.pane); this.assertEquals("0 / 969", children[0].innerHTML) this.assertEquals("0 / 999", children[children.length-1].innerHTML) }, _testSrollColumnIntoViewEdgeCase : function() { this.pane.set({ width : 405, height : 305 }); this.pane.getRowConfig().setItemCount(1); var layer = new qx.test.ui.virtual.layer.LayerSimple(); this.pane.addLayer(layer); this.flush(); this.pane.scrollColumnIntoView(400) this.flush(); 
var children = layer.getContentElement().getDomElement().childNodes; this.assertScroll(0, 15, this.pane); this.assertEquals("186 / 0", children[0].innerHTML) this.assertEquals("199 / 0", children[children.length-1].innerHTML) }, testGetCellAtPosition : function() { this.pane.getRowConfig().setItemCount(3); this.pane.getColumnConfig().setItemCount(3); var layer = new qx.test.ui.virtual.layer.LayerSimple(); this.pane.addLayer(layer); this.flush(); this.assertJsonEquals({row : 0, column : 0}, this.pane.getCellAtPosition(0, 0)); this.assertEquals(null, this.pane.getCellAtPosition(400, 0)); this.assertEquals(null, this.pane.getCellAtPosition(0, 300)); this.assertEquals(null, this.pane.getCellAtPosition(400, 300)); this.assertJsonEquals({row : 2, column : 2}, this.pane.getCellAtPosition(89, 29)); }, testGetItemAtPositionEmptySpace : function() { var pane = this.pane; pane.setHeight(100); pane.setWidth(50); this.pane.getRowConfig().setItemCount(1); this.pane.getRowConfig().setDefaultItemSize(50); this.flush(); this.assertJsonEquals({row : 0, column : 0}, this.pane.getCellAtPosition(1, 49)); this.assertEquals(null, this.pane.getCellAtPosition(1, 50)); this.assertEquals(null, this.pane.getCellAtPosition(1, 70)); }, testMouseCellEvents : function() { var rowCount = 2; var colCount = 2; var defaultHeight = 10; var defaultWidth = 50; var pane = new qx.ui.virtual.core.Pane( rowCount, colCount, defaultHeight, defaultWidth ).set({ width: 150, height: 30 }); this.getRoot().add(pane, {left: 100, top: 100}); this.flush(); var calls = []; var listener = function(e) { calls.push(e); } pane.addListener("cellClick", listener); pane.addListener("cellDblclick", listener); pane.addListener("cellContextmenu", listener); var MouseEventMock = qx.test.ui.virtual.MouseEventMock; var eventMouseToCellEvents = { "click" : "cellClick", "dblclick" : "cellDblclick", "contextmenu" : "cellContextmenu" }; for (var mouseEvent in eventMouseToCellEvents) { var cellEvent = eventMouseToCellEvents[mouseEvent]; calls = []; pane.dispatchEvent(new MouseEventMock(mouseEvent, {documentLeft: 99, documentTop: 99})); this.assertEquals(0, calls.length); calls = []; pane.dispatchEvent(new MouseEventMock(mouseEvent, {documentLeft: 100, documentTop: 100})); this.assertEquals(1, calls.length, cellEvent); this.assertEquals(0, calls[0].getRow(), cellEvent); this.assertEquals(0, calls[0].getColumn(), cellEvent); this.assertEquals(cellEvent, calls[0].getType(), cellEvent); calls = []; pane.dispatchEvent(new MouseEventMock(mouseEvent, {documentLeft: 160, documentTop: 103})); this.assertEquals(1, calls.length, cellEvent); this.assertEquals(0, calls[0].getRow(), cellEvent); this.assertEquals(1, calls[0].getColumn(), cellEvent); this.assertEquals(cellEvent, calls[0].getType(), cellEvent); calls = []; pane.dispatchEvent(new MouseEventMock(mouseEvent, {documentLeft: 105, documentTop: 110})); this.assertEquals(1, calls.length, cellEvent); this.assertEquals(1, calls[0].getRow(), cellEvent); this.assertEquals(0, calls[0].getColumn(), cellEvent); this.assertEquals(cellEvent, calls[0].getType(), cellEvent); calls = []; pane.dispatchEvent(new MouseEventMock(mouseEvent, {documentLeft: 105, documentTop: 125})); this.assertEquals(0, calls.length); calls = []; pane.dispatchEvent(new MouseEventMock(mouseEvent, {documentLeft: 275, documentTop: 105})); this.assertEquals(0, calls.length); calls = []; pane.dispatchEvent(new MouseEventMock(mouseEvent, {documentLeft: 275, documentTop: 105})); this.assertEquals(0, calls.length); } pane.destroy(); this.flush(); }, 
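    // creating a pane, adding it to the root and destroying it again must not leak
    // widgets or DOM elements (verified via assertDestroy below)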
testDestroy : function() { this.pane.destroy(); this.flush(); this.assertDestroy(function() { var pane = new qx.ui.virtual.core.Pane( this.rowCount, this.colCount, this.defaultHeight, this.defaultWidth ); this.getRoot().add(pane); pane.destroy(); }, this); } }, destruct : function() { this.pane = null; } });<|fim▁end|>
this.pane.setScrollX(390);
<|file_name|>ventana_no_texto.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>ventana_no_texto$2 ventana_no_texto$1 ventana_no_texto<|fim▁end|>
<|file_name|>map.hpp<|end_file_name|><|fim▁begin|>// Boost.Assign library // // Copyright Thorsten Ottosen 2003-2004. Use, modification and // distribution is subject to the Boost Software License, Version // 1.0. (See accompanying file LICENSE_1_0.txt or copy at // http://www.boost.org/LICENSE_1_0.txt) // // For more information, see http://www.boost.org/libs/assign/ // #ifndef BOOST_ASSIGN_STD_MAP_HPP #define BOOST_ASSIGN_STD_MAP_HPP #if defined(_MSC_VER) && (_MSC_VER >= 1020) # pragma once #endif #include <boost/assign/list_inserter.hpp> #include <boost/config.hpp> #include <map> namespace boost { namespace assign { template< class K, class V, class C, class A, class P > inline list_inserter< assign_detail::call_insert< std::map<K,V,C,A> >, P > operator+=( std::map<K,V,C,A>& m, const P& p ) { return insert( m )( p ); } template< class K, class V, class C, class A, class P > inline list_inserter< assign_detail::call_insert< std::multimap<K,V,C,A> >, P > operator+=( std::multimap<K,V,C,A>& m, const P& p ) { return insert( m )( p );<|fim▁hole|>} #endif<|fim▁end|>
} }
<|file_name|>simd-intrinsic-generic-arithmetic-saturating.rs<|end_file_name|><|fim▁begin|>// compile-flags: -C no-prepopulate-passes // #![crate_type = "lib"] #![feature(repr_simd, platform_intrinsics)] #![allow(non_camel_case_types)] #![deny(unused)] // signed integer types #[repr(simd)] #[derive(Copy, Clone)] pub struct i8x2(i8, i8); #[repr(simd)] #[derive(Copy, Clone)] pub struct i8x4(i8, i8, i8, i8); #[repr(simd)] #[derive(Copy, Clone)] pub struct i8x8( i8, i8, i8, i8, i8, i8, i8, i8, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct i8x16( i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct i8x32( i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct i8x64( i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, i8, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct i16x2(i16, i16); #[repr(simd)] #[derive(Copy, Clone)] pub struct i16x4(i16, i16, i16, i16); #[repr(simd)] #[derive(Copy, Clone)] pub struct i16x8( i16, i16, i16, i16, i16, i16, i16, i16, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct i16x16( i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct i16x32( i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, i16, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct i32x2(i32, i32); #[repr(simd)] #[derive(Copy, Clone)] pub struct i32x4(i32, i32, i32, i32); #[repr(simd)] #[derive(Copy, Clone)] pub struct i32x8( i32, i32, i32, i32, i32, i32, i32, i32, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct i32x16( i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, i32, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct i64x2(i64, i64); #[repr(simd)] #[derive(Copy, Clone)] pub struct i64x4(i64, i64, i64, i64); #[repr(simd)] #[derive(Copy, Clone)] pub struct i64x8( i64, i64, i64, i64, i64, i64, i64, i64, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct i128x2(i128, i128); #[repr(simd)] #[derive(Copy, Clone)] pub struct i128x4(i128, i128, i128, i128); // unsigned integer types #[repr(simd)] #[derive(Copy, Clone)] pub struct u8x2(u8, u8); #[repr(simd)] #[derive(Copy, Clone)] pub struct u8x4(u8, u8, u8, u8); #[repr(simd)] #[derive(Copy, Clone)] pub struct u8x8( u8, u8, u8, u8, u8, u8, u8, u8, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct u8x16( u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct u8x32( u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct u8x64( u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, u8, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct u16x2(u16, u16); #[repr(simd)] #[derive(Copy, Clone)] pub struct u16x4(u16, u16, u16, u16); #[repr(simd)] #[derive(Copy, Clone)] pub struct u16x8( u16, u16, 
u16, u16, u16, u16, u16, u16, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct u16x16( u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct u16x32( u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, u16, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct u32x2(u32, u32); #[repr(simd)] #[derive(Copy, Clone)] pub struct u32x4(u32, u32, u32, u32); #[repr(simd)] #[derive(Copy, Clone)] pub struct u32x8( u32, u32, u32, u32, u32, u32, u32, u32, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct u32x16( u32, u32, u32, u32, u32, u32, u32, u32, u32, u32, u32, u32, u32, u32, u32, u32, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct u64x2(u64, u64); #[repr(simd)] #[derive(Copy, Clone)] pub struct u64x4(u64, u64, u64, u64); #[repr(simd)] #[derive(Copy, Clone)] pub struct u64x8( u64, u64, u64, u64, u64, u64, u64, u64, ); #[repr(simd)] #[derive(Copy, Clone)] pub struct u128x2(u128, u128); #[repr(simd)] #[derive(Copy, Clone)] pub struct u128x4(u128, u128, u128, u128); extern "platform-intrinsic" { fn simd_saturating_add<T>(x: T, y: T) -> T; fn simd_saturating_sub<T>(x: T, y: T) -> T; } // NOTE(eddyb) `%{{x|_3}}` is used because on some targets (e.g. WASM) // SIMD vectors are passed directly, resulting in `%x` being a vector, // while on others they're passed indirectly, resulting in `%x` being // a pointer to a vector, and `%_3` a vector loaded from that pointer. // This is controlled by the target spec option `simd_types_indirect`. // The same applies to `%{{y|_4}}` as well. // CHECK-LABEL: @sadd_i8x2 #[no_mangle] pub unsafe fn sadd_i8x2(x: i8x2, y: i8x2) -> i8x2 { // CHECK: %{{[0-9]+}} = call <2 x i8> @llvm.sadd.sat.v2i8(<2 x i8> %{{x|_3}}, <2 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i8x4 #[no_mangle] pub unsafe fn sadd_i8x4(x: i8x4, y: i8x4) -> i8x4 { // CHECK: %{{[0-9]+}} = call <4 x i8> @llvm.sadd.sat.v4i8(<4 x i8> %{{x|_3}}, <4 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i8x8 #[no_mangle] pub unsafe fn sadd_i8x8(x: i8x8, y: i8x8) -> i8x8 { // CHECK: %{{[0-9]+}} = call <8 x i8> @llvm.sadd.sat.v8i8(<8 x i8> %{{x|_3}}, <8 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i8x16 #[no_mangle] pub unsafe fn sadd_i8x16(x: i8x16, y: i8x16) -> i8x16 { // CHECK: %{{[0-9]+}} = call <16 x i8> @llvm.sadd.sat.v16i8(<16 x i8> %{{x|_3}}, <16 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i8x32 #[no_mangle] pub unsafe fn sadd_i8x32(x: i8x32, y: i8x32) -> i8x32 { // CHECK: %{{[0-9]+}} = call <32 x i8> @llvm.sadd.sat.v32i8(<32 x i8> %{{x|_3}}, <32 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i8x64 #[no_mangle] pub unsafe fn sadd_i8x64(x: i8x64, y: i8x64) -> i8x64 { // CHECK: %{{[0-9]+}} = call <64 x i8> @llvm.sadd.sat.v64i8(<64 x i8> %{{x|_3}}, <64 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i16x2 #[no_mangle] pub unsafe fn sadd_i16x2(x: i16x2, y: i16x2) -> i16x2 { // CHECK: %{{[0-9]+}} = call <2 x i16> @llvm.sadd.sat.v2i16(<2 x i16> %{{x|_3}}, <2 x i16> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i16x4 #[no_mangle] pub unsafe fn sadd_i16x4(x: i16x4, y: i16x4) -> i16x4 { // CHECK: %{{[0-9]+}} = call <4 x i16> @llvm.sadd.sat.v4i16(<4 x i16> %{{x|_3}}, <4 x i16> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i16x8 #[no_mangle] pub unsafe fn sadd_i16x8(x: i16x8, y: i16x8) -> 
i16x8 { // CHECK: %{{[0-9]+}} = call <8 x i16> @llvm.sadd.sat.v8i16(<8 x i16> %{{x|_3}}, <8 x i16> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i16x16 #[no_mangle] pub unsafe fn sadd_i16x16(x: i16x16, y: i16x16) -> i16x16 { // CHECK: %{{[0-9]+}} = call <16 x i16> @llvm.sadd.sat.v16i16(<16 x i16> %{{x|_3}}, <16 x i16> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i16x32 #[no_mangle] pub unsafe fn sadd_i16x32(x: i16x32, y: i16x32) -> i16x32 { // CHECK: %{{[0-9]+}} = call <32 x i16> @llvm.sadd.sat.v32i16(<32 x i16> %{{x|_3}}, <32 x i16> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i32x2 #[no_mangle] pub unsafe fn sadd_i32x2(x: i32x2, y: i32x2) -> i32x2 { // CHECK: %{{[0-9]+}} = call <2 x i32> @llvm.sadd.sat.v2i32(<2 x i32> %{{x|_3}}, <2 x i32> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i32x4 #[no_mangle] pub unsafe fn sadd_i32x4(x: i32x4, y: i32x4) -> i32x4 { // CHECK: %{{[0-9]+}} = call <4 x i32> @llvm.sadd.sat.v4i32(<4 x i32> %{{x|_3}}, <4 x i32> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i32x8 #[no_mangle] pub unsafe fn sadd_i32x8(x: i32x8, y: i32x8) -> i32x8 { // CHECK: %{{[0-9]+}} = call <8 x i32> @llvm.sadd.sat.v8i32(<8 x i32> %{{x|_3}}, <8 x i32> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i32x16 #[no_mangle] pub unsafe fn sadd_i32x16(x: i32x16, y: i32x16) -> i32x16 { // CHECK: %{{[0-9]+}} = call <16 x i32> @llvm.sadd.sat.v16i32(<16 x i32> %{{x|_3}}, <16 x i32> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i64x2 #[no_mangle] pub unsafe fn sadd_i64x2(x: i64x2, y: i64x2) -> i64x2 { // CHECK: %{{[0-9]+}} = call <2 x i64> @llvm.sadd.sat.v2i64(<2 x i64> %{{x|_3}}, <2 x i64> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i64x4 #[no_mangle] pub unsafe fn sadd_i64x4(x: i64x4, y: i64x4) -> i64x4 { // CHECK: %{{[0-9]+}} = call <4 x i64> @llvm.sadd.sat.v4i64(<4 x i64> %{{x|_3}}, <4 x i64> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i64x8 #[no_mangle] pub unsafe fn sadd_i64x8(x: i64x8, y: i64x8) -> i64x8 { // CHECK: %{{[0-9]+}} = call <8 x i64> @llvm.sadd.sat.v8i64(<8 x i64> %{{x|_3}}, <8 x i64> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i128x2 #[no_mangle] pub unsafe fn sadd_i128x2(x: i128x2, y: i128x2) -> i128x2 { // CHECK: %{{[0-9]+}} = call <2 x i128> @llvm.sadd.sat.v2i128(<2 x i128> %{{x|_3}}, <2 x i128> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @sadd_i128x4 #[no_mangle] pub unsafe fn sadd_i128x4(x: i128x4, y: i128x4) -> i128x4 { // CHECK: %{{[0-9]+}} = call <4 x i128> @llvm.sadd.sat.v4i128(<4 x i128> %{{x|_3}}, <4 x i128> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u8x2 #[no_mangle] pub unsafe fn uadd_u8x2(x: u8x2, y: u8x2) -> u8x2 { // CHECK: %{{[0-9]+}} = call <2 x i8> @llvm.uadd.sat.v2i8(<2 x i8> %{{x|_3}}, <2 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u8x4 #[no_mangle] pub unsafe fn uadd_u8x4(x: u8x4, y: u8x4) -> u8x4 { // CHECK: %{{[0-9]+}} = call <4 x i8> @llvm.uadd.sat.v4i8(<4 x i8> %{{x|_3}}, <4 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u8x8 #[no_mangle] pub unsafe fn uadd_u8x8(x: u8x8, y: u8x8) -> u8x8 { // CHECK: %{{[0-9]+}} = call <8 x i8> @llvm.uadd.sat.v8i8(<8 x i8> %{{x|_3}}, <8 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u8x16 #[no_mangle] pub unsafe fn uadd_u8x16(x: u8x16, y: u8x16) -> u8x16 { // CHECK: %{{[0-9]+}} = call <16 x i8> @llvm.uadd.sat.v16i8(<16 x i8> %{{x|_3}}, <16 x i8> %{{y|_4}}) 
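    // unsigned vectors lower to the @llvm.uadd.sat.* intrinsics, mirroring the
    // @llvm.sadd.sat.* lowering checked above for the signed vectors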
simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u8x32 #[no_mangle] pub unsafe fn uadd_u8x32(x: u8x32, y: u8x32) -> u8x32 { // CHECK: %{{[0-9]+}} = call <32 x i8> @llvm.uadd.sat.v32i8(<32 x i8> %{{x|_3}}, <32 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u8x64 #[no_mangle] pub unsafe fn uadd_u8x64(x: u8x64, y: u8x64) -> u8x64 { // CHECK: %{{[0-9]+}} = call <64 x i8> @llvm.uadd.sat.v64i8(<64 x i8> %{{x|_3}}, <64 x i8> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u16x2 #[no_mangle] pub unsafe fn uadd_u16x2(x: u16x2, y: u16x2) -> u16x2 { // CHECK: %{{[0-9]+}} = call <2 x i16> @llvm.uadd.sat.v2i16(<2 x i16> %{{x|_3}}, <2 x i16> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u16x4 #[no_mangle] pub unsafe fn uadd_u16x4(x: u16x4, y: u16x4) -> u16x4 { // CHECK: %{{[0-9]+}} = call <4 x i16> @llvm.uadd.sat.v4i16(<4 x i16> %{{x|_3}}, <4 x i16> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u16x8 #[no_mangle] pub unsafe fn uadd_u16x8(x: u16x8, y: u16x8) -> u16x8 { // CHECK: %{{[0-9]+}} = call <8 x i16> @llvm.uadd.sat.v8i16(<8 x i16> %{{x|_3}}, <8 x i16> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u16x16 #[no_mangle] pub unsafe fn uadd_u16x16(x: u16x16, y: u16x16) -> u16x16 { // CHECK: %{{[0-9]+}} = call <16 x i16> @llvm.uadd.sat.v16i16(<16 x i16> %{{x|_3}}, <16 x i16> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u16x32 #[no_mangle] pub unsafe fn uadd_u16x32(x: u16x32, y: u16x32) -> u16x32 { // CHECK: %{{[0-9]+}} = call <32 x i16> @llvm.uadd.sat.v32i16(<32 x i16> %{{x|_3}}, <32 x i16> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u32x2 #[no_mangle] pub unsafe fn uadd_u32x2(x: u32x2, y: u32x2) -> u32x2 { // CHECK: %{{[0-9]+}} = call <2 x i32> @llvm.uadd.sat.v2i32(<2 x i32> %{{x|_3}}, <2 x i32> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u32x4 #[no_mangle] pub unsafe fn uadd_u32x4(x: u32x4, y: u32x4) -> u32x4 { // CHECK: %{{[0-9]+}} = call <4 x i32> @llvm.uadd.sat.v4i32(<4 x i32> %{{x|_3}}, <4 x i32> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u32x8 #[no_mangle] pub unsafe fn uadd_u32x8(x: u32x8, y: u32x8) -> u32x8 { // CHECK: %{{[0-9]+}} = call <8 x i32> @llvm.uadd.sat.v8i32(<8 x i32> %{{x|_3}}, <8 x i32> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u32x16 #[no_mangle] pub unsafe fn uadd_u32x16(x: u32x16, y: u32x16) -> u32x16 { // CHECK: %{{[0-9]+}} = call <16 x i32> @llvm.uadd.sat.v16i32(<16 x i32> %{{x|_3}}, <16 x i32> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u64x2 #[no_mangle] pub unsafe fn uadd_u64x2(x: u64x2, y: u64x2) -> u64x2 { // CHECK: %{{[0-9]+}} = call <2 x i64> @llvm.uadd.sat.v2i64(<2 x i64> %{{x|_3}}, <2 x i64> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u64x4 #[no_mangle] pub unsafe fn uadd_u64x4(x: u64x4, y: u64x4) -> u64x4 { // CHECK: %{{[0-9]+}} = call <4 x i64> @llvm.uadd.sat.v4i64(<4 x i64> %{{x|_3}}, <4 x i64> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u64x8 #[no_mangle] pub unsafe fn uadd_u64x8(x: u64x8, y: u64x8) -> u64x8 { // CHECK: %{{[0-9]+}} = call <8 x i64> @llvm.uadd.sat.v8i64(<8 x i64> %{{x|_3}}, <8 x i64> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u128x2 #[no_mangle] pub unsafe fn uadd_u128x2(x: u128x2, y: u128x2) -> u128x2 { // CHECK: %{{[0-9]+}} = call <2 x i128> @llvm.uadd.sat.v2i128(<2 x i128> %{{x|_3}}, <2 x i128> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @uadd_u128x4 #[no_mangle] pub unsafe fn uadd_u128x4(x: u128x4, y: 
u128x4) -> u128x4 { // CHECK: %{{[0-9]+}} = call <4 x i128> @llvm.uadd.sat.v4i128(<4 x i128> %{{x|_3}}, <4 x i128> %{{y|_4}}) simd_saturating_add(x, y) } // CHECK-LABEL: @ssub_i8x2 #[no_mangle] pub unsafe fn ssub_i8x2(x: i8x2, y: i8x2) -> i8x2 { // CHECK: %{{[0-9]+}} = call <2 x i8> @llvm.ssub.sat.v2i8(<2 x i8> %{{x|_3}}, <2 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i8x4 #[no_mangle] pub unsafe fn ssub_i8x4(x: i8x4, y: i8x4) -> i8x4 { // CHECK: %{{[0-9]+}} = call <4 x i8> @llvm.ssub.sat.v4i8(<4 x i8> %{{x|_3}}, <4 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i8x8 #[no_mangle] pub unsafe fn ssub_i8x8(x: i8x8, y: i8x8) -> i8x8 { // CHECK: %{{[0-9]+}} = call <8 x i8> @llvm.ssub.sat.v8i8(<8 x i8> %{{x|_3}}, <8 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i8x16 #[no_mangle] pub unsafe fn ssub_i8x16(x: i8x16, y: i8x16) -> i8x16 { // CHECK: %{{[0-9]+}} = call <16 x i8> @llvm.ssub.sat.v16i8(<16 x i8> %{{x|_3}}, <16 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i8x32 #[no_mangle] pub unsafe fn ssub_i8x32(x: i8x32, y: i8x32) -> i8x32 { // CHECK: %{{[0-9]+}} = call <32 x i8> @llvm.ssub.sat.v32i8(<32 x i8> %{{x|_3}}, <32 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i8x64 #[no_mangle] pub unsafe fn ssub_i8x64(x: i8x64, y: i8x64) -> i8x64 { // CHECK: %{{[0-9]+}} = call <64 x i8> @llvm.ssub.sat.v64i8(<64 x i8> %{{x|_3}}, <64 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i16x2 #[no_mangle] pub unsafe fn ssub_i16x2(x: i16x2, y: i16x2) -> i16x2 { // CHECK: %{{[0-9]+}} = call <2 x i16> @llvm.ssub.sat.v2i16(<2 x i16> %{{x|_3}}, <2 x i16> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i16x4 #[no_mangle] pub unsafe fn ssub_i16x4(x: i16x4, y: i16x4) -> i16x4 { // CHECK: %{{[0-9]+}} = call <4 x i16> @llvm.ssub.sat.v4i16(<4 x i16> %{{x|_3}}, <4 x i16> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i16x8 #[no_mangle] pub unsafe fn ssub_i16x8(x: i16x8, y: i16x8) -> i16x8 { // CHECK: %{{[0-9]+}} = call <8 x i16> @llvm.ssub.sat.v8i16(<8 x i16> %{{x|_3}}, <8 x i16> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i16x16 #[no_mangle] pub unsafe fn ssub_i16x16(x: i16x16, y: i16x16) -> i16x16 { // CHECK: %{{[0-9]+}} = call <16 x i16> @llvm.ssub.sat.v16i16(<16 x i16> %{{x|_3}}, <16 x i16> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i16x32 #[no_mangle] pub unsafe fn ssub_i16x32(x: i16x32, y: i16x32) -> i16x32 { // CHECK: %{{[0-9]+}} = call <32 x i16> @llvm.ssub.sat.v32i16(<32 x i16> %{{x|_3}}, <32 x i16> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i32x2 #[no_mangle] pub unsafe fn ssub_i32x2(x: i32x2, y: i32x2) -> i32x2 { // CHECK: %{{[0-9]+}} = call <2 x i32> @llvm.ssub.sat.v2i32(<2 x i32> %{{x|_3}}, <2 x i32> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i32x4 #[no_mangle] pub unsafe fn ssub_i32x4(x: i32x4, y: i32x4) -> i32x4 { // CHECK: %{{[0-9]+}} = call <4 x i32> @llvm.ssub.sat.v4i32(<4 x i32> %{{x|_3}}, <4 x i32> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i32x8 #[no_mangle] pub unsafe fn ssub_i32x8(x: i32x8, y: i32x8) -> i32x8 { // CHECK: %{{[0-9]+}} = call <8 x i32> @llvm.ssub.sat.v8i32(<8 x i32> %{{x|_3}}, <8 x i32> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i32x16 #[no_mangle] pub unsafe fn ssub_i32x16(x: i32x16, y: i32x16) -> i32x16 { // CHECK: %{{[0-9]+}} = call <16 x i32> @llvm.ssub.sat.v16i32(<16 x i32> %{{x|_3}}, <16 x i32> %{{y|_4}}) simd_saturating_sub(x, 
y) } // CHECK-LABEL: @ssub_i64x2 #[no_mangle] pub unsafe fn ssub_i64x2(x: i64x2, y: i64x2) -> i64x2 { // CHECK: %{{[0-9]+}} = call <2 x i64> @llvm.ssub.sat.v2i64(<2 x i64> %{{x|_3}}, <2 x i64> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i64x4 #[no_mangle] pub unsafe fn ssub_i64x4(x: i64x4, y: i64x4) -> i64x4 { // CHECK: %{{[0-9]+}} = call <4 x i64> @llvm.ssub.sat.v4i64(<4 x i64> %{{x|_3}}, <4 x i64> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i64x8 #[no_mangle] pub unsafe fn ssub_i64x8(x: i64x8, y: i64x8) -> i64x8 { // CHECK: %{{[0-9]+}} = call <8 x i64> @llvm.ssub.sat.v8i64(<8 x i64> %{{x|_3}}, <8 x i64> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i128x2 #[no_mangle] pub unsafe fn ssub_i128x2(x: i128x2, y: i128x2) -> i128x2 { // CHECK: %{{[0-9]+}} = call <2 x i128> @llvm.ssub.sat.v2i128(<2 x i128> %{{x|_3}}, <2 x i128> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @ssub_i128x4 #[no_mangle] pub unsafe fn ssub_i128x4(x: i128x4, y: i128x4) -> i128x4 { // CHECK: %{{[0-9]+}} = call <4 x i128> @llvm.ssub.sat.v4i128(<4 x i128> %{{x|_3}}, <4 x i128> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u8x2 #[no_mangle] pub unsafe fn usub_u8x2(x: u8x2, y: u8x2) -> u8x2 { // CHECK: %{{[0-9]+}} = call <2 x i8> @llvm.usub.sat.v2i8(<2 x i8> %{{x|_3}}, <2 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u8x4 #[no_mangle] pub unsafe fn usub_u8x4(x: u8x4, y: u8x4) -> u8x4 { // CHECK: %{{[0-9]+}} = call <4 x i8> @llvm.usub.sat.v4i8(<4 x i8> %{{x|_3}}, <4 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u8x8 #[no_mangle] pub unsafe fn usub_u8x8(x: u8x8, y: u8x8) -> u8x8 { // CHECK: %{{[0-9]+}} = call <8 x i8> @llvm.usub.sat.v8i8(<8 x i8> %{{x|_3}}, <8 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u8x16 #[no_mangle] pub unsafe fn usub_u8x16(x: u8x16, y: u8x16) -> u8x16 { // CHECK: %{{[0-9]+}} = call <16 x i8> @llvm.usub.sat.v16i8(<16 x i8> %{{x|_3}}, <16 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u8x32 #[no_mangle] pub unsafe fn usub_u8x32(x: u8x32, y: u8x32) -> u8x32 { // CHECK: %{{[0-9]+}} = call <32 x i8> @llvm.usub.sat.v32i8(<32 x i8> %{{x|_3}}, <32 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u8x64 #[no_mangle] pub unsafe fn usub_u8x64(x: u8x64, y: u8x64) -> u8x64 { // CHECK: %{{[0-9]+}} = call <64 x i8> @llvm.usub.sat.v64i8(<64 x i8> %{{x|_3}}, <64 x i8> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u16x2 #[no_mangle] pub unsafe fn usub_u16x2(x: u16x2, y: u16x2) -> u16x2 { // CHECK: %{{[0-9]+}} = call <2 x i16> @llvm.usub.sat.v2i16(<2 x i16> %{{x|_3}}, <2 x i16> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u16x4 #[no_mangle] pub unsafe fn usub_u16x4(x: u16x4, y: u16x4) -> u16x4 { // CHECK: %{{[0-9]+}} = call <4 x i16> @llvm.usub.sat.v4i16(<4 x i16> %{{x|_3}}, <4 x i16> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u16x8 #[no_mangle] pub unsafe fn usub_u16x8(x: u16x8, y: u16x8) -> u16x8 { // CHECK: %{{[0-9]+}} = call <8 x i16> @llvm.usub.sat.v8i16(<8 x i16> %{{x|_3}}, <8 x i16> %{{y|_4}}) simd_saturating_sub(x, y) } <|fim▁hole|>// CHECK-LABEL: @usub_u16x16 #[no_mangle] pub unsafe fn usub_u16x16(x: u16x16, y: u16x16) -> u16x16 { // CHECK: %{{[0-9]+}} = call <16 x i16> @llvm.usub.sat.v16i16(<16 x i16> %{{x|_3}}, <16 x i16> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u16x32 #[no_mangle] pub unsafe fn usub_u16x32(x: u16x32, y: u16x32) -> u16x32 { // CHECK: %{{[0-9]+}} = call 
<32 x i16> @llvm.usub.sat.v32i16(<32 x i16> %{{x|_3}}, <32 x i16> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u32x2 #[no_mangle] pub unsafe fn usub_u32x2(x: u32x2, y: u32x2) -> u32x2 { // CHECK: %{{[0-9]+}} = call <2 x i32> @llvm.usub.sat.v2i32(<2 x i32> %{{x|_3}}, <2 x i32> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u32x4 #[no_mangle] pub unsafe fn usub_u32x4(x: u32x4, y: u32x4) -> u32x4 { // CHECK: %{{[0-9]+}} = call <4 x i32> @llvm.usub.sat.v4i32(<4 x i32> %{{x|_3}}, <4 x i32> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u32x8 #[no_mangle] pub unsafe fn usub_u32x8(x: u32x8, y: u32x8) -> u32x8 { // CHECK: %{{[0-9]+}} = call <8 x i32> @llvm.usub.sat.v8i32(<8 x i32> %{{x|_3}}, <8 x i32> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u32x16 #[no_mangle] pub unsafe fn usub_u32x16(x: u32x16, y: u32x16) -> u32x16 { // CHECK: %{{[0-9]+}} = call <16 x i32> @llvm.usub.sat.v16i32(<16 x i32> %{{x|_3}}, <16 x i32> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u64x2 #[no_mangle] pub unsafe fn usub_u64x2(x: u64x2, y: u64x2) -> u64x2 { // CHECK: %{{[0-9]+}} = call <2 x i64> @llvm.usub.sat.v2i64(<2 x i64> %{{x|_3}}, <2 x i64> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u64x4 #[no_mangle] pub unsafe fn usub_u64x4(x: u64x4, y: u64x4) -> u64x4 { // CHECK: %{{[0-9]+}} = call <4 x i64> @llvm.usub.sat.v4i64(<4 x i64> %{{x|_3}}, <4 x i64> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u64x8 #[no_mangle] pub unsafe fn usub_u64x8(x: u64x8, y: u64x8) -> u64x8 { // CHECK: %{{[0-9]+}} = call <8 x i64> @llvm.usub.sat.v8i64(<8 x i64> %{{x|_3}}, <8 x i64> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u128x2 #[no_mangle] pub unsafe fn usub_u128x2(x: u128x2, y: u128x2) -> u128x2 { // CHECK: %{{[0-9]+}} = call <2 x i128> @llvm.usub.sat.v2i128(<2 x i128> %{{x|_3}}, <2 x i128> %{{y|_4}}) simd_saturating_sub(x, y) } // CHECK-LABEL: @usub_u128x4 #[no_mangle] pub unsafe fn usub_u128x4(x: u128x4, y: u128x4) -> u128x4 { // CHECK: %{{[0-9]+}} = call <4 x i128> @llvm.usub.sat.v4i128(<4 x i128> %{{x|_3}}, <4 x i128> %{{y|_4}}) simd_saturating_sub(x, y) }<|fim▁end|>
<|file_name|>UPnPDocParser.java<|end_file_name|><|fim▁begin|>/** * * Copyright (C) 2004-2008 FhG Fokus * * This file is part of the FhG Fokus UPnP stack - an open source UPnP implementation * with some additional features * * You can redistribute the FhG Fokus UPnP stack and/or modify it * under the terms of the GNU General Public License Version 3 as published by * the Free Software Foundation. * * For a license to use the FhG Fokus UPnP stack software under conditions * other than those described here, or to purchase support for this * software, please contact Fraunhofer FOKUS by e-mail at the following * addresses: * [email protected] * * The FhG Fokus UPnP stack is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License<|fim▁hole|>* */ package de.fraunhofer.fokus.upnp.core; import java.util.Hashtable; import java.util.Vector; import org.xml.sax.Attributes; import org.xml.sax.SAXException; import de.fraunhofer.fokus.upnp.util.SAXTemplateHandler; /** * This class is used to parse UPnPDoc messages. * * @author Alexander Koenig * * */ public class UPnPDocParser extends SAXTemplateHandler { /** Doc entries for the current service type */ private Vector currentDocEntryList = new Vector(); private boolean isAction = false; private boolean isStateVariable = false; private String currentServiceType = null; private String currentArgumentName = null; private String currentArgumentDescription = null; private UPnPDocEntry currentDocEntry = null; /** Hashtable containing the UPnP doc entry list for one service type */ private Hashtable docEntryFromServiceTypeTable = new Hashtable(); /* * (non-Javadoc) * * @see de.fraunhofer.fokus.upnp.util.SAXTemplateHandler#processStartElement(java.lang.String, * java.lang.String, java.lang.String, org.xml.sax.Attributes) */ public void processStartElement(String uri, String name, String name2, Attributes atts) throws SAXException { if (getTagCount() == 2) { for (int i = 0; i < atts.getLength(); i++) { if (atts.getQName(i).equalsIgnoreCase("serviceType")) { currentServiceType = atts.getValue(i); currentDocEntryList = new Vector(); } } } if (getTagCount() == 3 && currentServiceType != null) { isAction = getCurrentTag().equalsIgnoreCase("actionList"); isStateVariable = getCurrentTag().equalsIgnoreCase("serviceStateTable"); } if (getTagCount() == 4 && currentServiceType != null) { currentDocEntry = new UPnPDocEntry(currentServiceType); } } /* * (non-Javadoc) * * @see de.fraunhofer.fokus.upnp.util.SAXTemplateHandler#processEndElement(java.lang.String, * java.lang.String, java.lang.String) */ public void processEndElement(String uri, String localName, String name) throws SAXException { if (getTagCount() == 6 && isAction && currentDocEntry != null && currentArgumentName != null && currentArgumentDescription != null) { currentDocEntry.addArgumentDescription(currentArgumentName, currentArgumentDescription); currentArgumentName = null; currentArgumentDescription = null; } if (getTagCount() == 4) { if (currentDocEntry != null && currentDocEntry.getActionName() != null && isAction) { // TemplateService.printMessage(" Add doc entry for action " + // currentDocEntry.getActionName()); currentDocEntryList.add(currentDocEntry); } if (currentDocEntry != null && currentDocEntry.getStateVariableName() != null && isStateVariable) { // 
TemplateService.printMessage(" Add doc entry for state variable " + // currentDocEntry.getStateVariableName()); currentDocEntryList.add(currentDocEntry); } currentDocEntry = null; } if (getTagCount() == 3) { isAction = false; isStateVariable = false; } if (getTagCount() == 2) { // store list with doc entries for one service type docEntryFromServiceTypeTable.put(currentServiceType, currentDocEntryList); currentServiceType = null; currentDocEntryList = null; } } /* * (non-Javadoc) * * @see de.fraunhofer.fokus.upnp.util.SAXTemplateHandler#processContentElement(java.lang.String) */ public void processContentElement(String content) throws SAXException { if (getTagCount() == 5 && currentDocEntry != null) { if (getCurrentTag().equalsIgnoreCase("name") && isAction) { currentDocEntry.setActionName(content.trim()); } if (getCurrentTag().equalsIgnoreCase("name") && isStateVariable) { currentDocEntry.setStateVariableName(content.trim()); } if (getCurrentTag().equalsIgnoreCase("description")) { currentDocEntry.setDescription(content.trim()); } } if (getTagCount() == 7 && currentDocEntry != null) { if (getCurrentTag().equalsIgnoreCase("name")) { currentArgumentName = content.trim(); } if (getCurrentTag().equalsIgnoreCase("description")) { currentArgumentDescription = content.trim(); } } } /** * Retrieves the upnpDocEntryTable. * * @return The upnpDocEntryTable */ public Hashtable getDocEntryFormServiceTypeTable() { return docEntryFromServiceTypeTable; } }<|fim▁end|>
* along with this program; if not, see <http://www.gnu.org/licenses/> * or write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
<|file_name|>parser.rs<|end_file_name|><|fim▁begin|>use crate::plugins; use comrak::{ format_html, nodes::{AstNode, NodeHtmlBlock, NodeValue}, parse_document, ComrakExtensionOptions, ComrakOptions, ComrakParseOptions, ComrakRenderOptions, }; use std::string::FromUtf8Error; use std::{ collections::BTreeMap, sync::{Arc, Mutex}, }; use syntect::{ highlighting::{Color, ThemeSet}, html::highlighted_html_for_string, parsing::SyntaxSet, }; use thiserror::Error; use typed_arena::Arena; use url_path::UrlPath; #[derive(Error, Debug)] pub enum ParseError { #[error("Utf8Error: `{0}`")] Utf8Error(#[from] FromUtf8Error), #[error("Error getting lock from embed file")] EmbedFileLockError, #[error("Error parsing md file")] MdParseError, } #[derive(Debug)] pub struct Html { pub title: Option<String>, pub content: String, } pub struct Settings { /// add a base directory for all links to other md files base_dir: Option<String>, } impl Default for Settings { fn default() -> Self { Settings { base_dir: None } } } pub fn parse(arg: &str) -> Result<Html, ParseError> { let referred_files = pre_parse_get_embedded_files(arg); let embed_files = if let Ok(referred_files) = referred_files { let file_contents = plugins::fetch_file_contents(referred_files); Some(file_contents) } else { None }; parse_with_settings(arg, &embed_files, &Settings::default()) } pub fn parse_with_base_dir( arg: &str, base_dir: &str, embed_files: &Option<BTreeMap<String, Vec<u8>>>, ) -> Result<Html, ParseError> { let settings = Settings { base_dir: Some(base_dir.to_string()), ..Default::default() }; parse_with_settings(arg, &embed_files, &settings) } pub fn parse_with_settings( arg: &str, embed_files: &Option<BTreeMap<String, Vec<u8>>>, settings: &Settings, ) -> Result<Html, ParseError> { let html = parse_via_comrak(arg, &embed_files, settings); html } fn get_comrak_options() -> ComrakOptions { ComrakOptions { extension: ComrakExtensionOptions { strikethrough: true, tagfilter: false, table: true, autolink: true, tasklist: true, superscript: false, header_ids: None, footnotes: true, description_lists: true, }, parse: ComrakParseOptions { smart: false, default_info_string: None, }, render: ComrakRenderOptions { hardbreaks: true, github_pre_lang: true, width: 0, unsafe_: true, escape: false, }, } } fn iter_nodes<'a, F>( node: &'a AstNode<'a>, is_heading: Arc<Mutex<bool>>, title: Arc<Mutex<Option<String>>>, f: &F, ) -> Result<(), ParseError> where F: Fn(&'a AstNode<'a>) -> Result<(), ParseError>, { f(node)?; for c in node.children() { iter_nodes(c, is_heading.clone(), title.clone(), f)?; } Ok(()) } fn pre_iter_nodes<'a, F>( node: &'a AstNode<'a>, files: Arc<Mutex<Vec<String>>>, f: &F, ) -> Result<(), ParseError> where F: Fn(&'a AstNode<'a>) -> Result<(), ParseError>, { f(node)?; for c in node.children() { pre_iter_nodes(c, files.clone(), f)?; } Ok(()) } /// /// Extract the embeded files in img image and make it as a lookup pub fn pre_parse_get_embedded_files( arg: &str, ) -> Result<Vec<String>, ParseError> { // The returned nodes are created in the supplied Arena, and are bound by its lifetime. 
let arena = Arena::new(); let option = get_comrak_options(); let root = parse_document(&arena, arg, &option); let embed_files: Arc<Mutex<Vec<String>>> = Arc::new(Mutex::new(vec![])); pre_iter_nodes(root, embed_files.clone(), &|node| { let ref mut value = node.data.borrow_mut().value; let new_value = match value { &mut NodeValue::Image(ref link) => { let link_url = String::from_utf8(link.url.clone())?; if let Ok(mut embed_files) = embed_files.lock() { embed_files.push(link_url); } value.clone() } _ => value.clone(), }; *value = new_value; Ok(()) })?; let embedded = match embed_files.lock() { Ok(files) => Ok((*files).to_owned()), Err(_e) => Err(ParseError::EmbedFileLockError), }; embedded } fn parse_via_comrak( arg: &str, embed_files: &Option<BTreeMap<String, Vec<u8>>>, settings: &Settings, ) -> Result<Html, ParseError> { // The returned nodes are created in the supplied Arena, and are bound by its lifetime. let arena = Arena::new(); let option = get_comrak_options(); let title: Arc<Mutex<Option<String>>> = Arc::new(Mutex::new(None)); let is_heading: Arc<Mutex<bool>> = Arc::new(Mutex::new(false)); let root = parse_document(&arena, arg, &option); iter_nodes(root, is_heading.clone(), title.clone(), &|node| { let ref mut value = node.data.borrow_mut().value; let new_value = match value { &mut NodeValue::CodeBlock(ref codeblock) => { let codeblock_info = String::from_utf8(codeblock.info.to_owned()) .expect("error converting to string"); let codeblock_literal = String::from_utf8(codeblock.literal.to_owned()) .expect("error converting to string"); if let Ok(out) = plugins::plugin_executor( &codeblock_info, &codeblock_literal, ) { NodeValue::HtmlBlock(NodeHtmlBlock { literal: out.into_bytes(), block_type: 0, }) } else if let Some(code_block_html) = format_source_code(&codeblock_info, &codeblock_literal) { code_block_html } else { value.clone() } } &mut NodeValue::Link(ref nodelink) => { if let Ok(url) = String::from_utf8(nodelink.url.clone()) { if let Some(ref base_dir) = settings.base_dir { let url1 = UrlPath::new(&url); let url2 = url1.normalize(); let url3 = if url1.is_external() { url2 } else if url1.is_absolute() { url2 } else { format!("{}/{}", base_dir, url) }; let url4 = UrlPath::new(&url3); let url5 = url4.normalize(); let url6 = if url4.is_external() && !url4.is_extension("md") { // leave as it url5 } else { format!("/#{}", url5) }; log::info!("url6: {}", url6); let mut new_nodelink = nodelink.clone(); new_nodelink.url = url6.into_bytes(); NodeValue::Link(new_nodelink) } else { value.clone() } } else { value.clone() } } &mut NodeValue::Heading(ref heading) => { if heading.level == 1 { if let Ok(mut is_heading) = is_heading.lock() { *is_heading = true; } } value.clone() } &mut NodeValue::Text(ref text) => { if let Ok(is_heading) = is_heading.lock() { if *is_heading { let txt = String::from_utf8(text.to_owned()) .expect("Unable to convert to string"); if let Ok(mut title) = title.lock() { if title.is_none() { // only when unset *title = Some(txt.to_string());<|fim▁hole|> } value.clone() } &mut NodeValue::Image(ref link) => { let link_url = String::from_utf8(link.url.clone()) .expect("unable to convert to string"); match plugins::embed_handler(&link_url, embed_files) { Ok(html) => NodeValue::HtmlBlock(NodeHtmlBlock { literal: html.into_bytes(), block_type: 0, }), Err(e) => { log::error!("error: {:#?}", e); value.clone() } } } _ => value.clone(), }; *value = new_value; Ok(()) })?; let mut html = vec![]; if let Ok(()) = format_html(root, &option, &mut html) { let render_html = 
String::from_utf8(html)?; let title = if let Ok(got) = title.lock() { if let Some(ref got) = *got { Some(got.to_string()) } else { None } } else { None }; Ok(Html { title, content: render_html, }) } else { Err(ParseError::MdParseError) } } fn format_source_code(lang: &str, literal: &str) -> Option<NodeValue> { let lang_name = match lang { "rust" => "Rust", _ => "text", }; let ss = SyntaxSet::load_defaults_newlines(); let ts = ThemeSet::load_defaults(); let theme = &ts.themes["base16-ocean.light"]; let _c = theme.settings.background.unwrap_or(Color::WHITE); if let Some(syntax) = ss.find_syntax_by_name(lang_name) { let html = highlighted_html_for_string(literal, &ss, &syntax, theme); Some(NodeValue::HtmlBlock(NodeHtmlBlock { literal: html.into_bytes(), block_type: 0, })) } else { None } }<|fim▁end|>
} } }
<|file_name|>NetworkTypes.py<|end_file_name|><|fim▁begin|># # Functions for interacting with the network_types table in the database # # Mark Huang <[email protected]> # Copyright (C) 2006 The Trustees of Princeton University # from PLC.Faults import * from PLC.Parameter import Parameter from PLC.Table import Row, Table class NetworkType(Row): """ Representation of a row in the network_types table. To use, instantiate with a dict of values. """ table_name = 'network_types' primary_key = 'type' join_tables = ['interfaces'] fields = { 'type': Parameter(str, "Network type", max = 20), } def validate_type(self, name): # Make sure name is not blank if not len(name): raise PLCInvalidArgument("Network type must be specified") # Make sure network type does not alredy exist conflicts = NetworkTypes(self.api, [name])<|fim▁hole|> raise PLCInvalidArgument("Network type name already in use") return name class NetworkTypes(Table): """ Representation of the network_types table in the database. """ def __init__(self, api, types = None): Table.__init__(self, api, NetworkType) sql = "SELECT %s FROM network_types" % \ ", ".join(NetworkType.fields) if types: sql += " WHERE type IN (%s)" % ", ".join( [ api.db.quote (t) for t in types ] ) self.selectall(sql)<|fim▁end|>
if conflicts:
<|file_name|>SessionPlots.hpp<|end_file_name|><|fim▁begin|>/* * SessionPlots.hpp * * Copyright (C) 2009-12 by RStudio, Inc. * * Unless you have received this program directly from RStudio pursuant * to the terms of a commercial license agreement with RStudio, then * this program is licensed to you under the terms of version 3 of the * GNU Affero General Public License. This program is distributed WITHOUT * ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT, * MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the * AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details. * */ #ifndef SESSION_PLOTS_HPP #define SESSION_PLOTS_HPP namespace rstudio { namespace core { class Error;<|fim▁hole|> namespace rstudio { namespace session { namespace modules { namespace plots { bool haveCairoPdf(); core::Error initialize(); } // namespace plots } // namespace modules } // namespace session } // namespace rstudio #endif // SESSION_PLOTS_HPP<|fim▁end|>
} }
<|file_name|>Canonicalizer20010315ExclusiveTest.java<|end_file_name|><|fim▁begin|>/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.xml.security.test.stax.c14n; import org.apache.xml.security.stax.impl.transformer.canonicalizer.Canonicalizer20010315_Excl; import org.junit.Before; import org.apache.xml.security.stax.ext.stax.XMLSecEvent; import org.apache.xml.security.stax.impl.transformer.canonicalizer.Canonicalizer20010315_ExclOmitCommentsTransformer; import org.apache.xml.security.stax.impl.transformer.canonicalizer.Canonicalizer20010315_ExclWithCommentsTransformer; import org.apache.xml.security.test.stax.utils.XMLSecEventAllocator; import org.apache.xml.security.utils.XMLUtils; import org.w3c.dom.Document; import org.w3c.dom.Element; import javax.xml.namespace.QName; import javax.xml.stream.XMLEventReader; import javax.xml.stream.XMLInputFactory; import javax.xml.stream.XMLStreamException; import javax.xml.transform.Transformer; import javax.xml.transform.TransformerFactory; import javax.xml.transform.dom.DOMSource; import javax.xml.transform.stream.StreamResult; import java.io.*; import java.net.URL; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * @author $Author: coheigea $ * @version $Revision: 1721336 $ $Date: 2015-12-22 10:45:18 +0000 (Tue, 22 Dec 2015) $ */ public class Canonicalizer20010315ExclusiveTest extends org.junit.Assert { private XMLInputFactory xmlInputFactory; @Before public void setUp() throws Exception { this.xmlInputFactory = XMLInputFactory.newInstance(); this.xmlInputFactory.setEventAllocator(new XMLSecEventAllocator()); } @org.junit.Test public void test221excl() throws Exception { ByteArrayOutputStream baos = new ByteArrayOutputStream(); Canonicalizer20010315_ExclWithCommentsTransformer c = new Canonicalizer20010315_ExclWithCommentsTransformer(); c.setOutputStream(baos); XMLEventReader xmlSecEventReader = xmlInputFactory.createXMLEventReader( this.getClass().getClassLoader().getResourceAsStream( "org/apache/xml/security/c14n/inExcl/example2_2_1.xml") ); XMLSecEvent xmlSecEvent = null; while (xmlSecEventReader.hasNext()) { xmlSecEvent = (XMLSecEvent) xmlSecEventReader.nextEvent(); if (xmlSecEvent.isStartElement() && xmlSecEvent.asStartElement().getName().equals(new QName("http://example.net", "elem2"))) { break; } } while (xmlSecEventReader.hasNext()) { c.transform(xmlSecEvent); if (xmlSecEvent.isEndElement() && xmlSecEvent.asEndElement().getName().equals(new QName("http://example.net", "elem2"))) { break; } xmlSecEvent = (XMLSecEvent) xmlSecEventReader.nextEvent(); } byte[] reference = getBytesFromResource(this.getClass().getClassLoader().getResource( "org/apache/xml/security/c14n/inExcl/example2_2_c14nized_exclusive.xml")); boolean equals = 
java.security.MessageDigest.isEqual(reference, baos.toByteArray()); if (!equals) { System.out.println("Expected:\n" + new String(reference, "UTF-8")); System.out.println(""); System.out.println("Got:\n" + new String(baos.toByteArray(), "UTF-8")); } assertTrue(equals); } @org.junit.Test public void test222excl() throws Exception { ByteArrayOutputStream baos = new ByteArrayOutputStream(); Canonicalizer20010315_ExclWithCommentsTransformer c = new Canonicalizer20010315_ExclWithCommentsTransformer(); c.setOutputStream(baos); canonicalize(c, this.getClass().getClassLoader().getResourceAsStream( "org/apache/xml/security/c14n/inExcl/example2_2_2.xml"), new QName("http://example.net", "elem2") ); byte[] reference = getBytesFromResource(this.getClass().getClassLoader().getResource( "org/apache/xml/security/c14n/inExcl/example2_2_c14nized_exclusive.xml")); boolean equals = java.security.MessageDigest.isEqual(reference, baos.toByteArray()); if (!equals) { System.out.println("Expected:\n" + new String(reference, "UTF-8")); System.out.println(""); System.out.println("Got:\n" + new String(baos.toByteArray(), "UTF-8")); } assertTrue(equals); } @org.junit.Test public void test24excl() throws Exception {<|fim▁hole|> ByteArrayOutputStream baos = new ByteArrayOutputStream(); Canonicalizer20010315_ExclWithCommentsTransformer c = new Canonicalizer20010315_ExclWithCommentsTransformer(); c.setOutputStream(baos); canonicalize(c, this.getClass().getClassLoader().getResourceAsStream( "org/apache/xml/security/c14n/inExcl/example2_4.xml"), new QName("http://example.net", "elem2") ); byte[] reference = getBytesFromResource(this.getClass().getClassLoader().getResource( "org/apache/xml/security/c14n/inExcl/example2_4_c14nized.xml")); boolean equals = java.security.MessageDigest.isEqual(reference, baos.toByteArray()); if (!equals) { System.out.println("Expected:\n" + new String(reference, "UTF-8")); System.out.println(""); System.out.println("Got:\n" + new String(baos.toByteArray(), "UTF-8")); } assertTrue(equals); } @org.junit.Test public void testComplexDocexcl() throws Exception { ByteArrayOutputStream baos = new ByteArrayOutputStream(); Canonicalizer20010315_ExclWithCommentsTransformer c = new Canonicalizer20010315_ExclWithCommentsTransformer(); c.setOutputStream(baos); canonicalize(c, this.getClass().getClassLoader().getResourceAsStream( "org/apache/xml/security/c14n/inExcl/plain-soap-1.1.xml"), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body", "env") ); byte[] reference = getBytesFromResource(this.getClass().getClassLoader().getResource( "org/apache/xml/security/c14n/inExcl/plain-soap-c14nized.xml")); boolean equals = java.security.MessageDigest.isEqual(reference, baos.toByteArray()); if (!equals) { System.out.println("Expected:\n" + new String(reference, "UTF-8")); System.out.println(""); System.out.println("Got:\n" + new String(baos.toByteArray(), "UTF-8")); } assertTrue(equals); } @org.junit.Test public void testNodeSet() throws Exception { final String XML = "<env:Envelope" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">" + "<env:Body wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>" + "</env:Envelope>"; final String c14nXML = "<env:Body" + " 
xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>"; ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("env"); inclusiveNamespaces.add("ns0"); inclusiveNamespaces.add("xsi"); inclusiveNamespaces.add("wsu"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } /** * Method test24Aexcl - a testcase for SANTUARIO-263 * "Canonicalizer can't handle dynamical created DOM correctly" * https://issues.apache.org/jira/browse/SANTUARIO-263 */ @org.junit.Test public void test24Aexcl() throws Exception { Document doc = XMLUtils.createDocumentBuilder(false).newDocument(); Element local = doc.createElementNS("foo:bar", "dsig:local"); Element test = doc.createElementNS("http://example.net", "etsi:test"); Element elem2 = doc.createElementNS("http://example.net", "etsi:elem2"); Element stuff = doc.createElementNS("foo:bar", "dsig:stuff"); elem2.appendChild(stuff); test.appendChild(elem2); local.appendChild(test); doc.appendChild(local); TransformerFactory tf = TransformerFactory.newInstance(); Transformer t = tf.newTransformer(); StringWriter stringWriter = new StringWriter(); StreamResult streamResult = new StreamResult(stringWriter); t.transform(new DOMSource(doc), streamResult); ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); Canonicalizer20010315_ExclWithCommentsTransformer c = new Canonicalizer20010315_ExclWithCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(stringWriter.toString()), new QName("http://example.net", "elem2")); byte[] reference = getBytesFromResource(this.getClass().getClassLoader().getResource( "org/apache/xml/security/c14n/inExcl/example2_4_c14nized.xml")); boolean equals = java.security.MessageDigest.isEqual(reference, baos.toByteArray()); assertTrue(equals); } /** * Test default namespace behavior if its in the InclusiveNamespace prefix list. 
* * @throws Exception */ @org.junit.Test public void testDefaultNSInInclusiveNamespacePrefixList1() throws Exception { final String XML = "<env:Envelope" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">" + "<env:Body wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>" + "</env:Envelope>"; final String c14nXML = "<env:Body" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " wsu:Id=\"body\">" + "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>"; { ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("#default"); inclusiveNamespaces.add("xsi"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } { //exactly the same outcome is expected if #default is not set: ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("xsi"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } } /** * Test default namespace behavior if its in the InclusiveNamespace prefix list. 
* * @throws Exception */ @org.junit.Test public void testDefaultNSInInclusiveNamespacePrefixList2() throws Exception { final String XML = "<env:Envelope" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns=\"http://example.com\"" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">" + "<env:Body wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xmlns=\"\" xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>" + "</env:Envelope>"; final String c14nXML1 = "<env:Body" + " xmlns=\"http://example.com\"" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " wsu:Id=\"body\">" + "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xsi:type=\"ns0:ping\">" + "<ns0:text xmlns=\"\" xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>"; final String c14nXML2 = "<env:Body" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " wsu:Id=\"body\">" + "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>"; { ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("#default"); inclusiveNamespaces.add("xsi"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML1); } { ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("xsi"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML2); } } /** * Test default namespace behavior if its in the InclusiveNamespace prefix list. 
* * @throws Exception */ @org.junit.Test public void testDefaultNSInInclusiveNamespacePrefixList3() throws Exception { final String XML = "<env:Envelope" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns=\"\"" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">" + "<env:Body wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>" + "</env:Envelope>"; final String c14nXML = "<env:Body" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " wsu:Id=\"body\">" + "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>"; { ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("#default"); inclusiveNamespaces.add("xsi"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } { //exactly the same outcome is expected if #default is not set: ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("xsi"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } } /** * Test default namespace behavior if its in the InclusiveNamespace prefix list. 
* * @throws Exception */ @org.junit.Test public void testDefaultNSInInclusiveNamespacePrefixList4() throws Exception { final String XML = "<env:Envelope" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">" + "<env:Body wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xmlns=\"\" xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>" + "</env:Envelope>"; final String c14nXML = "<env:Body" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " wsu:Id=\"body\">" + "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>"; { ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("#default"); inclusiveNamespaces.add("xsi"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } { //exactly the same outcome is expected if #default is not set: ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("xsi"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } } /** * Test default namespace behavior if its in the InclusiveNamespace prefix list. 
* * @throws Exception */ @org.junit.Test public void testPropagateDefaultNs1() throws Exception { final String XML = "<env:Envelope" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">" + "<env:Body wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>" + "</env:Envelope>"; final String c14nXML = "<env:Body" + " xmlns=\"\"" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\"" + " wsu:Id=\"body\">" + "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>"; ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("#default"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); transformerProperties.put(Canonicalizer20010315_Excl.PROPAGATE_DEFAULT_NAMESPACE, Boolean.TRUE); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } @org.junit.Test public void testPropagateDefaultNs2() throws Exception { final String XML = "<env:Envelope" + " xmlns=\"http://example.com\"" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">" + "<env:Body wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>" + "</env:Envelope>"; final String c14nXML = "<env:Body" + " xmlns=\"http://example.com\"" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\"" + " wsu:Id=\"body\">" + "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>"; ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("#default"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); transformerProperties.put(Canonicalizer20010315_Excl.PROPAGATE_DEFAULT_NAMESPACE, Boolean.TRUE); 
c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } @org.junit.Test public void testPropagateDefaultNs3() throws Exception { final String XML = "<Envelope" + " xmlns=\"http://example.com\"" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">" + "<env:Body wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xmlns=\"\" xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>" + "</Envelope>"; final String c14nXML = "<env:Body" + " xmlns=\"http://example.com\"" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\"" + " wsu:Id=\"body\">" + "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"ns0:ping\">" + "<ns0:text xmlns=\"\" xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>"; ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("#default"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); transformerProperties.put(Canonicalizer20010315_Excl.PROPAGATE_DEFAULT_NAMESPACE, Boolean.TRUE); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } @org.junit.Test public void testPropagateDefaultNs4() throws Exception { final String XML = "<Envelope" + " xmlns=\"\"" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">" + "<env:Body wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>" + "</Envelope>"; final String c14nXML = "<env:Body" + " xmlns=\"\"" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\"" + " wsu:Id=\"body\">" + "<ns0:Ping xmlns:ns0=\"http://xmlsoap.org/Ping\" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>"; ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("#default"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); 
transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); transformerProperties.put(Canonicalizer20010315_Excl.PROPAGATE_DEFAULT_NAMESPACE, Boolean.TRUE); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://schemas.xmlsoap.org/soap/envelope/", "Body")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } @org.junit.Test public void testPropagateDefaultNs5() throws Exception { final String XML = "<env:Envelope" + " xmlns=\"http://example.com\"" + " xmlns:env=\"http://schemas.xmlsoap.org/soap/envelope/\"" + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema\"" + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"" + " xmlns:ns0=\"http://xmlsoap.org/Ping\"" + " xmlns:wsu=\"http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd\">" + "<env:Body xmlns=\"\" wsu:Id=\"body\">" + "<ns0:Ping xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>" + "</env:Body>" + "</env:Envelope>"; final String c14nXML = "<ns0:Ping xmlns=\"\" xmlns:ns0=\"http://xmlsoap.org/Ping\" " + "xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" xsi:type=\"ns0:ping\">" + "<ns0:text xsi:type=\"xsd:string\">hello</ns0:text>" + "</ns0:Ping>"; ByteArrayOutputStream baos = new ByteArrayOutputStream(); List<String> inclusiveNamespaces = new ArrayList<String>(); inclusiveNamespaces.add("#default"); Canonicalizer20010315_ExclOmitCommentsTransformer c = new Canonicalizer20010315_ExclOmitCommentsTransformer(); Map<String, Object> transformerProperties = new HashMap<String, Object>(); transformerProperties.put(Canonicalizer20010315_Excl.INCLUSIVE_NAMESPACES_PREFIX_LIST, inclusiveNamespaces); transformerProperties.put(Canonicalizer20010315_Excl.PROPAGATE_DEFAULT_NAMESPACE, Boolean.TRUE); c.setProperties(transformerProperties); c.setOutputStream(baos); canonicalize(c, new StringReader(XML), new QName("http://xmlsoap.org/Ping", "Ping")); assertEquals(new String(baos.toByteArray(), "UTF-8"), c14nXML); } private void canonicalize( Canonicalizer20010315_Excl c, InputStream inputStream, QName elementName) throws XMLStreamException { canonicalize(c, xmlInputFactory.createXMLEventReader(inputStream), elementName); } private void canonicalize( Canonicalizer20010315_Excl c, Reader reader, QName elementName) throws XMLStreamException { canonicalize(c, xmlInputFactory.createXMLEventReader(reader), elementName); } private void canonicalize( Canonicalizer20010315_Excl c, XMLEventReader xmlEventReader, QName elementName) throws XMLStreamException { XMLSecEvent xmlSecEvent = null; while (xmlEventReader.hasNext()) { xmlSecEvent = (XMLSecEvent) xmlEventReader.nextEvent(); if (xmlSecEvent.isStartElement() && xmlSecEvent.asStartElement().getName().equals(elementName)) { break; } } while (xmlEventReader.hasNext()) { c.transform(xmlSecEvent); if (xmlSecEvent.isEndElement() && xmlSecEvent.asEndElement().getName().equals(elementName)) { break; } xmlSecEvent = (XMLSecEvent) xmlEventReader.nextEvent(); } } public static byte[] getBytesFromResource(URL resource) throws IOException { ByteArrayOutputStream baos = new ByteArrayOutputStream(); InputStream inputStream = resource.openStream(); try { byte buf[] = new byte[1024]; int len; while ((len = inputStream.read(buf)) > 0) { baos.write(buf, 0, len); } return baos.toByteArray(); } finally { inputStream.close(); } } }<|fim▁end|>
<|file_name|>uploader.py<|end_file_name|><|fim▁begin|># Copyright 2015-2016 Rackspace US, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import boto3 import logging from os import path LOG = logging.getLogger(__name__) MAX_PACKAGE_SIZE = 50000000 class PackageUploader(object): '''TODO: Should we decouple the config from the Object Init''' def __init__(self, config, profile_name): self._config = config self._vpc_config = self._format_vpc_config() self._aws_session = boto3.session.Session(region_name=config.region, profile_name=profile_name) self._lambda_client = self._aws_session.client('lambda') self.version = None ''' Calls the AWS methods to upload an existing package and update the function configuration returns the package version ''' def upload_existing(self, pkg): environment = {'Variables': self._config.variables} self._validate_package_size(pkg.zip_file) with open(pkg.zip_file, "rb") as fil: zip_file = fil.read() LOG.debug('running update_function_code') conf_update_resp = None if self._config.s3_bucket: self._upload_s3(pkg.zip_file) conf_update_resp = self._lambda_client.update_function_code( FunctionName=self._config.name, S3Bucket=self._config.s3_bucket, S3Key=self._config.s3_package_name(), Publish=False, ) else: conf_update_resp = self._lambda_client.update_function_code( FunctionName=self._config.name, ZipFile=zip_file, Publish=False, ) LOG.debug("AWS update_function_code response: %s" % conf_update_resp) LOG.debug('running update_function_configuration') response = self._lambda_client.update_function_configuration( FunctionName=self._config.name, Handler=self._config.handler, Role=self._config.role, Description=self._config.description, Timeout=self._config.timeout, MemorySize=self._config.memory, VpcConfig=self._vpc_config, Environment=environment, TracingConfig=self._config.tracing, Runtime=self._config.runtime, ) LOG.debug("AWS update_function_configuration response: %s" % response) version = response.get('Version') # Publish the version after upload and config update if needed if self._config.publish: resp = self._lambda_client.publish_version( FunctionName=self._config.name, ) LOG.debug("AWS publish_version response: %s" % resp) version = resp.get('Version') return version ''' Creates and uploads a new lambda function returns the package version ''' def upload_new(self, pkg): environment = {'Variables': self._config.variables} code = {} if self._config.s3_bucket: code = {'S3Bucket': self._config.s3_bucket, 'S3Key': self._config.s3_package_name()} self._upload_s3(pkg.zip_file) else: self._validate_package_size(pkg.zip_file) with open(pkg.zip_file, "rb") as fil: zip_file = fil.read() code = {'ZipFile': zip_file} LOG.debug('running create_function_code') response = self._lambda_client.create_function( FunctionName=self._config.name, Runtime=self._config.runtime, Handler=self._config.handler, Role=self._config.role, Code=code, Description=self._config.description, Timeout=self._config.timeout, MemorySize=self._config.memory, Publish=self._config.publish, VpcConfig=self._vpc_config, 
Environment=environment, TracingConfig=self._config.tracing, ) LOG.debug("AWS create_function response: %s" % response) return response.get('Version') ''' Auto determines whether the function exists or not and calls the appropriate method (upload_existing or upload_new). ''' def upload(self, pkg): existing_function = True try: get_resp = self._lambda_client.get_function_configuration( FunctionName=self._config.name) LOG.debug("AWS get_function_configuration response: %s" % get_resp) except: # noqa: E722 existing_function = False LOG.debug("function not found creating new function") if existing_function: self.version = self.upload_existing(pkg) else: self.version = self.upload_new(pkg) ''' Create/update an alias to point to the package. Raises an exception if the package has not been uploaded. ''' def alias(self): # if self.version is still None raise exception if self.version is None: raise Exception('Must upload package before applying alias') if self._alias_exists(): self._update_alias() else: self._create_alias() ''' Pulls down the current list of aliases and checks to see if an alias exists. ''' def _alias_exists(self): resp = self._lambda_client.list_aliases( FunctionName=self._config.name) for alias in resp.get('Aliases'): if alias.get('Name') == self._config.alias: return True return False '''Creates alias''' def _create_alias(self): LOG.debug("Creating new alias %s" % self._config.alias) resp = self._lambda_client.create_alias( FunctionName=self._config.name, Name=self._config.alias, FunctionVersion=self.version, Description=self._config.alias_description, ) LOG.debug("AWS create_alias response: %s" % resp) '''Update alias''' def _update_alias(self): LOG.debug("Updating alias %s" % self._config.alias) resp = self._lambda_client.update_alias( FunctionName=self._config.name, Name=self._config.alias, FunctionVersion=self.version, Description=self._config.alias_description, ) LOG.debug("AWS update_alias response: %s" % resp) def _validate_package_size(self, pkg): ''' Logs a warning if the package size is over the current max package size ''' if path.getsize(pkg) > MAX_PACKAGE_SIZE: LOG.warning("Size of your deployment package is larger than 50MB!") <|fim▁hole|> def _format_vpc_config(self): ''' Returns {} if the VPC config is set to None by Config, returns the formatted config otherwise ''' if self._config.raw['vpc']: return { 'SubnetIds': self._config.raw['vpc']['subnets'], 'SecurityGroupIds': self._config.raw['vpc']['security_groups'] } else: return { 'SubnetIds': [], 'SecurityGroupIds': [], } def _upload_s3(self, zip_file): ''' Uploads the lambda package to s3 ''' s3_client = self._aws_session.client('s3') transfer = boto3.s3.transfer.S3Transfer(s3_client) transfer.upload_file(zip_file, self._config.s3_bucket, self._config.s3_package_name())<|fim▁end|>
<|file_name|>drive_api_service.cc<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chrome/browser/chromeos/drive/drive_api_service.h" #include <string> #include <vector> #include "base/bind.h" #include "base/message_loop_proxy.h" #include "chrome/browser/google_apis/drive_api_operations.h" #include "chrome/browser/google_apis/operation_runner.h" #include "chrome/browser/google_apis/time_util.h" #include "chrome/browser/profiles/profile.h" #include "chrome/common/net/url_util.h" #include "content/public/browser/browser_thread.h" using content::BrowserThread; namespace drive { namespace { // OAuth2 scopes for Drive API. const char kDriveScope[] = "https://www.googleapis.com/auth/drive"; const char kDriveAppsReadonlyScope[] = "https://www.googleapis.com/auth/drive.apps.readonly"; } // namespace DriveAPIService::DriveAPIService(const std::string& custom_user_agent) : profile_(NULL), runner_(NULL), custom_user_agent_(custom_user_agent) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); } DriveAPIService::~DriveAPIService() { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); if (runner_.get()) { runner_->operation_registry()->RemoveObserver(this); runner_->auth_service()->RemoveObserver(this); } } void DriveAPIService::Initialize(Profile* profile) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); profile_ = profile; std::vector<std::string> scopes; scopes.push_back(kDriveScope); scopes.push_back(kDriveAppsReadonlyScope); runner_.reset( new google_apis::OperationRunner(profile, scopes, custom_user_agent_)); runner_->Initialize(); runner_->auth_service()->AddObserver(this); runner_->operation_registry()->AddObserver(this); } void DriveAPIService::AddObserver(google_apis::DriveServiceObserver* observer) { observers_.AddObserver(observer); } void DriveAPIService::RemoveObserver( google_apis::DriveServiceObserver* observer) { observers_.RemoveObserver(observer); } bool DriveAPIService::CanStartOperation() const { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); return HasRefreshToken(); } void DriveAPIService::CancelAll() { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); runner_->CancelAll(); } bool DriveAPIService::CancelForFilePath(const FilePath& file_path) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); return operation_registry()->CancelForFilePath(file_path); } google_apis::OperationProgressStatusList DriveAPIService::GetProgressStatusList() const { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); return operation_registry()->GetProgressStatusList(); } void DriveAPIService::GetDocuments( const GURL& url, int64 start_changestamp, const std::string& search_query, bool shared_with_me, const std::string& directory_resource_id, const google_apis::GetDataCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); if (search_query.empty()) GetChangelist(url, start_changestamp, callback); else GetFilelist(url, search_query, callback); return; // TODO(kochi): Implement !directory_resource_id.empty() case. 
NOTREACHED(); } void DriveAPIService::GetFilelist( const GURL& url, const std::string& search_query, const google_apis::GetDataCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); runner_->StartOperationWithRetry( new google_apis::GetFilelistOperation(operation_registry(), url, search_query, callback)); } void DriveAPIService::GetChangelist( const GURL& url, int64 start_changestamp, const google_apis::GetDataCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); runner_->StartOperationWithRetry( new google_apis::GetChangelistOperation(operation_registry(), url, start_changestamp, callback)); } void DriveAPIService::GetDocumentEntry( const std::string& resource_id, const google_apis::GetDataCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); runner_->StartOperationWithRetry(new google_apis::GetFileOperation( operation_registry(), resource_id, callback)); } void DriveAPIService::GetAccountMetadata( const google_apis::GetDataCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); runner_->StartOperationWithRetry( new google_apis::GetAboutOperation(operation_registry(), callback)); } void DriveAPIService::GetApplicationInfo( const google_apis::GetDataCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); runner_->StartOperationWithRetry( new google_apis::GetApplistOperation(operation_registry(), callback)); } void DriveAPIService::DownloadDocument( const FilePath& virtual_path, const FilePath& local_cache_path, const GURL& document_url, google_apis::DocumentExportFormat format, const google_apis::DownloadActionCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. NOTREACHED(); } void DriveAPIService::DownloadFile( const FilePath& virtual_path, const FilePath& local_cache_path, const GURL& document_url, const google_apis::DownloadActionCallback& download_action_callback, const google_apis::GetContentCallback& get_content_callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. NOTREACHED(); } void DriveAPIService::DeleteDocument( const GURL& document_url, const google_apis::EntryActionCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. NOTREACHED(); } void DriveAPIService::AddNewDirectory( const GURL& parent_content_url, const FilePath::StringType& directory_name, const google_apis::GetDataCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. NOTREACHED(); } void DriveAPIService::CopyDocument( const std::string& resource_id, const FilePath::StringType& new_name, const google_apis::GetDataCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. NOTREACHED(); } void DriveAPIService::RenameResource( const GURL& resource_url, const FilePath::StringType& new_name, const google_apis::EntryActionCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. NOTREACHED(); } void DriveAPIService::AddResourceToDirectory(<|fim▁hole|> DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. 
NOTREACHED(); } void DriveAPIService::RemoveResourceFromDirectory( const GURL& parent_content_url, const GURL& resource_url, const std::string& resource_id, const google_apis::EntryActionCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. NOTREACHED(); } void DriveAPIService::InitiateUpload( const google_apis::InitiateUploadParams& params, const google_apis::InitiateUploadCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. NOTREACHED(); } void DriveAPIService::ResumeUpload( const google_apis::ResumeUploadParams& params, const google_apis::ResumeUploadCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. NOTREACHED(); } void DriveAPIService::AuthorizeApp( const GURL& resource_url, const std::string& app_ids, const google_apis::GetDataCallback& callback) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); // TODO(kochi): Implement this. NOTREACHED(); } bool DriveAPIService::HasAccessToken() const { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); return runner_->auth_service()->HasAccessToken(); } bool DriveAPIService::HasRefreshToken() const { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); return runner_->auth_service()->HasRefreshToken(); } google_apis::OperationRegistry* DriveAPIService::operation_registry() const { return runner_->operation_registry(); } void DriveAPIService::OnOAuth2RefreshTokenChanged() { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); if (CanStartOperation()) { FOR_EACH_OBSERVER( google_apis::DriveServiceObserver, observers_, OnReadyToPerformOperations()); } } void DriveAPIService::OnProgressUpdate( const google_apis::OperationProgressStatusList& list) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); FOR_EACH_OBSERVER( google_apis::DriveServiceObserver, observers_, OnProgressUpdate(list)); } void DriveAPIService::OnAuthenticationFailed( google_apis::GDataErrorCode error) { DCHECK(BrowserThread::CurrentlyOn(BrowserThread::UI)); FOR_EACH_OBSERVER( google_apis::DriveServiceObserver, observers_, OnAuthenticationFailed(error)); } } // namespace drive<|fim▁end|>
const GURL& parent_content_url, const GURL& resource_url, const google_apis::EntryActionCallback& callback) {
<|file_name|>S10.2.2_A1.1_T3.js<|end_file_name|><|fim▁begin|>// Copyright 2009 the Sputnik authors. All rights reserved. // This code is governed by the BSD license found in the LICENSE file. /** * @name: S10.2.2_A1.1_T3; * @section: 10.2.2; * @assertion: The scope chain is initialised to contain the same objects, * in the same order, as the calling context's scope chain; * @description: eval within global execution context; */ var i; var j; str1 = ''; str2 = ''; this.x = 1; this.y = 2; for(i in this){<|fim▁hole|> if(!(str1 === str2)){ $ERROR("#1: scope chain must contain same objects in the same order as the calling context"); }<|fim▁end|>
  str1+=i;
}

eval('for(j in this){\nstr2+=j;\n}');
<|file_name|>label_leaves_in_expr_with_numbered_intervals.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- from abjad.tools import scoretools from abjad.tools import scoretools from abjad.tools import markuptools from abjad.tools import scoretools from abjad.tools import pitchtools from abjad.tools.topleveltools import attach from abjad.tools.topleveltools import iterate def label_leaves_in_expr_with_numbered_intervals(expr, markup_direction=Up): r"""Label leaves in `expr` with numbered intervals: :: <|fim▁hole|> >>> staff = Staff(notes) >>> labeltools.label_leaves_in_expr_with_numbered_intervals(staff) .. doctest:: >>> print(format(staff)) \new Staff { c'8 ^ \markup { +25 } cs'''8 ^ \markup { -14 } b'8 ^ \markup { -15 } af8 ^ \markup { -10 } bf,8 ^ \markup { +1 } b,8 ^ \markup { +22 } a'8 ^ \markup { +1 } bf'8 ^ \markup { -4 } fs'8 ^ \markup { -1 } f'8 } :: >>> show(staff) # doctest: +SKIP Returns none. """ for note in iterate(expr).by_class(scoretools.Note): logical_voice_iterator = iterate(note).by_logical_voice_from_component( scoretools.Leaf, ) try: next(logical_voice_iterator) next_leaf = next(logical_voice_iterator) if isinstance(next_leaf, scoretools.Note): mci = pitchtools.NumberedInterval.from_pitch_carriers( note, next_leaf) markup = markuptools.Markup(mci, markup_direction) attach(markup, note) except StopIteration: pass<|fim▁end|>
>>> notes = scoretools.make_notes(
... [0, 25, 11, -4, -14, -13, 9, 10, 6, 5],
... [Duration(1, 8)],
... )
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>"""myproject URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
    1. Add an import: from my_app import views
    2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
    1. Add an import: from other_app.views import Home
    2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
    1. Add an import: from blog import urls as blog_urls
    2. Import the include() function: from django.conf.urls import url, include
    3. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url, include
from django.contrib import admin
from django.conf import settings

urlpatterns = [
    url(r'^admin/', admin.site.urls),
]

if settings.DEBUG:<|fim▁hole|>
    ]<|fim▁end|>
    import debug_toolbar
    urlpatterns += [
        url(r'^__debug__/', include(debug_toolbar.urls)),
<|file_name|>CreateSamples.py<|end_file_name|><|fim▁begin|>import os import unittest from __main__ import vtk, qt, ctk, slicer from slicer.ScriptedLoadableModule import * import logging # # CreateSamples # class CreateSamples(ScriptedLoadableModule): """Uses ScriptedLoadableModule base class, available at: https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py """ def __init__(self, parent): ScriptedLoadableModule.__init__(self, parent) self.parent.title = "CreateSamples" # TODO make this more human readable by adding spaces self.parent.categories = ["Examples"] self.parent.dependencies = [] self.parent.contributors = ["John Doe (AnyWare Corp.)"] # replace with "Firstname Lastname (Organization)" self.parent.helpText = """ This is an example of scripted loadable module bundled in an extension. It performs a simple thresholding on the input volume and optionally captures a screenshot. """ self.parent.acknowledgementText = """ This file was originally developed by Jean-Christophe Fillion-Robin, Kitware Inc. and Steve Pieper, Isomics, Inc. and was partially funded by NIH grant 3P41RR013218-12S1. """ # replace with organization, grant and thanks. # # CreateSamplesWidget # class CreateSamplesWidget(ScriptedLoadableModuleWidget): """Uses ScriptedLoadableModuleWidget base class, available at: https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py """ def setup(self): ScriptedLoadableModuleWidget.setup(self) # Instantiate and connect widgets ... generalParametersCollapsibleButton = ctk.ctkCollapsibleButton() generalParametersCollapsibleButton.text = "General parameters" self.layout.addWidget(generalParametersCollapsibleButton) # Layout within the dummy collapsible button hlayout = qt.QHBoxLayout(generalParametersCollapsibleButton) self.label=qt.QLabel("Volume Name:") hlayout.addWidget(self.label) self.volumeNameLine=qt.QLineEdit() hlayout.addWidget(self.volumeNameLine) self.volumeNameLine.connect('textChanged(QString)', self.onLabelChanged) # # Parameters Area # parametersCollapsibleButton = ctk.ctkCollapsibleButton() parametersCollapsibleButton.text = "Sample From Nothing" self.layout.addWidget(parametersCollapsibleButton) # Layout within the dummy collapsible button parametersFormLayout = qt.QFormLayout(parametersCollapsibleButton) # # Sample Label map Button # self.labelButton = qt.QPushButton("Create Sample Label Map") self.labelButton.toolTip = "Create sample label map." self.labelButton.enabled = True parametersFormLayout.addRow(self.labelButton) # # Sample Volume Button # self.volumeButton = qt.QPushButton("Create Sample Volume") self.volumeButton.toolTip = "Create sample volume." self.volumeButton.enabled = True parametersFormLayout.addRow(self.volumeButton) # # Sample model Button # self.modelButton = qt.QPushButton("Create Sample Model") self.modelButton.toolTip = "Create sample Model." 
self.modelButton.enabled = True parametersFormLayout.addRow(self.modelButton) # connections self.labelButton.connect('clicked(bool)', self.onLabelButton) self.volumeButton.connect('clicked(bool)', self.onVolumeButton) self.modelButton.connect('clicked(bool)', self.onModelButton) parametersCollapsibleButton2 = ctk.ctkCollapsibleButton() parametersCollapsibleButton2.text = "Sample From example" self.layout.addWidget(parametersCollapsibleButton2) # Layout within the dummy collapsible button parametersFormLayout = qt.QFormLayout(parametersCollapsibleButton2) # # input volume selector # self.inputSelector = slicer.qMRMLNodeComboBox() self.inputSelector.nodeTypes = ( ("vtkMRMLScalarVolumeNode"), "" ) # Keep the following line as an example #self.inputSelector.addAttribute( "vtkMRMLScalarVolumeNode", "LabelMap", 0 ) self.inputSelector.selectNodeUponCreation = True self.inputSelector.addEnabled = False self.inputSelector.removeEnabled = False self.inputSelector.noneEnabled = True self.inputSelector.showHidden = False self.inputSelector.showChildNodeTypes = False self.inputSelector.setMRMLScene( slicer.mrmlScene ) self.inputSelector.setToolTip( "reference image." ) parametersFormLayout.addRow("Reference Volume: ", self.inputSelector) self.inputSelector.connect("currentNodeChanged(vtkMRMLNode*)", self.onSampleFromReferenceSelect) # # Sample From reference Button # self.referenceButton = qt.QPushButton("Create Sample Model from a reference") self.referenceButton.toolTip = "Create sample Model from a reference." parametersFormLayout.addRow(self.referenceButton) self.referenceButton.connect('clicked(bool)', self.onReferenceButton) # Add vertical spacer self.layout.addStretch(1) # Refresh Apply button state self.onLabelChanged(self.volumeNameLine.text) def ButtonsClickable(self, value): self.labelButton.setEnabled(value) self.volumeButton.setEnabled(value) self.modelButton.setEnabled(value) self.onSampleFromReferenceSelect() def cleanup(self): pass def onLabelChanged(self,myString): if not myString=='': self.ButtonsClickable(True) else: self.ButtonsClickable(False) def onSampleFromReferenceSelect(self): self.referenceButton.enabled = self.inputSelector.currentNode() and self.volumeNameLine.text != '' def onLabelButton(self): logic = CreateSamplesLogic() logic.createVolume(self.volumeNameLine.text, labelmap=True) def onVolumeButton(self): logic = CreateSamplesLogic() logic.createVolume(self.volumeNameLine.text) def onModelButton(self): logic = CreateSamplesLogic() logic.createModel() def onReferenceButton(self): logic = CreateSamplesLogic() logic.createVolume(self.volumeNameLine.text, labelmap=True, reference=self.inputSelector.currentNode()) # # CreateSamplesLogic # class CreateSamplesLogic(ScriptedLoadableModuleLogic): """This class should implement all the actual computation done by your module. The interface should be such that other python code can import this class and make use of the functionality without requiring an instance of the Widget. 
Uses ScriptedLoadableModuleLogic base class, available at: https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py """ def setVolumeAsBackgroundImage(self, node): count = slicer.mrmlScene.GetNumberOfNodesByClass('vtkMRMLSliceCompositeNode') for n in xrange(count): compNode = slicer.mrmlScene.GetNthNodeByClass(n, 'vtkMRMLSliceCompositeNode') compNode.SetBackgroundVolumeID(node.GetID()) return True # Create sample labelmap with same geometry as input volume<|fim▁hole|> value = 1 sampleVolumeNode = slicer.vtkMRMLScalarVolumeNode() sampleVolumeNode = slicer.mrmlScene.AddNode(sampleVolumeNode) imageData = vtk.vtkImageData() if reference == None: mySpacing = (0.5,0.6,0.5) myOrigin = (20,50,50) # Do NOT set the spacing and the origin of imageData (vtkImageData) # The spacing and the origin should only be set in the vtkMRMLScalarVolumeNode!!!!!! imageData.SetDimensions(30,5,15) imageData.AllocateScalars(vtk.VTK_DOUBLE, 1) sampleVolumeNode.SetSpacing(mySpacing[0],mySpacing[1],mySpacing[2]) sampleVolumeNode.SetOrigin(myOrigin[0],myOrigin[1],myOrigin[2]) else: sampleVolumeNode.Copy(reference) imageData.DeepCopy(reference.GetImageData()) sampleVolumeNode.SetName(volumeName) sampleVolumeNode.SetAndObserveImageData(imageData) extent = imageData.GetExtent() for x in xrange(extent[0], extent[1]+1): for y in xrange(extent[2], extent[3]+1): for z in xrange(extent[4], extent[5]+1): if (x >= (extent[1]/4) and x <= (extent[1]/4) * 3) and (y >= (extent[3]/4) and y <= (extent[3]/4) * 3) and (z >= (extent[5]/4) and z <= (extent[5]/4) * 3): imageData.SetScalarComponentFromDouble(x,y,z,0,value) else: imageData.SetScalarComponentFromDouble(x,y,z,0,0) # Display labelmap if labelmap: sampleVolumeNode.SetLabelMap(1) labelmapVolumeDisplayNode = slicer.vtkMRMLLabelMapVolumeDisplayNode() slicer.mrmlScene.AddNode(labelmapVolumeDisplayNode) colorNode = slicer.util.getNode('GenericAnatomyColors') labelmapVolumeDisplayNode.SetAndObserveColorNodeID(colorNode.GetID()) labelmapVolumeDisplayNode.VisibilityOn() sampleVolumeNode.SetAndObserveDisplayNodeID(labelmapVolumeDisplayNode.GetID()) else: volumeDisplayNode = slicer.vtkMRMLScalarVolumeDisplayNode() slicer.mrmlScene.AddNode(volumeDisplayNode) colorNode = slicer.util.getNode('Grey') volumeDisplayNode.SetAndObserveColorNodeID(colorNode.GetID()) volumeDisplayNode.VisibilityOn() sampleVolumeNode.SetAndObserveDisplayNodeID(volumeDisplayNode.GetID()) self.setVolumeAsBackgroundImage(sampleVolumeNode) return True def createModel(self): print "model" class CreateSamplesTest(ScriptedLoadableModuleTest): """ This is the test case for your scripted module. Uses ScriptedLoadableModuleTest base class, available at: https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py """ def setUp(self): """ Do whatever is needed to reset the state - typically a scene clear will be enough. """ slicer.mrmlScene.Clear(0) def runTest(self): """Run as few or as many tests as needed here. """ self.setUp()<|fim▁end|>
  def createVolume(self , volumeName, labelmap=False, reference=None):
    if volumeName == '':
      raise Exception('The name of the output volume cannot be empty')
<|file_name|>jquery-gstree.js<|end_file_name|><|fim▁begin|>// in development // getsimple tree expansion // written for editpages, but can probably be expanded to support any // list of elements with depth data in proper parent child order // var treeprefix = 'tree-'; var treeparentclass = treeprefix + 'parent'; // class for expander handles var treeindentclass = treeprefix + 'indent'; // class for expander handles var treeexpanderclass = treeprefix + 'expander'; // class for expander handles var treeexpandedclass = treeprefix + 'expander-expanded'; // class for expander when expanded var treecollapsedclass = treeprefix + 'expander-collapsed'; // class for expanded when collapsed var nodecollapsedclass = treeprefix + 'collapsed'; // class to control node visibility and data flag as collapsed var nodeparentcollapsedclass = treeprefix + 'parentcollapsed'; // class to control children visibility while retaining collapse data var depthprefix = 'depth-'; // class prefix for depth information var datadepthattr = 'depth'; // data attribute name for depth information function toggleRow(){ var row = $(this).closest('.'+treeparentclass); // Debugger.log("toggle row " + $(row)); var depth = getNodeDepth(row); if(depth < 0 ) return toggleTopAncestors(); // special handler to collapse all top level if($(row).hasClass(nodecollapsedclass)) expandRow(row); else collapseRow(row); $("table.striped").zebraStripe(); } function setExpander(elem){ var expander = $(elem).find('.'+treeexpanderclass); $(expander).toggleClass(treecollapsedclass,$(elem).hasClass(nodecollapsedclass)); $(expander).toggleClass(treeexpandedclass,!$(elem).hasClass(nodecollapsedclass)); } function collapseRow(elem){ $(elem).addClass(nodecollapsedclass); hideChildRows(elem); setExpander(elem); } function expandRow(elem){ $(elem).removeClass(nodecollapsedclass); showChildRows(elem); setExpander(elem); } function hideChildRows(elem){ var children = getChildrenByDepth(elem); children.each(function(i,elem){ hideChildRow(elem); }); } function hideChildRow(elem){ $(elem).addClass(nodeparentcollapsedclass); // $(elem).animate({opacity: 0.1} , 100, function(){ $(this).addClass(nodeparentcollapsedclass);} ); // @todo custom callout } // not using recursion or tree walking here, this is likely faster // obtains children by getting all siblings up until the first sibling of equal depth // retains collapse states on parents function showChildRows(elem){ var children = getChildrenByDepth(elem); var startDepth = getNodeDepth(elem); children.each(function(i,elem){ thisDepth = getNodeDepth(elem); immediateChild = thisDepth == (startDepth + 1); // if immediate child just show it if(immediateChild){ showChildRow(elem); return true; } // get actual parent of this child thisParent = getParentByDepth(elem); // if(!thisParent[0]) Debugger.log('parent not found'); parentCollapsed = $(thisParent).hasClass(nodecollapsedclass); parentHidden = $(thisParent).hasClass(nodeparentcollapsedclass); // Debugger.log(elem.id + ' | ' + $(thisParent).attr('id') + ' | ' + parentCollapsed + ' | ' + parentHidden); // show child only if parent is not hidden AND parent is not collapsed if(!parentHidden && !parentCollapsed){ showChildRow(elem); } }); } function showChildRow(elem){ $(elem).removeClass(nodeparentcollapsedclass); // $(elem).animate({opacity: 1}, 300); // todo: custom callout } function getNodeDepth(elem){ return parseInt($(elem).data(datadepthattr),10); } function getChildrenByDepth(elem){ // children are all nodes until nextsibling of equal OR LOWER depth var 
nextsibling = getNextSiblingByDepth(elem); var children = elem.nextUntil(nextsibling); return children; } /** * get the first previous parentclass with lower depth */ function getParentByDepth(elem){ var depth = getNodeDepth(elem) - 1; return $(elem).prevAll("."+treeparentclass+"[data-"+datadepthattr+"='" + (depth) + "']").first(); } /** * get the next parent of equal or less depth */ function getNextSiblingByDepth(elem){ var tagname = getTagName(elem); var depth = getNodeDepth(elem); var nextsiblings = elem.nextAll(tagname).filter(function(index){ return getNodeDepth(this) <= depth; }); return nextsiblings.first(); } function addExpanders(elems,expander){ if(expander === undefined) expander = '<span class="'+ treeexpanderclass + ' ' + treeexpandedclass +'"></span>'; $(elems).each(function(i,elem){ // Debugger.log($(elem)); $(elem).removeClass("tree-indent").removeClass("indent-last").html(''); // remove any indentation here, now an expander $(expander).on('click',toggleRow).bind('selectstart dragstart', function(evt) { evt.preventDefault(); return false; }).prependTo($(elem)); }); } function addIndents(elems){ $(elems).each(function(i,elem){ $('<span class="'+treeindentclass+'"></span>').prependTo($(elem)); }); } function toggleTopAncestors(){ // @todo possible optimizations<|fim▁hole|> // if(rootcollapsed) console.profile('expand all'); // else console.profile('collapse all'); // toggle label text var langstr = !rootcollapsed ? i18n('EXPAND_TOP') : i18n('COLLAPSE_TOP'); $('#roottoggle .label').html(langstr); // hide all depth 0 children, do not change collpase data var depth = 0; $("#editpages tr[data-depth='" + depth + "']").each(function(i,elem){ if(rootcollapsed) expandRow($(elem)); else collapseRow($(elem)); }); $("#roottoggle").toggleClass("rootcollapsed"); $('#roottoggle').toggleClass(nodecollapsedclass,!rootcollapsed); setExpander($('#roottoggle')); $("table.striped").zebraStripe(); // console.profileEnd(); } // add tree to editpages table function addExpanderTableHeader(elem,expander,colspan){ // Debugger.log($(elem)); var rootcollapsed = $("#roottoggle").hasClass("rootcollapsed"); var langstr = rootcollapsed ? 
i18n('EXPAND_TOP') : i18n('COLLAPSE_TOP'); $('<tr id="roottoggle" class="tree-roottoggle nohighlight" data-depth="-1"><td colspan="'+colspan+'">'+expander+'<span class="label">'+ langstr +'</span></td></tr>').insertAfter(elem); // init expander $('#roottoggle').toggleClass("collapsed",rootcollapsed); setExpander($('#roottoggle')); $('#roottoggle .'+treeexpanderclass).on('click',toggleTopAncestors).bind('selectstart dragstart', function(evt) { evt.preventDefault(); return false; }); $('#roottoggle .label').on('click',toggleTopAncestors).bind('selectstart dragstart', function(evt) { evt.preventDefault(); return false; }); } $.fn.zebraStripe = function(){ $("tbody tr:not(.tree-parentcollapsed)",$(this)).each(function(i,elem){ if(i%2!=1) $(elem).addClass('odd').removeClass('even'); else $(elem).addClass('even').removeClass('odd'); }); }; /** * addTabletree * * add gstree to tree ready table with data-depths and parent,indent classes * * @param int minrows minumum rows needed to apply tree, else will skip tree creation * @param int mindepth minimum depth required to apply tree, else wil skip * @param int headerdepth minimum depth required to add the header expander controls */ $.fn.addTableTree = function(minrows,mindepth,headerdepth){ // console.profile(); // @todo for slide animations, temporarily insert tbody at start end of collapse range and animate it, use display:block on tbody // minrows = 10; // mindepth = 3; // headerdepth = 3; var elem = this; if(!elem[0]){ Debugger.log("gstree: table does not exist, skipping"); return; } // table is small if table has less rows that minrows var small = minrows !== undefined && $("tbody tr",elem).length < minrows; // table is shallow if table has depths less than mindepth var shallow = $("tbody tr[data-depth="+mindepth+"]",elem).length <= 0; // skip if no children if(!$("."+treeparentclass,elem)[0] || small || shallow){ if(!small || !shallow) Debugger.log("gstree: insufficient depth, skipping"); else Debugger.log("gstree: table too small, skipping"); return; } // custom overrides for fontawesome icons for expander and collapse classes treeexpandedclass = 'fa-rotate-90'; treecollapsedclass = ''; var customexpander = '<i class="'+treeexpanderclass+' '+treeexpandedclass+' fa fa-play fa-fw"></i>'; addIndents($('tr td:first-child',elem)); // preload indents for roots and childless addExpanders($('tr.tree-parent td:first-child .tree-indent:last-of-type',elem),customexpander); // add expanders to last indent $('tr td:first-child .tree-indent:last-of-type').html(''); // remove extra indentation // add the header expander controls var deep = headerdepth === undefined || $("tbody tr[data-depth="+headerdepth+"]",elem).length > 0; if(deep) addExpanderTableHeader($('thead > tr:first',elem),customexpander,4); $("table.striped").zebraStripe(); // console.profileEnd(); };<|fim▁end|>
	// could use a table css rule to hide all trs, unless classes depth-0 or something with specificity to override
	// would skip all iterations needed here, but would also require special css to toggle expanders
	// could also use a cache table for these using tr ids
	var rootcollapsed = $("#roottoggle").hasClass("rootcollapsed");
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""File related wrapper functions to streamline common use cases""" import manage as manage from manage import find_file, find_file_re, list_dir import operation as operation<|fim▁hole|><|fim▁end|>
from operation import hash_file, read_file, slice_file, write_file
<|file_name|>bitcoin_uk.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="uk" version="2.0"> <defaumlcodec>UTF-8</defaumlcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About Menlocoin</source> <translation>Про Menlocoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;Menlocoin&lt;/b&gt; version</source> <translation>Версія &lt;b&gt;Menlocoin&apos;a&lt;b&gt;</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> Це програмне забезпечення є експериментальним. Поширюється за ліцензією MIT/X11, додаткова інформація міститься у файлі COPYING, а також за адресою http://www.opensource.org/licenses/mit-license.php. Цей продукт включає в себе програмне забезпечення, розроблене в рамках проекту OpenSSL (http://www.openssl.org/), криптографічне програмне забезпечення, написане Еріком Янгом ([email protected]), та функції для роботи з UPnP, написані Томасом Бернардом.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation>Авторське право</translation> </message> <message> <location line="+0"/> <source>The Menlocoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Адресна книга</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>Двічі клікніть на адресу чи назву для їх зміни</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Створити нову адресу</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Копіювати виділену адресу в буфер обміну</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Створити адресу</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your Menlocoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Це ваші адреси для отримання платежів. 
Ви можете давати різні адреси різним людям, таким чином маючи можливість відслідкувати хто конкретно і скільки вам заплатив.</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>&amp;Скопіювати адресу</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Показати QR-&amp;Код</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a Menlocoin address</source> <translation>Підпишіть повідомлення щоб довести, що ви є власником цієї адреси</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>&amp;Підписати повідомлення</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Вилучити вибрані адреси з переліку</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>Експортувати дані з поточної вкладки в файл</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified Menlocoin address</source> <translation>Перевірте повідомлення для впевненості, що воно підписано вказаною Menlocoin-адресою</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>Перевірити повідомлення</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Видалити</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your Menlocoin addresses for sending payments. 
Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>Скопіювати &amp;мітку</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>&amp;Редагувати</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"/> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>Експортувати адресну книгу</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Файли відділені комами (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Помилка при експортуванні</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Неможливо записати у файл %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Назва</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Адреса</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(немає назви)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Діалог введення паролю</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Введіть пароль</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Новий пароль</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Повторіть пароль</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Введіть новий пароль для гаманця.&lt;br/&gt;Будь ласка, використовуйте паролі що містять &lt;b&gt;як мінімум 10 випадкових символів&lt;/b&gt;, або &lt;b&gt;як мінімум 8 слів&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Зашифрувати гаманець</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Ця операція потребує пароль для розблокування гаманця.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Розблокувати гаманець</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Ця операція потребує пароль для дешифрування гаманця.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Дешифрувати гаманець</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Змінити пароль</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Ввести старий та новий паролі 
для гаманця.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Підтвердити шифрування гаманця</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR LITECOINS&lt;/b&gt;!</source> <translation>УВАГА: Якщо ви зашифруєте гаманець і забудете пароль, ви &lt;b&gt;ВТРАТИТЕ ВСІ СВОЇ БІТКОІНИ&lt;/b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Ви дійсно хочете зашифрувати свій гаманець?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Увага: Ввімкнено Caps Lock!</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>Гаманець зашифровано</translation> </message> <message> <location line="-56"/> <source>Menlocoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your menlocoins from being stolen by malware infecting your computer.</source> <translation>Біткоін-клієнт буде закрито для завершення процесу шифрування. Пам&apos;ятайте, що шифрування гаманця не може повністю захистити ваші біткоіни від крадіжки, у випадку якщо ваш комп&apos;ютер буде інфіковано шкідливими програмами.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>Не вдалося зашифрувати гаманець</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Виникла помилка під час шифрування гаманця. 
Ваш гаманець не було зашифровано.</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>Введені паролі не співпадають.</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>Не вдалося розблокувати гаманець</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Введений пароль є невірним.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>Не вдалося розшифрувати гаманець</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>Пароль було успішно змінено.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>&amp;Підписати повідомлення...</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>Синхронізація з мережею...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>&amp;Огляд</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Показати загальний огляд гаманця</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>Транзакції</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Переглянути історію транзакцій</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>Редагувати список збережених адрес та міток</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>Показати список адрес для отримання платежів</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>&amp;Вихід</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Вийти</translation> </message> <message> <location line="+4"/> <source>Show information about Menlocoin</source> <translation>Показати інформацію про Menlocoin</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>&amp;Про Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Показати інформацію про Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>&amp;Параметри...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Шифрування гаманця...</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Резервне копіювання гаманця...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>Змінити парол&amp;ь...</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation>Імпорт блоків з диску...</translation> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> 
<translation type="unfinished"/> </message> <message> <location line="-347"/> <source>Send coins to a Menlocoin address</source> <translation>Відправити монети на вказану адресу</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for Menlocoin</source> <translation>Редагувати параметри</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>Резервне копіювання гаманця в інше місце</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>Змінити пароль, який використовується для шифрування гаманця</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>Вікно зневадження</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Відкрити консоль зневадження і діагностики</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>Перевірити повідомлення...</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>Menlocoin</source> <translation>Menlocoin</translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>Гаманець</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>&amp;About Menlocoin</source> <translation>&amp;Про Menlocoin</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>Показати / Приховати</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation>Показує або приховує головне вікно</translation> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Sign messages with your Menlocoin addresses to prove you own them</source> <translation>Підтвердіть, що Ви є власником повідомлення підписавши його Вашою Menlocoin-адресою </translation> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified Menlocoin addresses</source> <translation>Перевірте повідомлення для впевненості, що воно підписано вказаною Menlocoin-адресою</translation> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>&amp;Файл</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>&amp;Налаштування</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>&amp;Довідка</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Панель вкладок</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>[тестова мережа]</translation> </message> <message> <location line="+47"/> <source>Menlocoin client</source> <translation>Menlocoin-клієнт</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to Menlocoin 
network</source> <translation><numerusform>%n активне з&apos;єднання з мережею</numerusform><numerusform>%n активні з&apos;єднання з мережею</numerusform><numerusform>%n активних з&apos;єднань з мережею</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation>Оброблено %1 блоків історії транзакцій.</translation> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation>Помилка</translation> </message> <message> <location line="+3"/> <source>Warning</source> <translation>Увага</translation> </message> <message> <location line="+3"/> <source>Information</source> <translation>Інформація</translation> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>Синхронізовано</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>Синхронізується...</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>Підтвердити комісію</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>Надіслані транзакції</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>Отримані перекази</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Дата: %1 Кількість: %2 Тип: %3 Адреса: %4 </translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>Обробка URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid Menlocoin address or malformed URI parameters.</source> <translation>Неможливо обробити URI! 
Це може бути викликано неправильною Menlocoin-адресою, чи невірними параметрами URI.</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>&lt;b&gt;Зашифрований&lt;/b&gt; гаманець &lt;b&gt;розблоковано&lt;/b&gt;</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>&lt;b&gt;Зашифрований&lt;/b&gt; гаманець &lt;b&gt;заблоковано&lt;/b&gt;</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. Menlocoin can no longer continue safely and will quit.</source> <translation type="unfinished"/> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>Сповіщення мережі</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Редагувати адресу</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Мітка</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>Мітка, пов&apos;язана з цим записом адресної книги</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Адреса</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>Адреса, пов&apos;язана з цим записом адресної книги. 
Може бути змінено тільки для адреси відправника.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>Нова адреса для отримання</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Нова адреса для відправлення</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Редагувати адресу для отримання</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Редагувати адресу для відправлення</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Введена адреса «%1» вже присутня в адресній книзі.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid Menlocoin address.</source> <translation>Введена адреса «%1» не є коректною адресою в мережі Menlocoin.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>Неможливо розблокувати гаманець.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Не вдалося згенерувати нові ключі.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>Menlocoin-Qt</source> <translation>Menlocoin-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>версія</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Використання:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>параметри командного рядка</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>Параметри інтерфейсу</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Встановлення мови, наприклад &quot;de_DE&quot; (типово: системна)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Запускати згорнутим</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Показувати заставку під час запуску (типово: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Параметри</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Головні</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. 
Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Заплатити комісі&amp;ю</translation> </message> <message> <location line="+31"/> <source>Automatically start Menlocoin after logging in to the system.</source> <translation>Автоматично запускати гаманець при вході до системи.</translation> </message> <message> <location line="+3"/> <source>&amp;Start Menlocoin on system login</source> <translation>&amp;Запускати гаманець при вході в систему</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation>Скинути всі параметри клієнта на типові.</translation> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation>Скинути параметри</translation> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>&amp;Мережа</translation> </message> <message> <location line="+6"/> <source>Automatically open the Menlocoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Автоматично відкривати порт для клієнту біткоін на роутері. Працює лише якщо ваш роутер підтримує UPnP і ця функція увімкнена.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Відображення порту через &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the Menlocoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Підключатись до мережі Menlocoin через SOCKS-проксі (наприклад при використанні Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>Підключатись через &amp;SOCKS-проксі:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP проксі:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>IP-адреса проксі-сервера (наприклад 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Порт:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Порт проксі-сервера (наприклад 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS версії:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>Версія SOCKS-проксі (наприклад 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Вікно</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Показувати лише іконку в треї після згортання вікна.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>Мінімізувати &amp;у трей</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Згортати замість закриття. 
Якщо ця опція включена, програма закриється лише після вибору відповідного пункту в меню.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>Згортати замість закритт&amp;я</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Відображення</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Мова інтерфейсу користувача:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting Menlocoin.</source> <translation>Встановлює мову інтерфейсу. Зміни набудуть чинності після перезапуску Menlocoin.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>В&amp;имірювати монети в:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Виберіть одиницю вимірювання монет, яка буде відображатись в гаманці та при відправленні.</translation> </message> <message> <location line="+9"/> <source>Whether to show Menlocoin addresses in the transaction list or not.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Відображати адресу в списку транзакцій</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;Гаразд</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Скасувати</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Застосувати</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>типово</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation>Підтвердження скидання параметрів</translation> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation>Деякі параметри потребують перезапуск клієнта для набуття чинності.</translation> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation>Продовжувати?</translation> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>Увага</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting Menlocoin.</source> <translation>Цей параметр набуде чинності після перезапуску Menlocoin.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>Невірно вказано адресу проксі.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Форма</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the Menlocoin network after a connection is established, but this process has not completed yet.</source> <translation>Показана інформація вже може бути застарілою. 
Ваш гаманець буде автоматично синхронізовано з мережею Menlocoin після встановлення підключення, але цей процес ще не завершено.</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>Баланс:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Непідтверджені:</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>Гаманець</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation type="unfinished"/> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Недавні транзакції&lt;/b&gt;</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>Ваш поточний баланс</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Загальна сума всіх транзакцій, які ще не підтверджені, та до цих пір не враховуються в загальному балансі</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>не синхронізовано</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start menlocoin: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Діалог QR-коду</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Запросити Платіж</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Кількість:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Мітка:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Повідомлення:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Зберегти як...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Помилка при кодуванні URI в QR-код.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>Невірно введено кількість, будь ласка, перевірте.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Кінцевий URI занадто довгий, спробуйте зменшити текст для мітки / повідомлення.</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Зберегти QR-код</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>PNG-зображення (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Назва клієнту</translation> </message> <message> <location line="+10"/> 
<location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>Н/Д</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Версія клієнту</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Інформація</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Використовується OpenSSL версії</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation type="unfinished"/> </message> <message> <location line="+29"/> <source>Network</source> <translation>Мережа</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Кількість підключень</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>В тестовій мережі</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Поточне число блоків</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Last block time</source> <translation type="unfinished"/> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>Відкрити</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Параметри командного рядка</translation> </message> <message> <location line="+7"/> <source>Show the Menlocoin-Qt help message to get a list with possible Menlocoin command-line options.</source> <translation>Показати довідку Menlocoin-Qt для отримання переліку можливих параметрів командного рядка.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>Показати</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>Консоль</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Дата збирання</translation> </message> <message> <location line="-104"/> <source>Menlocoin - Debug window</source> <translation>Menlocoin - Вікно зневадження</translation> </message> <message> <location line="+25"/> <source>Menlocoin Core</source> <translation type="unfinished"/> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Файл звіту зневадження</translation> </message> <message> <location line="+7"/> <source>Open the Menlocoin debug log file from the current data directory. 
This can take a few seconds for large log files.</source> <translation type="unfinished"/> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Очистити консоль</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the Menlocoin RPC console.</source> <translation>Вітаємо у консолі Menlocoin RPC.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Використовуйте стрілки вгору вниз для навігації по історії, і &lt;b&gt;Ctrl-L&lt;/b&gt; для очищення екрана.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Наберіть &lt;b&gt;help&lt;/b&gt; для перегляду доступних команд.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Відправити</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>Відправити на декілька адрес</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Дод&amp;ати одержувача</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Видалити всі поля транзакції</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>Очистити &amp;все</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>Баланс:</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123.456 BTC</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Підтвердити відправлення</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;Відправити</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; адресату %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>Підтвердіть відправлення</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Ви впевнені що хочете відправити %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> і </translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>Адреса отримувача невірна, будь ласка перепровірте.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>Кількість монет для відправлення повинна бути більшою 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>Кількість монет для відправлення перевищує ваш баланс.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee 
is included.</source> <translation>Сума перевищить ваш баланс, якщо комісія %1 буде додана до вашої транзакції.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Знайдено адресу що дублюється. Відправлення на кожну адресу дозволяється лише один раз на кожну операцію переказу.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation>Помилка: Не вдалося створити транзакцію!</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Помилка: транзакцію було відхилено. Це може статись, якщо декілька монет з вашого гаманця вже використані, наприклад, якщо ви використовуєте одну копію гаманця (wallet.dat), а монети були використані з іншої копії, але не позначені як використані в цій.</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Форма</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>&amp;Кількість:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>&amp;Отримувач:</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>Введіть мітку для цієї адреси для додавання її в адресну книгу</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;Мітка:</translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>Вибрати адресу з адресної книги</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Вставити адресу</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Видалити цього отримувача</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a Menlocoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Введіть адресу Menlocoin (наприклад Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Підписи - Підпис / Перевірка повідомлення</translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;Підписати повідомлення</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. 
Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation type="unfinished"/> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Введіть адресу Menlocoin (наприклад Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>Вибрати адресу з адресної книги</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>Вставити адресу</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Введіть повідомлення, яке ви хочете підписати тут</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation>Підпис</translation> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>Копіювати поточну сигнатуру до системного буферу обміну</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this Menlocoin address</source> <translation>Підпишіть повідомлення щоб довести, що ви є власником цієї адреси</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>&amp;Підписати повідомлення</translation> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>Скинути всі поля підпису повідомлення</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Очистити &amp;все</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>Перевірити повідомлення</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation type="unfinished"/> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Введіть адресу Menlocoin (наприклад Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified Menlocoin address</source> <translation>Перевірте повідомлення для впевненості, що воно підписано вказаною Menlocoin-адресою</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation>Перевірити повідомлення</translation> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>Скинути всі поля перевірки повідомлення</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a Menlocoin address (e.g. 
Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source> <translation>Введіть адресу Menlocoin (наприклад Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Натисніть кнопку «Підписати повідомлення», для отримання підпису</translation> </message> <message> <location line="+3"/> <source>Enter Menlocoin signature</source> <translation>Введіть сигнатуру Menlocoin</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>Введена нечинна адреса.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Будь ласка, перевірте адресу та спробуйте ще.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation type="unfinished"/> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>Не вдалося підписати повідомлення.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Повідомлення підписано.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>Підпис не можливо декодувати.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Будь ласка, перевірте підпис та спробуйте ще.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Не вдалося перевірити повідомлення.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Повідомлення перевірено.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+22"/> <source>The Menlocoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>[тестова мережа]</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>Відкрити до %1</translation> </message><|fim▁hole|> <location line="+6"/> <source>%1/offline</source> <translation>%1/поза інтернетом</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/не підтверджено</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 підтверджень</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Статус</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation 
type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Дата</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Generated</source> <translation>Згенеровано</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Відправник</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>Отримувач</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation type="unfinished"/> </message> <message> <location line="-2"/> <source>label</source> <translation>Мітка</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Кредит</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>не прийнято</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Дебет</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Комісія за транзакцію</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Загальна сума</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Повідомлення</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Коментар</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID транзакції</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Після генерації монет, потрібно зачекати 120 блоків, перш ніж їх можна буде використати. Коли ви згенерували цей блок, його було відправлено в мережу для того, щоб він був доданий до ланцюжка блоків. Якщо ця процедура не вдасться, статус буде змінено на «не підтверджено» і ви не зможете потратити згенеровані монету. 
Таке може статись, якщо хтось інший згенерував блок на декілька секунд раніше.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Транзакція</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation type="unfinished"/> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Кількість</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>true</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>false</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>, ще не було успішно розіслано</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>невідомий</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Деталі транзакції</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>Даний діалог показує детальну статистику по вибраній транзакції</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>Дата</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Тип</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Адреса</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Кількість</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>Відкрити до %1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>Поза інтернетом (%1 підтверджень)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>Непідтверджено (%1 із %2 підтверджень)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>Підтверджено (%1 підтверджень)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Цей блок не був отриманий жодними іншими вузлами і, ймовірно, не буде прийнятий!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> 
<translation>Згенеровано, але не підтверджено</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>Отримано</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Отримано від</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Відправлено</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Відправлено собі</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Добуто</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(недоступно)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Статус транзакції. Наведіть вказівник на це поле, щоб показати кількість підтверджень.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Дата і час, коли транзакцію було отримано.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Тип транзакції.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Адреса отримувача транзакції.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Сума, додана чи знята з балансу.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>Всі</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Сьогодні</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>На цьому тижні</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>На цьому місяці</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>Минулого місяця</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Цього року</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Проміжок...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Отримані на</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Відправлені на</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>Відправлені собі</translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>Добуті</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Інше</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Введіть адресу чи мітку для пошуку</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Мінімальна сума</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Скопіювати адресу</translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Скопіювати мітку</translation> </message> <message> <location line="+1"/> <source>Copy 
amount</source> <translation>Копіювати кількість</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Редагувати мітку</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Показати деталі транзакції</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>Експортувати дані транзакцій</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Файли, розділені комою (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Підтверджені</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Дата</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Тип</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Мітка</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Адреса</translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>Кількість</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>Ідентифікатор</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Помилка експорту</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>Неможливо записати у файл %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Діапазон від:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>до</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>Відправити</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>Експортувати дані з поточної вкладки в файл</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Виникла помилка при спробі зберегти гаманець в новому місці.</translation> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation>Успішне створення резервної копії</translation> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation>Данні гаманця успішно збережено в новому місці призначення.</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>Menlocoin version</source> <translation>Версія</translation> </message> <message> 
<location line="+102"/> <source>Usage:</source> <translation>Використання:</translation> </message> <message> <location line="-29"/> <source>Send command to -server or menlocoind</source> <translation>Відправити команду серверу -server чи демону</translation> </message> <message> <location line="-23"/> <source>List commands</source> <translation>Список команд</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>Отримати довідку по команді</translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>Параметри:</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: menlocoin.conf)</source> <translation>Вкажіть файл конфігурації (типово: menlocoin.conf)</translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: menlocoind.pid)</source> <translation>Вкажіть pid-файл (типово: menlocoind.pid)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Вкажіть робочий каталог</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Встановити розмір кешу бази даних в мегабайтах (типово: 25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 9333 or testnet: 19333)</source> <translation>Чекати на з&apos;єднання на &lt;port&gt; (типово: 9333 або тестова мережа: 19333)</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Підтримувати не більше &lt;n&gt; зв&apos;язків з колегами (типово: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation type="unfinished"/> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Поріг відключення неправильно під&apos;єднаних пірів (типово: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Максимальній розмір вхідного буферу на одне з&apos;єднання (типово: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 9332 or testnet: 19332)</source> <translation>Прослуховувати &lt;port&gt; для JSON-RPC-з&apos;єднань (типово: 9332 або тестова мережа: 19332)</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>Приймати команди із командного рядка та команди JSON-RPC</translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>Запустити в фоновому режимі (як демон) та приймати команди</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>Використовувати тестову мережу</translation> </message> <message> <location line="-112"/> <source>Accept 
connections from outside (default: 1 if no -proxy or -connect)</source> <translation type="unfinished"/> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=menlocoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;Menlocoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. Menlocoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Помилка: транзакцію було відхилено. Це може статись, якщо декілька монет з вашого гаманця вже використані, наприклад, якщо ви використовуєте одну копію гаманця (wallet.dat), а монети були використані з іншої копії, але не позначені як використані в цій.</translation> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation type="unfinished"/> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Увага: встановлено занадто велику комісію (-paytxfee). Комісія зніматиметься кожен раз коли ви проводитимете транзакції.</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong Menlocoin will not work properly.</source> <translation>Увага: будь ласка, перевірте дату і час на своєму комп&apos;ютері. 
Якщо ваш годинник йде неправильно, Menlocoin може працювати некоректно.</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Увага: помилка читання wallet.dat! Всі ключі прочитано коректно, але дані транзакцій чи записи адресної книги можуть бути пропущені, або пошкоджені.</translation> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Увага: файл wallet.dat пошкоджено, дані врятовано! Оригінальний wallet.dat збережено як wallet.{timestamp}.bak до %s; якщо Ваш баланс чи транзакції неправильні, Ви можете відновити їх з резервної копії. </translation> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Спроба відновити закриті ключі з пошкодженого wallet.dat</translation> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>Підключитись лише до вказаного вузла</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation>Помилка ініціалізації бази даних блоків</translation> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation>Помилка завантаження бази даних блоків</translation> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation>Помилка: Мало вільного місця на диску!</translation> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Помилка: Гаманець заблокований, неможливо створити транзакцію!</translation> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation>Помилка: системна помилка: </translation> </message> <message> <location line="+1"/> <source>Failed to listen on any port. 
Use -listen=0 if you want this.</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Скільки блоків перевіряти під час запуску (типово: 288, 0 = всі)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation>Імпорт блоків з зовнішнього файлу blk000??.dat</translation> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation>Інформація</translation> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Помилка в адресі -tor: «%s»</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> 
<message> <location line="+8"/> <source>Maintain a full transaction index (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Максимальний буфер, &lt;n&gt;*1000 байт (типово: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Максимальній розмір вихідного буферу на одне з&apos;єднання, &lt;n&gt;*1000 байт (типово: 1000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Виводити більше налагоджувальної інформації. Мається на увазі всі шнші -debug* параметри</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>Доповнювати налагоджувальний вивід відміткою часу</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the Menlocoin Wiki for SSL setup instructions)</source> <translation>Параметри SSL: (див. Menlocoin Wiki для налаштування SSL)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Вибір версії socks-проксі для використання (4-5, типово: 5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Відсилати налагоджувальну інформацію на консоль, а не у файл debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Відсилати налагоджувальну інформацію до налагоджувача</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Встановити максимальний розмір блоку у байтах (типово: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Встановити мінімальний розмір блоку у байтах (типово: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Стискати файл debug.log під час старту клієнта (типово: 1 коли відсутутній параметр -debug)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Вказати тайм-аут підключення у мілісекундах (типово: 5000)</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation>Системна помилка: </translation> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction 
amounts must be positive</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Намагатись використовувати UPnP для відображення порту, що прослуховується на роутері (default: 0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Намагатись використовувати UPnP для відображення порту, що прослуховується на роутері (default: 1 when listening)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>Ім&apos;я користувача для JSON-RPC-з&apos;єднань</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation>Попередження</translation> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Увага: Поточна версія застаріла, необхідне оновлення!</translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat пошкоджено, відновлення не вдалося</translation> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>Пароль для JSON-RPC-з&apos;єднань</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Дозволити JSON-RPC-з&apos;єднання з вказаної IP-адреси</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Відправляти команди на вузол, запущений на &lt;ip&gt; (типово: 127.0.0.1)</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation type="unfinished"/> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>Модернізувати гаманець до останнього формату</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Встановити розмір пулу ключів &lt;n&gt; (типово: 100)</translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Пересканувати ланцюжок блоків, в пошуку втрачених транзакцій</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Використовувати OpenSSL (https) для JSON-RPC-з&apos;єднань</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation>Файл сертифіката сервера (типово: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Закритий ключ сервера (типово: server.pem)</translation> </message> <message> <location line="-151"/> 
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Допустимі шифри (типово: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>Дана довідка</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Неможливо прив&apos;язати до порту %s на цьому комп&apos;ютері (bind returned error %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>Підключитись через SOCKS-проксі</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Дозволити пошук в DNS для команд -addnode, -seednode та -connect</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>Завантаження адрес...</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Помилка при завантаженні wallet.dat: Гаманець пошкоджено</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of Menlocoin</source> <translation>Помилка при завантаженні wallet.dat: Гаманець потребує новішої версії Біткоін-клієнта</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart Menlocoin to complete</source> <translation>Потрібно перезаписати гаманець: перезапустіть Біткоін-клієнт для завершення</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>Помилка при завантаженні wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Помилка в адресі проксі-сервера: «%s»</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Невідома мережа вказана в -onlynet: «%s»</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation type="unfinished"/> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Помилка у величині комісії -paytxfee=&lt;amount&gt;: «%s»</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>Некоректна кількість</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>Недостатньо коштів</translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>Завантаження індексу блоків...</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Додати вузол до підключення і лишити його відкритим</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. 
Menlocoin is probably already running.</source> <translation>Неможливо прив&apos;язати до порту %s на цьому комп&apos;ютері. Можливо гаманець вже запущено.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>Комісія за КБ</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>Завантаження гаманця...</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>Неможливо записати типову адресу</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>Сканування...</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>Завантаження завершене</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation type="unfinished"/> </message> <message> <location line="-74"/> <source>Error</source> <translation>Помилка</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Ви мусите встановити rpcpassword=&lt;password&gt; в файлі конфігурації: %s Якщо файл не існує, створіть його із правами тільки для читання власником (owner-readable-only).</translation> </message> </context> </TS><|fim▁end|>
<message>
<|file_name|>aes.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 Martin Grabmueller. See the LICENSE file at the // top-level directory of this distribution for license information. //! Example implementation of AES (the Advanced Encryption Standard). //! Note that this implementation has neither been verified to be //! correct, nor to be secure. Do not use it for production! //! //! The implementation of the basic block-sized AES is based on the //! one in Joshua Davies: "Implementing SSL/TLS (Using Cryptography //! and PKI)", Wiley Publishing Inc., 2011. The cipher modes EBC, CBC //! and CTR have been implemented from scratch. use std::collections::HashSet; use std::io::Cursor; use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt}; /// Key for AES cipher. This comes in three sizes: 128, 192 and 256 /// bytes. pub enum AesKey { /// 128-bit key. Key128(AesKey128), /// 192-bit key. Key192(AesKey192), /// 256-bit key. Key256(AesKey256), } /// Container for 128-bit value to be used as an AES key. pub struct AesKey128 { /// Raw key material. pub key: [u8; 16], } /// Container for 192-bit value to be used as an AES key. pub struct AesKey192 { /// Raw key material. pub key: [u8; 24], } /// Container for 256-bit value to be used as an AES key. pub struct AesKey256 { /// Raw key material. pub key: [u8; 32], } /// `SBOX` implements the sboxes used in the sub_word operation (used /// in key schedule generation) and sub_bytes operation (used in the /// encryption rounds). static SBOX: [[u8; 16]; 16] = [[0x63, 0x7c, 0x77, 0x7b, 0xf2, 0x6b, 0x6f, 0xc5, 0x30, 0x01, 0x67, 0x2b, 0xfe, 0xd7, 0xab, 0x76], [0xca, 0x82, 0xc9, 0x7d, 0xfa, 0x59, 0x47, 0xf0, 0xad, 0xd4, 0xa2, 0xaf, 0x9c, 0xa4, 0x72, 0xc0], [0xb7, 0xfd, 0x93, 0x26, 0x36, 0x3f, 0xf7, 0xcc, 0x34, 0xa5, 0xe5, 0xf1, 0x71, 0xd8, 0x31, 0x15], [0x04, 0xc7, 0x23, 0xc3, 0x18, 0x96, 0x05, 0x9a, 0x07, 0x12, 0x80, 0xe2, 0xeb, 0x27, 0xb2, 0x75], [0x09, 0x83, 0x2c, 0x1a, 0x1b, 0x6e, 0x5a, 0xa0, 0x52, 0x3b, 0xd6, 0xb3, 0x29, 0xe3, 0x2f, 0x84], [0x53, 0xd1, 0x00, 0xed, 0x20, 0xfc, 0xb1, 0x5b, 0x6a, 0xcb, 0xbe, 0x39, 0x4a, 0x4c, 0x58, 0xcf], [0xd0, 0xef, 0xaa, 0xfb, 0x43, 0x4d, 0x33, 0x85, 0x45, 0xf9, 0x02, 0x7f, 0x50, 0x3c, 0x9f, 0xa8], [0x51, 0xa3, 0x40, 0x8f, 0x92, 0x9d, 0x38, 0xf5, 0xbc, 0xb6, 0xda, 0x21, 0x10, 0xff, 0xf3, 0xd2], [0xcd, 0x0c, 0x13, 0xec, 0x5f, 0x97, 0x44, 0x17, 0xc4, 0xa7, 0x7e, 0x3d, 0x64, 0x5d, 0x19, 0x73], [0x60, 0x81, 0x4f, 0xdc, 0x22, 0x2a, 0x90, 0x88, 0x46, 0xee, 0xb8, 0x14, 0xde, 0x5e, 0x0b, 0xdb], [0xe0, 0x32, 0x3a, 0x0a, 0x49, 0x06, 0x24, 0x5c, 0xc2, 0xd3, 0xac, 0x62, 0x91, 0x95, 0xe4, 0x79], [0xe7, 0xc8, 0x37, 0x6d, 0x8d, 0xd5, 0x4e, 0xa9, 0x6c, 0x56, 0xf4, 0xea, 0x65, 0x7a, 0xae, 0x08], [0xba, 0x78, 0x25, 0x2e, 0x1c, 0xa6, 0xb4, 0xc6, 0xe8, 0xdd, 0x74, 0x1f, 0x4b, 0xbd, 0x8b, 0x8a], [0x70, 0x3e, 0xb5, 0x66, 0x48, 0x03, 0xf6, 0x0e, 0x61, 0x35, 0x57, 0xb9, 0x86, 0xc1, 0x1d, 0x9e], [0xe1, 0xf8, 0x98, 0x11, 0x69, 0xd9, 0x8e, 0x94, 0x9b, 0x1e, 0x87, 0xe9, 0xce, 0x55, 0x28, 0xdf], [0x8c, 0xa1, 0x89, 0x0d, 0xbf, 0xe6, 0x42, 0x68, 0x41, 0x99, 0x2d, 0x0f, 0xb0, 0x54, 0xbb, 0x16] ]; /// `INV_SBOX` implements the inverse of `SBOX` and is used in /// decryption. 
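///
/// Sanity-check sketch (an illustrative addition, not part of the
/// original file): `INV_SBOX` undoes an `SBOX` lookup when indexed with
/// the same high-nibble/low-nibble scheme that `sub_bytes` uses.
///
/// ```ignore
/// for b in 0..256usize {
///     let b = b as u8;
///     let s = SBOX[((b & 0xf0) >> 4) as usize][(b & 0x0f) as usize];
///     let back = INV_SBOX[((s & 0xf0) >> 4) as usize][(s & 0x0f) as usize];
///     assert_eq!(back, b);
/// }
/// ```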
static INV_SBOX: [[u8; 16]; 16] = [[0x52, 0x09, 0x6a, 0xd5, 0x30, 0x36, 0xa5, 0x38, 0xbf, 0x40, 0xa3, 0x9e, 0x81, 0xf3, 0xd7, 0xfb], [0x7c, 0xe3, 0x39, 0x82, 0x9b, 0x2f, 0xff, 0x87, 0x34, 0x8e, 0x43, 0x44, 0xc4, 0xde, 0xe9, 0xcb], [0x54, 0x7b, 0x94, 0x32, 0xa6, 0xc2, 0x23, 0x3d, 0xee, 0x4c, 0x95, 0x0b, 0x42, 0xfa, 0xc3, 0x4e], [0x08, 0x2e, 0xa1, 0x66, 0x28, 0xd9, 0x24, 0xb2, 0x76, 0x5b, 0xa2, 0x49, 0x6d, 0x8b, 0xd1, 0x25], [0x72, 0xf8, 0xf6, 0x64, 0x86, 0x68, 0x98, 0x16, 0xd4, 0xa4, 0x5c, 0xcc, 0x5d, 0x65, 0xb6, 0x92], [0x6c, 0x70, 0x48, 0x50, 0xfd, 0xed, 0xb9, 0xda, 0x5e, 0x15, 0x46, 0x57, 0xa7, 0x8d, 0x9d, 0x84], [0x90, 0xd8, 0xab, 0x00, 0x8c, 0xbc, 0xd3, 0x0a, 0xf7, 0xe4, 0x58, 0x05, 0xb8, 0xb3, 0x45, 0x06], [0xd0, 0x2c, 0x1e, 0x8f, 0xca, 0x3f, 0x0f, 0x02, 0xc1, 0xaf, 0xbd, 0x03, 0x01, 0x13, 0x8a, 0x6b], [0x3a, 0x91, 0x11, 0x41, 0x4f, 0x67, 0xdc, 0xea, 0x97, 0xf2, 0xcf, 0xce, 0xf0, 0xb4, 0xe6, 0x73], [0x96, 0xac, 0x74, 0x22, 0xe7, 0xad, 0x35, 0x85, 0xe2, 0xf9, 0x37, 0xe8, 0x1c, 0x75, 0xdf, 0x6e], [0x47, 0xf1, 0x1a, 0x71, 0x1d, 0x29, 0xc5, 0x89, 0x6f, 0xb7, 0x62, 0x0e, 0xaa, 0x18, 0xbe, 0x1b], [0xfc, 0x56, 0x3e, 0x4b, 0xc6, 0xd2, 0x79, 0x20, 0x9a, 0xdb, 0xc0, 0xfe, 0x78, 0xcd, 0x5a, 0xf4], [0x1f, 0xdd, 0xa8, 0x33, 0x88, 0x07, 0xc7, 0x31, 0xb1, 0x12, 0x10, 0x59, 0x27, 0x80, 0xec, 0x5f], [0x60, 0x51, 0x7f, 0xa9, 0x19, 0xb5, 0x4a, 0x0d, 0x2d, 0xe5, 0x7a, 0x9f, 0x93, 0xc9, 0x9c, 0xef], [0xa0, 0xe0, 0x3b, 0x4d, 0xae, 0x2a, 0xf5, 0xb0, 0xc8, 0xeb, 0xbb, 0x3c, 0x83, 0x53, 0x99, 0x61], [0x17, 0x2b, 0x04, 0x7e, 0xba, 0x77, 0xd6, 0x26, 0xe1, 0x69, 0x14, 0x63, 0x55, 0x21, 0x0c, 0x7d] ]; /// rot_word operation. This is used in generation the key schedule. fn rot_word(w: &mut [u8; 4]) { let tmp = w[0]; w[0] = w[1]; w[1] = w[2]; w[2] = w[3]; w[3] = tmp; } // sub_word operation. This is used in generating the key schedule. fn sub_word(w: &mut [u8; 4]) { for i in 0..4 { w[i] = SBOX[((w[i] & 0xf0) >> 4) as usize][(w[i] & 0x0f) as usize]; } } /// Derive the key schedule from the input key, which may be 16, 24 or /// 32 bytes in length. The length of `w` depends on the key length. /// On return, `w` contains the round keys (10 for AES-128, 12 for /// AES-192 and 14 for AES-256). fn compute_key_schedule(key: &[u8], w: &mut [[u8; 4]]) { let keylength = key.len(); let keywords = keylength / 4; let mut rcon = 0x01; for i in 0..keylength { w[i / 4][i % 4] = key[i]; } for i in keywords..4*(keywords+7) { w[i] = w[i-1]; if i % keywords == 0 { rot_word(&mut w[i]); sub_word(&mut w[i]); if i % 36 == 0 { rcon = 0x1b; } w[i][0] ^= rcon; rcon <<= 1; } else if keywords > 6 && i % keywords == 4 { sub_word(&mut w[i]); } w[i][0] ^= w[i - keywords][0]; w[i][1] ^= w[i - keywords][1]; w[i][2] ^= w[i - keywords][2]; w[i][3] ^= w[i - keywords][3]; } } /// XOR the round key `w` with the state. fn add_round_key(state: &mut [[u8; 4]; 4], w: &[[u8;4]]) { for c in 0..4 { for r in 0..4 { state[r][c] = state[r][c] ^ w[c][r]; } } } /// Perform SBOX substitution on the state. fn sub_bytes(state: &mut [[u8; 4]]) { for r in 0..4 { for c in 0..4 { state[r][c] = SBOX[((state[r][c] & 0xf0) >> 4) as usize] [(state[r][c] & 0x0f) as usize]; } } } /// Perform the shift_rows operation on the state. 
fn shift_rows(state: &mut [[u8; 4]]) { let tmp = state[1][0]; state[1][0] = state[1][1]; state[1][1] = state[1][2]; state[1][2] = state[1][3]; state[1][3] = tmp; let tmp = state[2][0]; state[2][0] = state[2][2]; state[2][2] = tmp; let tmp = state[2][1]; state[2][1] = state[2][3]; state[2][3] = tmp; let tmp = state[3][3]; state[3][3] = state[3][2]; state[3][2] = state[3][1]; state[3][1] = state[3][0]; state[3][0] = tmp; } fn xtime(x: u8) -> u8 { (x << 1) ^ (if x & 0x80 != 0 { 0x1b } else { 0x00 }) } fn dot(xin: u8, y: u8) -> u8 { let mut x = xin; let mut product = 0; let mut mask = 0x01; while mask != 0 { if y & mask != 0 { product ^= x; } x = xtime(x); mask <<= 1; } product } /// Perform the mix_columns operation on the state. fn mix_columns(s: &mut [[u8; 4]]) { let mut t = [0u8; 4]; for c in 0..4 { t[0] = dot(2, s[0][c]) ^ dot(3, s[1][c]) ^ s[2][c] ^ s[3][c]; t[1] = s[0][c] ^ dot(2, s[1][c]) ^ dot(3, s[2][c]) ^ s[3][c]; t[2] = s[0][c] ^ s[1][c] ^ dot(2, s[2][c]) ^ dot(3, s[3][c]); t[3] = dot(3, s[0][c]) ^ s[1][c] ^ s[2][c] ^ dot(2, s[3][c]); s[0][c] = t[0]; s[1][c] = t[1]; s[2][c] = t[2]; s[3][c] = t[3]; } } /// Perform the encryption of one block. `w` is the key schedule, `nr` /// the number of rounds and `input` and `output` are the in- and /// output blocks, respectively. fn encrypt_block(w: &[[u8; 4]], nr: usize, input: &[u8; 16], output: &mut [u8; 16]) { let mut state = [[0u8; 4]; 4]; for r in 0..4 { for c in 0..4 { state[r][c] = input[r + (4 * c)]; } } add_round_key(&mut state, &w[0..4]); for round in 0..nr { sub_bytes(&mut state); shift_rows(&mut state); if round < nr-1 { mix_columns(&mut state); } add_round_key(&mut state, &w[(round+1)*4..(round+2)*4]); } for r in 0..4 { for c in 0..4 { output[r + (4 * c)] = state[r][c]; } } } /// Encrypt the plaintext block `input` with AES, using the given key. /// The ciphertext output is placed in `output`. pub fn encrypt(key: &AesKey, input: &[u8; 16], output: &mut [u8; 16]) { let (keysize, keybytes): (usize, Vec<_>) = match key { &AesKey::Key128(AesKey128 {key}) => (16, key[..].iter().cloned().collect()), &AesKey::Key192(AesKey192 {key}) => (24, key[..].iter().cloned().collect()), &AesKey::Key256(AesKey256 {key}) => (32, key[..].iter().cloned().collect()), }; let mut w = [[0u8; 4]; 60]; let nr = (keysize >> 2) + 6; compute_key_schedule(&keybytes, &mut w); encrypt_block(&w, nr, input, output); } /// Encrypt the arbitrary-length plaintext block `input` with AES in<|fim▁hole|> let (keysize, keybytes): (usize, Vec<_>) = match key { &AesKey::Key128(AesKey128 {key}) => (16, key[..].iter().cloned().collect()), &AesKey::Key192(AesKey192 {key}) => (24, key[..].iter().cloned().collect()), &AesKey::Key256(AesKey256 {key}) => (32, key[..].iter().cloned().collect()), }; let mut w = [[0u8; 4]; 60]; let padded_plaintext = ::padding::pkcs7::pad(&plaintext, 16); let mut result = Vec::with_capacity(padded_plaintext.len()); let nr = (keysize >> 2) + 6; compute_key_schedule(&keybytes, &mut w); let mut input = [0u8; 16]; let mut output = [0u8; 16]; for chunk in padded_plaintext.chunks(16) { for x in 0..16 { input[x] = chunk[x]; } encrypt_block(&w, nr, &input, &mut output); for x in 0..16 { result.push(output[x]); } } result } /// Encrypt the arbitrary-length plaintext block `input` with AES in /// CBC mode, using the given key and initialization vector. The /// ciphertext output is returned as a vector of bytes. 
pub fn encrypt_cbc(key: &AesKey, iv: &[u8; 16], plaintext: &[u8]) -> Vec<u8> { let (keysize, keybytes): (usize, Vec<_>) = match key { &AesKey::Key128(AesKey128 {key}) => (16, key[..].iter().cloned().collect()), &AesKey::Key192(AesKey192 {key}) => (24, key[..].iter().cloned().collect()), &AesKey::Key256(AesKey256 {key}) => (32, key[..].iter().cloned().collect()), }; let mut w = [[0u8; 4]; 60]; let padded_plaintext = ::padding::pkcs7::pad(&plaintext, 16); let mut result = Vec::with_capacity(padded_plaintext.len()); let nr = (keysize >> 2) + 6; compute_key_schedule(&keybytes, &mut w); let mut input = [0u8; 16]; let mut output = [0u8; 16]; let mut r = *iv; for chunk in padded_plaintext.chunks(16) { for x in 0..16 { input[x] = chunk[x] ^ r[x]; } encrypt_block(&w, nr, &input, &mut output); for x in 0..16 { result.push(output[x]); } r = output; } result } /// Encrypt the arbitrary-length plaintext block `input` with AES in /// CBC mode, using the given key and initialization vector. The /// ciphertext output is returned as a vector of bytes. /// /// Note that this implementation uses the most significant 64 bits of /// the IV as a nonce, and the least significant 64 bits as the /// initial counter value. To produce the input to the block cipher, /// the nonce is encoded in big-endian format and concatenated with /// a 64-bit counter, also encoded in big-endian format. pub fn encrypt_ctr(key: &AesKey, iv: &[u8; 16], plaintext: &[u8]) -> Vec<u8> { let (keysize, keybytes): (usize, Vec<_>) = match key { &AesKey::Key128(AesKey128 {key}) => (16, key[..].iter().cloned().collect()), &AesKey::Key192(AesKey192 {key}) => (24, key[..].iter().cloned().collect()), &AesKey::Key256(AesKey256 {key}) => (32, key[..].iter().cloned().collect()), }; let mut w = [[0u8; 4]; 60]; let mut result = Vec::with_capacity(plaintext.len()); let mut rdr = Cursor::new(iv); let nonce = rdr.read_u64::<BigEndian>().unwrap(); let mut ctr = rdr.read_u64::<BigEndian>().unwrap(); let nr = (keysize >> 2) + 6; compute_key_schedule(&keybytes, &mut w); let mut input = [0u8; 16]; let mut output = [0u8; 16]; let mut wtr = vec![]; for chunk in plaintext.chunks(16) { wtr.truncate(0); wtr.write_u64::<BigEndian>(nonce).unwrap(); wtr.write_u64::<BigEndian>(ctr).unwrap(); ctr += 1; for x in 0..16 { input[x] = wtr[x]; } encrypt_block(&w, nr, &input, &mut output); for x in 0..chunk.len() { result.push(chunk[x] ^ output[x]); } } result } /// Inverse of the shift_rows operation, used in decryption. fn inv_shift_rows(state: &mut [[u8; 4]; 4]) { let tmp = state[1][2]; state[1][2] = state[1][1]; state[1][1] = state[1][0]; state[1][0] = state[1][3]; state[1][3] = tmp; let tmp = state[2][0]; state[2][0] = state[2][2]; state[2][2] = tmp; let tmp = state[2][1]; state[2][1] = state[2][3]; state[2][3] = tmp; let tmp = state[3][0]; state[3][0] = state[3][1]; state[3][1] = state[3][2]; state[3][2] = state[3][3]; state[3][3] = tmp; } /// Inverse of the sub_bytes operation, used in decryption. fn inv_sub_bytes(state: &mut [[u8; 4]; 4]) { for r in 0..4 { for c in 0..4 { state[r][c] = INV_SBOX[((state[r][c] & 0xf0) >> 4) as usize] [(state[r][c] & 0x0f) as usize]; } } } /// Inverse of the mix_columns operation, used in decryption. 
fn inv_mix_columns(s: &mut [[u8; 4]; 4]) { let mut t = [0u8; 4]; for c in 0..4 { t[0] = dot(0x0e, s[0][c]) ^ dot(0x0b, s[1][c]) ^ dot(0x0d, s[2][c]) ^ dot(0x09, s[3][c]); t[1] = dot(0x09, s[0][c]) ^ dot(0x0e, s[1][c]) ^ dot(0x0b, s[2][c]) ^ dot(0x0d, s[3][c]); t[2] = dot(0x0d, s[0][c]) ^ dot(0x09, s[1][c]) ^ dot(0x0e, s[2][c]) ^ dot(0x0b, s[3][c]); t[3] = dot(0x0b, s[0][c]) ^ dot(0x0d, s[1][c]) ^ dot(0x09, s[2][c]) ^ dot(0x0e, s[3][c]); s[0][c] = t[0]; s[1][c] = t[1]; s[2][c] = t[2]; s[3][c] = t[3]; } } /// Perform the encryption of one block. `w` is the key schedule, `nr` /// the number of rounds and `input` and `output` are the in- and /// output blocks, respectively. fn decrypt_block(w: &[[u8; 4]], nr: usize, input: &[u8; 16], output: &mut [u8; 16]) { let mut state = [[0u8; 4]; 4]; for r in 0..4 { for c in 0..4 { state[r][c] = input[r + (4 * c)]; } } add_round_key(&mut state, &w[nr*4..(nr+1)*4]); let mut round = nr; while round > 0 { inv_shift_rows(&mut state); inv_sub_bytes(&mut state); add_round_key(&mut state, &w[(round-1)*4..(round)*4]); if round > 1 { inv_mix_columns(&mut state); } round -= 1; } for r in 0..4 { for c in 0..4 { output[r + (4 * c)] = state[r][c]; } } } /// Decrypt the ciphertext block `input` with AES, using the given /// key. The plaintext output is placed in `output`. pub fn decrypt(key: &AesKey, input: &[u8; 16], output: &mut [u8; 16]) { let (keysize, keybytes): (usize, Vec<_>) = match key { &AesKey::Key128(AesKey128 {key}) => (16, key[..].iter().cloned().collect()), &AesKey::Key192(AesKey192 {key}) => (24, key[..].iter().cloned().collect()), &AesKey::Key256(AesKey256 {key}) => (32, key[..].iter().cloned().collect()), }; let mut w = [[0u8; 4]; 60]; let nr = (keysize >> 2) + 6; compute_key_schedule(&keybytes, &mut w); decrypt_block(&w, nr, input, output); } /// Decrypt the ciphertext block `input` with AES in ECB mode, using /// the given key. The plaintext output is returned as a byte vector pub fn decrypt_ecb(key: &AesKey, ciphertext: &[u8]) -> Vec<u8> { let (keysize, keybytes): (usize, Vec<_>) = match key { &AesKey::Key128(AesKey128 {key}) => (16, key[..].iter().cloned().collect()), &AesKey::Key192(AesKey192 {key}) => (24, key[..].iter().cloned().collect()), &AesKey::Key256(AesKey256 {key}) => (32, key[..].iter().cloned().collect()), }; let mut w = [[0u8; 4]; 60]; let mut result = Vec::with_capacity(ciphertext.len()); let nr = (keysize >> 2) + 6; compute_key_schedule(&keybytes, &mut w); let mut input = [0u8; 16]; let mut output = [0u8; 16]; for chunk in ciphertext.chunks(16) { for x in 0..16 { input[x] = chunk[x]; } decrypt_block(&w, nr, &input, &mut output); for x in 0..16 { result.push(output[x]); } } let res_len = result.len(); let padding_len = result[res_len - 1] as usize; result.truncate(res_len - padding_len); result } /// Decrypt the ciphertext block `input` with AES in ECB mode, using /// the given key. 
The plaintext output is returned as a byte vector pub fn decrypt_cbc(key: &AesKey, iv: &[u8; 16], ciphertext: &[u8]) -> Vec<u8> { let (keysize, keybytes): (usize, Vec<_>) = match key { &AesKey::Key128(AesKey128 {key}) => (16, key[..].iter().cloned().collect()), &AesKey::Key192(AesKey192 {key}) => (24, key[..].iter().cloned().collect()), &AesKey::Key256(AesKey256 {key}) => (32, key[..].iter().cloned().collect()), }; let mut w = [[0u8; 4]; 60]; let mut result = Vec::with_capacity(ciphertext.len()); let nr = (keysize >> 2) + 6; compute_key_schedule(&keybytes, &mut w); let mut input = [0u8; 16]; let mut output = [0u8; 16]; let mut r = *iv; for chunk in ciphertext.chunks(16) { for x in 0..16 { input[x] = chunk[x]; } decrypt_block(&w, nr, &input, &mut output); for x in 0..16 { result.push(output[x] ^ r[x]); } r = input; } let res_len = result.len(); let padding_len = result[res_len - 1] as usize; result.truncate(res_len - padding_len); result } /// Decrypt the ciphertext block `input` with AES in ECB mode, using /// the given key. The plaintext output is returned as a byte vector pub fn decrypt_ctr(key: &AesKey, iv: &[u8; 16], ciphertext: &[u8]) -> Vec<u8> { let (keysize, keybytes): (usize, Vec<_>) = match key { &AesKey::Key128(AesKey128 {key}) => (16, key[..].iter().cloned().collect()), &AesKey::Key192(AesKey192 {key}) => (24, key[..].iter().cloned().collect()), &AesKey::Key256(AesKey256 {key}) => (32, key[..].iter().cloned().collect()), }; let mut w = [[0u8; 4]; 60]; let mut result = Vec::with_capacity(ciphertext.len()); let mut rdr = Cursor::new(iv); let nonce = rdr.read_u64::<BigEndian>().unwrap(); let mut ctr = rdr.read_u64::<BigEndian>().unwrap(); let nr = (keysize >> 2) + 6; compute_key_schedule(&keybytes, &mut w); let mut input = [0u8; 16]; let mut output = [0u8; 16]; let mut wtr = vec![]; for chunk in ciphertext.chunks(16) { wtr.truncate(0); wtr.write_u64::<BigEndian>(nonce).unwrap(); wtr.write_u64::<BigEndian>(ctr).unwrap(); ctr += 1; for x in 0..16 { input[x] = wtr[x]; } encrypt_block(&w, nr, &input, &mut output); for x in 0..chunk.len() { result.push(chunk[x] ^ output[x]); } } result } pub fn detect_ecb(input: &[u8]) -> bool { if input.len() % 16 != 0 { return false; } let mut m = HashSet::new(); for chunk in input.chunks(16) { if m.contains(chunk) { return true; } m.insert(chunk); } false } #[cfg(test)] mod tests { use super::{encrypt, decrypt}; use super::{encrypt_ecb, decrypt_ecb}; use super::{encrypt_cbc, decrypt_cbc}; use super::{encrypt_ctr, decrypt_ctr}; use super::{detect_ecb}; use super::{AesKey, AesKey128}; use ::codec; // From // http://stackoverflow.com/questions/25428920/how-to-get-a-slice-as-an-array-in-rust fn to_byte_array_16(slice: &[u8]) -> [u8; 16] { let mut array = [0u8; 16]; for (&x, p) in slice.iter().zip(array.iter_mut()) { *p = x; } array } #[test] fn encrypt_0() { let input = b"YELLOW SUBMARINE"; let mut output = [0u8; 16]; let expected = codec::hex::decode("761ab98c7086c509261f322cb3ffa7d9").unwrap(); let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); encrypt(&key, input, &mut output); assert_eq!(expected, output); } #[test] fn decrypt_0() { let input = codec::hex::decode("761ab98c7086c509261f322cb3ffa7d9").unwrap(); let mut output = [0u8; 16]; let expected = b"YELLOW SUBMARINE"; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); decrypt(&key, 
&to_byte_array_16(&input), &mut output); assert_eq!(to_byte_array_16(expected), output); } #[test] fn decrypt_encrypt_0() { let input = b"YELLOW SUBMARINE"; let mut output = [0u8; 16]; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); encrypt(&key, input, &mut output); let mut decrypted = [0u8; 16]; decrypt(&key, &output, &mut decrypted); assert_eq!(to_byte_array_16(input), decrypted); } #[test] fn encrypt_ecb_0() { let plaintext = b"Cooller"; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let expected = vec![40, 80, 126, 246, 153, 18, 246, 8, 200, 113, 212, 145, 203, 140, 137, 97]; let ciphertext = encrypt_ecb(&key, plaintext); assert_eq!(expected, ciphertext); } #[test] fn encrypt_ecb_1() { let plaintext = b"This is an example text for testing encryption and decryption.\n"; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let expected = vec![0xdd, 0xd0, 0x52, 0x5b, 0xdb, 0x4f, 0x1b, 0x3e, 0x66, 0xa7, 0x4f, 0x29, 0x08, 0x25, 0x01, 0x5d, 0x25, 0x86, 0xd6, 0xde, 0x47, 0x6a, 0x68, 0xc5, 0x02, 0xa4, 0x65, 0x6e, 0x74, 0x5f, 0x17, 0x4c, 0x0e, 0x6a, 0x0e, 0x1b, 0x1e, 0xe0, 0xcb, 0x10, 0xc4, 0xd0, 0xa2, 0xa6, 0x5d, 0xe8, 0x57, 0xda, 0xe3, 0xfa, 0xa5, 0x4d, 0x4e, 0xb2, 0xa5, 0x2e, 0xee, 0x5e, 0xc8, 0x2e, 0x69, 0xd9, 0x48, 0x02]; let ciphertext = encrypt_ecb(&key, plaintext); assert_eq!(expected, ciphertext); } #[test] fn decrypt_ecb_0() { let ciphertext = vec![40, 80, 126, 246, 153, 18, 246, 8, 200, 113, 212, 145, 203, 140, 137, 97]; let expected = vec![b'C', b'o', b'o', b'l', b'l', b'e', b'r']; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let plaintext = decrypt_ecb(&key, &ciphertext); assert_eq!(&expected, &plaintext); } #[test] fn decrypt_ecb_1() { let ciphertext = vec![0xdd, 0xd0, 0x52, 0x5b, 0xdb, 0x4f, 0x1b, 0x3e, 0x66, 0xa7, 0x4f, 0x29, 0x08, 0x25, 0x01, 0x5d, 0x25, 0x86, 0xd6, 0xde, 0x47, 0x6a, 0x68, 0xc5, 0x02, 0xa4, 0x65, 0x6e, 0x74, 0x5f, 0x17, 0x4c, 0x0e, 0x6a, 0x0e, 0x1b, 0x1e, 0xe0, 0xcb, 0x10, 0xc4, 0xd0, 0xa2, 0xa6, 0x5d, 0xe8, 0x57, 0xda, 0xe3, 0xfa, 0xa5, 0x4d, 0x4e, 0xb2, 0xa5, 0x2e, 0xee, 0x5e, 0xc8, 0x2e, 0x69, 0xd9, 0x48, 0x02]; let expected: Vec<_> = b"This is an example text for testing encryption and decryption.\n" .iter().cloned().collect(); let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let plaintext = decrypt_ecb(&key, &ciphertext); assert_eq!(expected, plaintext); } #[test] fn encrypt_cbc_0() { let plaintext = b"Cooller"; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf]; let expected = vec![184, 150, 45, 131, 33, 100, 210, 30, 247, 102, 16, 15, 77, 186, 157, 60]; let ciphertext = encrypt_cbc(&key, &iv, plaintext); assert_eq!(expected, ciphertext); } #[test] fn encrypt_cbc_1() { let plaintext = b"Need a longer text oh yeah."; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0, 1, 2, 
3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf]; let expected = vec![142, 180, 175, 89, 254, 0, 125, 125, 142, 78, 32, 224, 101, 202, 49, 247, 146, 217, 135, 92, 254, 111, 190, 89, 137, 225, 117, 77, 14, 53, 2, 178]; let ciphertext = encrypt_cbc(&key, &iv, plaintext); assert_eq!(expected, ciphertext); } #[test] fn encrypt_cbc_2() { let plaintext = b"This is an example text for testing encryption and decryption.\n"; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; let expected = vec![0xdd, 0xd0, 0x52, 0x5b, 0xdb, 0x4f, 0x1b, 0x3e, 0x66, 0xa7, 0x4f, 0x29, 0x08, 0x25, 0x01, 0x5d, 0xfd, 0xdc, 0x12, 0x46, 0xc3, 0xf4, 0x7c, 0xaa, 0x85, 0xe4, 0x19, 0x3a, 0x06, 0xdc, 0x14, 0x22, 0x82, 0x46, 0x3b, 0x6d, 0xed, 0x3c, 0x55, 0xa6, 0x4d, 0x7f, 0x41, 0x83, 0xde, 0x85, 0xe0, 0x17, 0x41, 0xef, 0xe7, 0xf5, 0xbf, 0x8f, 0x9f, 0x2a, 0x05, 0x36, 0x9e, 0x19, 0x6b, 0x6f, 0x49, 0x6f]; let ciphertext = encrypt_cbc(&key, &iv, plaintext); assert_eq!(expected, ciphertext); } #[test] fn decrypt_cbc_0() { let ciphertext = vec![142, 180, 175, 89, 254, 0, 125, 125, 142, 78, 32, 224, 101, 202, 49, 247, 146, 217, 135, 92, 254, 111, 190, 89, 137, 225, 117, 77, 14, 53, 2, 178]; let expected = vec![78, 101, 101, 100, 32, 97, 32, 108, 111, 110, 103, 101, 114, 32, 116, 101, 120, 116, 32, 111, 104, 32, 121, 101, 97, 104, 46]; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf]; let plaintext = decrypt_cbc(&key, &iv, &ciphertext); assert_eq!(&expected, &plaintext); } #[test] fn decrypt_cbc_1() { let ciphertext = vec![0xdd, 0xd0, 0x52, 0x5b, 0xdb, 0x4f, 0x1b, 0x3e, 0x66, 0xa7, 0x4f, 0x29, 0x08, 0x25, 0x01, 0x5d, 0xfd, 0xdc, 0x12, 0x46, 0xc3, 0xf4, 0x7c, 0xaa, 0x85, 0xe4, 0x19, 0x3a, 0x06, 0xdc, 0x14, 0x22, 0x82, 0x46, 0x3b, 0x6d, 0xed, 0x3c, 0x55, 0xa6, 0x4d, 0x7f, 0x41, 0x83, 0xde, 0x85, 0xe0, 0x17, 0x41, 0xef, 0xe7, 0xf5, 0xbf, 0x8f, 0x9f, 0x2a, 0x05, 0x36, 0x9e, 0x19, 0x6b, 0x6f, 0x49, 0x6f]; let expected: Vec<_> = b"This is an example text for testing encryption and decryption.\n" .iter().cloned().collect(); let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; let plaintext = decrypt_cbc(&key, &iv, &ciphertext); assert_eq!(&expected, &plaintext); } #[test] fn encrypt_ctr_0() { let plaintext = b"Cooller"; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf]; let expected = vec![73, 251, 100, 217, 45, 11, 130]; let ciphertext = encrypt_ctr(&key, &iv, plaintext); assert_eq!(expected, ciphertext); } #[test] fn encrypt_ctr_1() { let plaintext = b"Need a longer text oh yeah."; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf]; let expected = vec![68, 241, 110, 209, 97, 15, 208, 41, 158, 173, 243, 61, 180, 115, 158, 63, 122, 
23, 204, 251, 14, 56, 11, 243, 251, 178, 211]; let ciphertext = encrypt_ctr(&key, &iv, plaintext); assert_eq!(expected, ciphertext); } #[test] fn encrypt_ctr_2() { let plaintext = b"This is an example text for testing encryption and decryption.\n"; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; let expected = vec![0x92, 0xc9, 0x52, 0x44, 0xa7, 0xe6, 0x28, 0xa2, 0x0e, 0x21, 0xa1, 0x07, 0xd9, 0xa9, 0xb5, 0x09, 0x1f, 0x23, 0x33, 0xe1, 0xf0, 0xb8, 0xc0, 0x3e, 0x2f, 0x14, 0xcf, 0xc3, 0x11, 0x91, 0x5e, 0x7e, 0x20, 0xb8, 0xe0, 0x73, 0xfc, 0xf5, 0xc5, 0xfe, 0x9a, 0xf9, 0x0e, 0x01, 0x0f, 0xef, 0x90, 0xfc, 0xd7, 0xc9, 0x0b, 0x4a, 0x51, 0x09, 0xb0, 0x41, 0x20, 0x29, 0x5f, 0x31, 0xf2, 0x99, 0xf6]; let ciphertext = encrypt_ctr(&key, &iv, plaintext); assert_eq!(expected, ciphertext); } #[test] fn decrypt_ctr_0() { let ciphertext = vec![68, 241, 110, 209, 97, 15, 208, 41, 158, 173, 243, 61, 180, 115, 158, 63, 122, 23, 204, 251, 14, 56, 11, 243, 251, 178, 211]; let expected: Vec<_> = b"Need a longer text oh yeah.".into_iter().cloned().collect(); let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf]; let plaintext = decrypt_ctr(&key, &iv, &ciphertext); assert_eq!(&expected, &plaintext); } #[test] fn decrypt_ctr_1() { let ciphertext = vec![0x92, 0xc9, 0x52, 0x44, 0xa7, 0xe6, 0x28, 0xa2, 0x0e, 0x21, 0xa1, 0x07, 0xd9, 0xa9, 0xb5, 0x09, 0x1f, 0x23, 0x33, 0xe1, 0xf0, 0xb8, 0xc0, 0x3e, 0x2f, 0x14, 0xcf, 0xc3, 0x11, 0x91, 0x5e, 0x7e, 0x20, 0xb8, 0xe0, 0x73, 0xfc, 0xf5, 0xc5, 0xfe, 0x9a, 0xf9, 0x0e, 0x01, 0x0f, 0xef, 0x90, 0xfc, 0xd7, 0xc9, 0x0b, 0x4a, 0x51, 0x09, 0xb0, 0x41, 0x20, 0x29, 0x5f, 0x31, 0xf2, 0x99, 0xf6]; let expected: Vec<_> = b"This is an example text for testing encryption and decryption.\n" .iter().cloned().collect(); let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; let plaintext = decrypt_ctr(&key, &iv, &ciphertext); assert_eq!(&expected, &plaintext); } #[test] fn detect_ecb_0() { let plaintext = b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.\n"; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let ciphertext = encrypt_ecb(&key, plaintext); assert!(detect_ecb(&ciphertext)); } #[test] fn detect_ecb_1() { let plaintext = b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.\n"; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; let ciphertext = encrypt_cbc(&key, &iv, plaintext); assert!(!detect_ecb(&ciphertext)); } #[test] fn detect_ecb_2() { let plaintext = b"AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA.\n"; let keybytes = codec::hex::decode("000102030405060708090a0b0c0d0e0f").unwrap(); let key = AesKey::Key128(AesKey128{key: to_byte_array_16(&keybytes)}); let iv = [0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]; let ciphertext = encrypt_ctr(&key, &iv, plaintext); assert!(!detect_ecb(&ciphertext)); } }<|fim▁end|>
/// ECB mode, using the given key. The ciphertext output is returned /// as a vector of bytes. pub fn encrypt_ecb(key: &AesKey, plaintext: &[u8]) -> Vec<u8> {
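The doc comments in aes.rs above describe two details worth making concrete: the CTR keystream input block is an 8-byte big-endian nonce followed by an 8-byte big-endian counter, and detect_ecb flags a ciphertext as ECB when any 16-byte block repeats. A minimal Python sketch of both ideas, for illustration only (it is not part of the Rust crate):

import struct

def ctr_block(nonce, counter):
    # 64-bit big-endian nonce followed by 64-bit big-endian counter,
    # matching the BigEndian read/write calls in encrypt_ctr/decrypt_ctr.
    return struct.pack(">QQ", nonce, counter)

def looks_like_ecb(ciphertext, block_size=16):
    # ECB encrypts equal plaintext blocks to equal ciphertext blocks,
    # so any repeated block is a strong hint that ECB mode was used.
    blocks = [ciphertext[i:i + block_size]
              for i in range(0, len(ciphertext), block_size)]
    return len(blocks) != len(set(blocks))

# The [0, 1, ..., 0xf] IV used in the CTR tests splits into
# nonce 0x0001020304050607 and initial counter 0x08090a0b0c0d0e0f.
assert ctr_block(0x0001020304050607, 0x08090A0B0C0D0E0F) == bytes(range(16))
assert looks_like_ecb(b"A" * 16 + b"B" * 16 + b"A" * 16)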
<|file_name|>scrutest.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from shutil import rmtree from tempfile import mkdtemp<|fim▁hole|>from scrusubtitles import ScruSubtitlesListener from scrusubtitles import ScruSubtitlesLogger class TestService(ScruSubtitlesListener, ScruSubtitlesLogger): def __init__(self): super(TestService, self).__init__() self._omdbapi = OMDbAPI() self._omdbapi.logger = self self._scrusubtitles = ScruSubtitles() self._scrusubtitles.listener = self self._scrusubtitles.logger = self self._scrusubtitles.workdir = mkdtemp() self._num_subtitles_downloaded = 0 self._num_subtitles_found = 0 def cleanup(self): rmtree(self._scrusubtitles.workdir) def lookup(self, title, year): return self._omdbapi.search(title, year) def download(self, url, filename): self._num_subtitles_downloaded = 0 self._scrusubtitles.download(url, filename) self.info(u'{0} subtitles downloaded'.format(self._num_subtitles_downloaded)) def search(self, imdb_id, languages): self._num_subtitles_found = 0 self._scrusubtitles.search(imdb_id, languages) self.info(u'{0} subtitles found'.format(self._num_subtitles_found)) def on_subtitle_found(self, subtitle): self._num_subtitles_found += 1 self.info(u'Found {0} subtitle {1}'.format(subtitle['language'], subtitle['filename'])) for key in subtitle: self.debug(u' {0}: {1}'.format(key, subtitle[key])) def on_subtitle_downloaded(self, path): self._num_subtitles_downloaded += 1 self.info(u'Subtitle {0} downloaded'.format(path)) def debug(self, message): print u'DEBUG: {0}'.format(message) def info(self, message): print u'INFO: {0}'.format(message) def warn(self, message): print u'WARN: {0}'.format(message) def error(self, message): print u'ERROR: {0}'.format(message)<|fim▁end|>
from omdbapi import OMDbAPI from scrusubtitles import ScruSubtitles
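A hypothetical driver for the TestService above, showing the intended call order (lookup, then search, then download, then cleanup); the title, IMDb id, language list and URL are placeholder values, and the exact argument formats expected by OMDbAPI and ScruSubtitles are assumptions:

service = TestService()
try:
    service.lookup("The Matrix", 1999)             # OMDb title/year lookup
    service.search("tt0133093", ["English"])       # subtitles for an IMDb id
    service.download("http://example.org/sub.zip", "sub.srt")
finally:
    service.cleanup()                              # removes the temp workdir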
<|file_name|>ConfigurationNode.java<|end_file_name|><|fim▁begin|>/* * HawkEye Redux * Copyright (C) 2012-2013 Cubeville <http://www.cubeville.org> and contributors * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.cubeville.hawkeye.config; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; /** * Configuration implementation backed by a nested map */ public class ConfigurationNode implements Configuration { protected Map<String, Object> root; private boolean writeDefaults; public ConfigurationNode(Map<String, Object> root) { this(root, false); } public ConfigurationNode(Map<String, Object> root, boolean writeDefaults) { this.root = root; this.writeDefaults = writeDefaults; } /** * Gets the configuration's backing map * * @return Map of keys and values */ public Map<String, Object> getRoot() { return root; } @Override public void clear() { root.clear(); } @SuppressWarnings("unchecked") @Override public Object get(String node) { // Process dot notation String[] path = node.split("\\."); Object val = null; Map<String, Object> tmp = root; // Loop through map to get nested values for (int i = 0; i < path.length; i++) { val = tmp.get(path[i]); // Path doesn't exist if (val == null) return null; // Last piece of path if (i == path.length - 1) break; try { // Get next level of nested map tmp = (Map<String, Object>) val; } catch (ClassCastException ex) { // Nested map doesn't exist return null; } } return val; } @Override public Object get(String path, Object def) { Object val = get(path); if (val == null) { if (writeDefaults) set(path, def); val = def; } return val; } @SuppressWarnings("unchecked") @Override public void set(String node, Object value) { // Process dot notation String[] path = node.split("\\."); Map<String, Object> tmp = root; for (int i = 0; i < path.length; i++) { // Last level of nesting reached if (i == path.length - 1) { tmp.put(path[i], value); return; } Object val = tmp.get(path[i]); if (val == null || !(val instanceof Map)) { // Create a map if it isn't already there val = new HashMap<String, Object>(); tmp.put(path[i], val); } tmp = (Map<String, Object>) val; } } @Override public String getString(String path) { Object val = get(path); return val == null ? 
null : val.toString(); } @Override public String getString(String path, String def) { String val = getString(path); if (val == null) { if (writeDefaults) set(path, def); val = def; } return val; } @Override public Integer getInt(String path) { Object val = get(path); if (val instanceof Number) { return ((Number) val).intValue(); } else { return null; } } @Override public int getInt(String path, int def) { Integer val = getInt(path); if (val == null) { if (writeDefaults) set(path, def); val = def; } return val; } @Override public Double getDouble(String path) { Object val = get(path); if (val instanceof Number) { return ((Number) val).doubleValue(); } else { return null; } } @Override public double getDouble(String path, double def) { Double val = getDouble(path); if (val == null) { if (writeDefaults) set(path, def); val = def; } return val; } @Override public Boolean getBoolean(String path) { Object val = get(path); if (val instanceof Boolean) { return (Boolean) val; } else { return null; } } @Override public boolean getBoolean(String path, boolean def) { Boolean val = getBoolean(path); if (val == null) { if (writeDefaults) set(path, def); val = def; } return val; } @Override public List<String> getStringList(String path) { Object val = get(path); List<String> list = new ArrayList<String>(); if (!(val instanceof List)) { return list; } @SuppressWarnings("unchecked") List<Object> raw = (List<Object>) val; for (Object obj : raw) { if (obj != null) list.add(obj.toString()); } return list; } @Override public List<String> getStringList(String path, List<String> def) { List<String> val = getStringList(path); if (val == null) { if (writeDefaults) set(path, def); val = def; } return val; } <|fim▁hole|> public List<Integer> getIntList(String path) { Object val = get(path); List<Integer> list = new ArrayList<Integer>(); if (!(val instanceof List)) { return list; } @SuppressWarnings("unchecked") List<Object> raw = (List<Object>) val; for (Object obj : raw) { if (obj instanceof Number) list.add(((Number) obj).intValue()); } return list; } @Override public List<Integer> getIntList(String path, List<Integer> def) { List<Integer> val = getIntList(path); if (val == null) { if (writeDefaults) set(path, def); val = def; } return val; } @Override public List<Double> getDoubleList(String path) { Object val = get(path); List<Double> list = new ArrayList<Double>(); if (!(val instanceof List)) { return list; } @SuppressWarnings("unchecked") List<Object> raw = (List<Object>) val; for (Object obj : raw) { if (obj instanceof Number) list.add(((Number) obj).doubleValue()); } return list; } @Override public List<Double> getDoubleList(String path, List<Double> def) { List<Double> val = getDoubleList(path); if (val == null) { if (writeDefaults) set(path, def); val = def; } return val; } @Override public boolean writeDefaults() { return writeDefaults; } @Override public void setWriteDefaults(boolean writeDefaults) { this.writeDefaults = writeDefaults; } @Override public Object get(Variable path) { return get(path.getPath()); } @Override public Object get(Variable path, Object def) { return get(path.getPath(), def); } @Override public void set(Variable path, Object value) { set(path.getPath(), value); } @Override public String getString(Variable path) { return getString(path.getPath()); } @Override public String getString(Variable path, String def) { return getString(path.getPath(), def); } @Override public Integer getInt(Variable path) { return getInt(path.getPath()); } @Override public int getInt(Variable path, int 
def) { return getInt(path.getPath(), def); } @Override public Double getDouble(Variable path) { return getDouble(path.getPath()); } @Override public double getDouble(Variable path, double def) { return getDouble(path.getPath(), def); } @Override public Boolean getBoolean(Variable path) { return getBoolean(path.getPath()); } @Override public boolean getBoolean(Variable path, boolean def) { return getBoolean(path.getPath(), def); } @Override public List<String> getStringList(Variable path) { return getStringList(path.getPath()); } @Override public List<String> getStringList(Variable path, List<String> def) { return getStringList(path.getPath(), def); } @Override public List<Integer> getIntList(Variable path) { return getIntList(path.getPath()); } @Override public List<Integer> getIntList(Variable path, List<Integer> def) { return getIntList(path.getPath(), def); } @Override public List<Double> getDoubleList(Variable path) { return getDoubleList(path.getPath()); } @Override public List<Double> getDoubleList(Variable path, List<Double> def) { return getDoubleList(path.getPath(), def); } }<|fim▁end|>
@Override
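ConfigurationNode.get and set above resolve dot-notation paths against nested maps, creating intermediate maps on write and returning null when a segment is missing or is not itself a map. A language-agnostic sketch of the same traversal over Python dicts, to make the algorithm explicit (not part of the HawkEye codebase):

def get_path(root, node):
    parts = node.split(".")
    current = root
    for i, key in enumerate(parts):
        value = current.get(key)
        if value is None:
            return None                 # path does not exist
        if i == len(parts) - 1:
            return value                # last segment: found the value
        if not isinstance(value, dict):
            return None                 # intermediate segment is not a map
        current = value

def set_path(root, node, value):
    parts = node.split(".")
    current = root
    for key in parts[:-1]:
        # create intermediate maps on demand, as the Java set() does
        if not isinstance(current.get(key), dict):
            current[key] = {}
        current = current[key]
    current[parts[-1]] = value

cfg = {}
set_path(cfg, "database.host", "localhost")
assert get_path(cfg, "database.host") == "localhost"
assert get_path(cfg, "database.port") is None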
<|file_name|>RichTextAreaConsts.py<|end_file_name|><|fim▁begin|>""" * Copyright 2007 Google Inc. # Copyright (C) 2009 Luke Kenneth Casson Leighton <[email protected]> * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http:#www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under<|fim▁hole|> """* * Font size enumeration. Represents the seven basic HTML font sizes, as * defined in CSS. """ XX_SMALL = 1 X_SMALL = 2 SMALL = 3 MEDIUM = 4 LARGE = 5 X_LARGE = 6 XX_LARGE = 7 """* * Justification enumeration. The three values are <code>left</code>, * <code>right</code>, <code>center</code>. """ CENTER = "Center" LEFT = "Left" RIGHT = "Right"<|fim▁end|>
* the License. """
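As a small illustration, the seven numeric font-size constants above correspond one-to-one to the CSS absolute-size keywords; a hypothetical helper (not part of the module, and assuming the constants are importable from it) could map between them:

_CSS_FONT_SIZES = {
    XX_SMALL: "xx-small",
    X_SMALL: "x-small",
    SMALL: "small",
    MEDIUM: "medium",
    LARGE: "large",
    X_LARGE: "x-large",
    XX_LARGE: "xx-large",
}

def css_font_size(size_const):
    # Translate a RichTextArea font-size constant to its CSS keyword.
    return _CSS_FONT_SIZES[size_const]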
<|file_name|>test_organization.py<|end_file_name|><|fim▁begin|>"""Unit tests for the ``organizations`` paths. Each ``APITestCase`` subclass tests a single URL. A full list of URLs to be tested can be found here: http://theforeman.org/api/apidoc/v2/organizations.html :Requirement: Organization :CaseAutomation: Automated :CaseLevel: Acceptance :CaseComponent: API :TestType: Functional :CaseImportance: High :Upstream: No """ from fauxfactory import gen_alphanumeric, gen_string from nailgun import client, entities from random import randint from requests.exceptions import HTTPError from robottelo.config import settings from robottelo.datafactory import filtered_datapoint, invalid_values_list from robottelo.decorators import skip_if_bug_open, tier1, tier2 from robottelo.helpers import get_nailgun_config from robottelo.test import APITestCase from six.moves import http_client @filtered_datapoint def valid_org_data_list(): """List of valid data for input testing. Note: The maximum allowed length of org name is 242 only. This is an intended behavior (Also note that 255 is the standard across other entities.) """ return [ gen_string('alphanumeric', randint(1, 242)), gen_string('alpha', randint(1, 242)), gen_string('cjk', randint(1, 85)), gen_string('latin1', randint(1, 242)), gen_string('numeric', randint(1, 242)), gen_string('utf8', randint(1, 85)), gen_string('html', randint(1, 85)), ] class OrganizationTestCase(APITestCase): """Tests for the ``organizations`` path.""" @tier1 def test_positive_create_text_plain(self): """Create an organization using a 'text/plain' content-type. :id: 6f67a3f0-0c1d-498c-9a35-28207b0faec2 :expectedresults: HTTP 415 is returned. :CaseImportance: Critical """ organization = entities.Organization() organization.create_missing() response = client.post( organization.path(), organization.create_payload(), auth=settings.server.get_credentials(), headers={'content-type': 'text/plain'}, verify=False, ) self.assertEqual( http_client.UNSUPPORTED_MEDIA_TYPE, response.status_code) @tier1 def test_positive_create_with_auto_label(self): """Create an organization and provide a name. :id: c9f69ee5-c6dd-4821-bb05-0d93ffa22460 :expectedresults: The organization has the provided attributes and an auto-generated label. :CaseImportance: Critical """ org = entities.Organization().create() self.assertTrue(hasattr(org, 'label')) self.assertIsInstance(org.label, type(u'')) @tier1 def test_positive_create_with_custom_label(self): """Create an org and provide a name and identical label. :id: f0deab6a-b09b-4110-8575-d4bea945a545 :expectedresults: The organization has the provided attributes. :CaseImportance: Critical """ # A label has a more restrictive allowable charset than a name, so we # use it for populating both name and label. org = entities.Organization() name_label = org.get_fields()['label'].gen_value() org.name = org.label = name_label org = org.create() self.assertEqual(name_label, org.name) self.assertEqual(name_label, org.label) @tier1 def test_positive_create_with_name_and_label(self): """Create an organization and provide a name and label. :id: 2bdd9aa8-a36a-4009-ac29-5c3d6416a2b7 :expectedresults: The organization has the provided attributes. 
:CaseImportance: Critical """ org = entities.Organization() org.name = name = org.get_fields()['name'].gen_value() org.label = label = org.get_fields()['label'].gen_value() org = org.create() self.assertEqual(name, org.name) self.assertEqual(label, org.label) @tier1 def test_positive_create_with_name_and_description(self): """Create an organization and provide a name and description. :id: afeea84b-61ca-40bf-bb16-476432919115 :expectedresults: The organization has the provided attributes and an auto-generated label. :CaseImportance: Critical """ for name in valid_org_data_list(): with self.subTest(name): org = entities.Organization( name=name, description=name, ).create() self.assertEqual(org.name, name) self.assertEqual(org.description, name) # Was a label auto-generated? self.assertTrue(hasattr(org, 'label')) self.assertIsInstance(org.label, type(u'')) self.assertGreater(len(org.label), 0) @tier1 def test_positive_create_with_name_label_description(self): """Create an org and provide a name, label and description. :id: f7d92392-751e-45de-91da-5ed2a47afc3f :expectedresults: The organization has the provided name, label and description. :CaseImportance: Critical """ org = entities.Organization() org.name = name = org.get_fields()['name'].gen_value() org.label = label = org.get_fields()['label'].gen_value() org.description = desc = org.get_fields()['description'].gen_value() org = org.create() self.assertEqual(org.name, name) self.assertEqual(org.label, label) self.assertEqual(org.description, desc) @tier1 def test_negative_create_with_invalid_name(self): """Create an org with an incorrect name. :id: 9c6a4b45-a98a-4d76-9865-92d992fa1a22 :expectedresults: The organization cannot be created. :CaseImportance: Critical """ for name in invalid_values_list(): with self.subTest(name): with self.assertRaises(HTTPError): entities.Organization(name=name).create() @tier1 def test_negative_create_with_same_name(self): """Create two organizations with identical names. :id: a0f5333c-cc83-403c-9bf7-08fb372909dc :expectedresults: The second organization cannot be created. :CaseImportance: Critical """ name = entities.Organization().create().name with self.assertRaises(HTTPError): entities.Organization(name=name).create() @tier1 def test_positive_search(self): """Create an organization, then search for it by name. :id: f6f1d839-21f2-4676-8683-9f899cbdec4c :expectedresults: Searching returns at least one result. :CaseImportance: Critical """ org = entities.Organization().create() orgs = entities.Organization().search( query={u'search': u'name="{0}"'.format(org.name)} ) self.assertEqual(len(orgs), 1) self.assertEqual(orgs[0].id, org.id) self.assertEqual(orgs[0].name, org.name) class OrganizationUpdateTestCase(APITestCase): """Tests for the ``organizations`` path.""" @classmethod def setUpClass(cls): # noqa """Create an organization.""" super(OrganizationUpdateTestCase, cls).setUpClass() cls.organization = entities.Organization().create() @tier1 def test_positive_update_name(self): """Update an organization's name with valid values. :id: 68f2ba13-2538-407c-9f33-2447fca28cd5 :expectedresults: The organization's name is updated. :CaseImportance: Critical """ for name in valid_org_data_list(): with self.subTest(name): setattr(self.organization, 'name', name) self.organization = self.organization.update(['name']) self.assertEqual(self.organization.name, name) @tier1 def test_positive_update_description(self): """Update an organization's description with valid values. 
:id: bd223197-1021-467e-8714-c1a767ae89af :expectedresults: The organization's description is updated. :CaseImportance: Critical """ for desc in valid_org_data_list(): with self.subTest(desc): setattr(self.organization, 'description', desc) self.organization = self.organization.update(['description']) self.assertEqual(self.organization.description, desc) @tier1 def test_positive_update_name_and_description(self): """Update an organization with new name and description. :id: 30036e70-b8fc-4c24-9494-b201bbd1c28d :expectedresults: The organization's name and description are updated. :CaseImportance: Critical """ name = gen_string('alpha') desc = gen_string('alpha') self.organization.name = name self.organization.description = desc self.organization = self.organization.update(['name', 'description']) self.assertEqual(self.organization.name, name) self.assertEqual(self.organization.description, desc) @tier2 def test_positive_update_user(self): """Update an organization, associate user with it. :id: 2c0c0061-5b4e-4007-9f54-b61d6e65ef58 :expectedresults: User is associated with organization. :CaseLevel: Integration """ user = entities.User().create() self.organization.user = [user] self.organization = self.organization.update(['user']) self.assertEqual(len(self.organization.user), 1) self.assertEqual(self.organization.user[0].id, user.id) @tier2 def test_positive_update_subnet(self): """Update an organization, associate subnet with it. :id: 3aa0b9cb-37f7-4e7e-a6ec-c1b407225e54 :expectedresults: Subnet is associated with organization. :CaseLevel: Integration """ subnet = entities.Subnet().create() self.organization.subnet = [subnet] self.organization = self.organization.update(['subnet']) self.assertEqual(len(self.organization.subnet), 1) self.assertEqual(self.organization.subnet[0].id, subnet.id) @tier2 @skip_if_bug_open('bugzilla', 1230865) def test_positive_add_media(self): """Update an organization and associate it with a media. :id: 83f085d9-94c0-4462-9780-d29ea4cb5aac :expectedresults: An organization is associated with a media. 
:CaseLevel: Integration """ media = entities.Media().create() self.organization.media = [media] self.organization = self.organization.update(['media']) self.assertEqual(len(self.organization.media), 1) self.assertEqual(self.organization.media[0].id, media.id) @tier2 def test_positive_add_hostgroup(self): """Add a hostgroup to an organization :id: e8c2ccfd-9ae8-4a39-b459-bc5818f54e63 :expectedresults: Hostgroup is added to organization :CaseLevel: Integration """ org = entities.Organization().create() hostgroup = entities.HostGroup().create() org.hostgroup = [hostgroup] org = org.update(['hostgroup']) self.assertEqual(len(org.hostgroup), 1) self.assertEqual(org.hostgroup[0].id, hostgroup.id) @skip_if_bug_open('bugzilla', 1395229) @tier2 def test_positive_remove_hostgroup(self): """Add a hostgroup to an organization and then remove it :id: 7eb1aca7-fd7b-404f-ab18-21be5052a11f :expectedresults: Hostgroup is added to organization and then removed :CaseLevel: Integration """ org = entities.Organization().create() hostgroup = entities.HostGroup().create() org.hostgroup = [hostgroup] org = org.update(['hostgroup']) self.assertEqual(len(org.hostgroup), 1) org.hostgroup = [] org = org.update(['hostgroup']) self.assertEqual(len(org.hostgroup), 0) @tier2 @skip_if_bug_open('bugzilla', 1395229) def test_positive_add_smart_proxy(self): """Add a smart proxy to an organization :id: e21de720-3fa2-429b-bd8e-b6a48a13146d :expectedresults: Smart proxy is successfully added to organization :CaseLevel: Integration """ # Every Satellite has a built-in smart proxy, so let's find it smart_proxy = entities.SmartProxy().search(query={ 'search': 'url = https://{0}:9090'.format(settings.server.hostname) }) # Check that proxy is found and unpack it from the list self.assertGreater(len(smart_proxy), 0) smart_proxy = smart_proxy[0] # By default, newly created organization uses built-in smart proxy, # so we need to remove it first org = entities.Organization().create() org.smart_proxy = [] org = org.update(['smart_proxy']) # Verify smart proxy was actually removed self.assertEqual(len(org.smart_proxy), 0) # Add smart proxy to organization org.smart_proxy = [smart_proxy] org = org.update(['smart_proxy']) # Verify smart proxy was actually added self.assertEqual(len(org.smart_proxy), 1) self.assertEqual(org.smart_proxy[0].id, smart_proxy.id) @skip_if_bug_open('bugzilla', 1395229) @tier2 def test_positive_remove_smart_proxy(self):<|fim▁hole|> :id: 8045910e-d85c-47ee-9aed-ac0a6bbb646b :expectedresults: Smart proxy is removed from organization :CaseLevel: Integration """ # By default, newly created organization uses built-in smart proxy, # so we can remove it instead of adding and removing some another one org = entities.Organization().create() self.assertGreater(len(org.smart_proxy), 0) org.smart_proxy = [] org = org.update(['smart_proxy']) # Verify smart proxy was actually removed self.assertEqual(len(org.smart_proxy), 0) @tier1 def test_negative_update(self): """Update an organization's attributes with invalid values. :id: b7152d0b-5ab0-4d68-bfdf-f3eabcb5fbc6 :expectedresults: The organization's attributes are not updated. :CaseImportance: Critical """ dataset = ( {'name': gen_string(str_type='utf8', length=256)}, # Immutable. See BZ 1089996. 
{'label': gen_string(str_type='utf8')}, ) for attrs in dataset: with self.subTest(attrs): with self.assertRaises(HTTPError): entities.Organization( id=self.organization.id, **attrs ).update(attrs.keys()) @tier2 @skip_if_bug_open('bugzilla', 1103157) def test_verify_bugzilla_1103157(self): """Create organization and add two compute resources one by one using different transactions and different users to see that they actually added, but not overwrite each other :id: 5f4fd2b7-d998-4980-b5e7-9822bd54156b :Steps: 1. Use the admin user to create an organization and two compute resources. Make one compute resource point at / belong to the organization. 2. Create a user and give them the ability to update compute resources and organizations. Have this user make the second compute resource point at / belong to the organization. 3. Use the admin user to read information about the organization. Verify that both compute resources are pointing at / belong to the organization. :expectedresults: Organization contains both compute resources :CaseLevel: Integration """ # setUpClass() creates an organization w/admin user. Here, we use admin # to make two compute resources and make first belong to organization. compute_resources = [ entities.LibvirtComputeResource( name=gen_string('alpha'), url='qemu://host.example.com/system' ).create() for _ in range(2) ] self.organization.compute_resource = compute_resources[:1] # list self.organization = self.organization.update(['compute_resource']) self.assertEqual(len(self.organization.compute_resource), 1) # Create a new user and give them minimal permissions. login = gen_alphanumeric() password = gen_alphanumeric() user = entities.User(login=login, password=password).create() role = entities.Role().create() for perm in ['edit_compute_resources', 'edit_organizations']: permissions = [ entities.Permission(id=permission['id']) for permission in entities.Permission(name=perm).search() ] entities.Filter(permission=permissions, role=role).create() user.role = [role] user = user.update(['role']) # Make new user assign second compute resource to org. cfg = get_nailgun_config() cfg.auth = (login, password) entities.Organization( cfg, id=self.organization.id, compute_resource=compute_resources[1:], # slice returns list ).update(['compute_resource']) # Use admin to verify both compute resources belong to organization. self.assertEqual(len(self.organization.read().compute_resource), 2)<|fim▁end|>
"""Remove a smart proxy from an organization
<|file_name|>parser.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # CAVEAT UTILITOR # # This file was automatically generated by Grako. # # https://pypi.python.org/pypi/grako/ # # Any changes you make to it will be overwritten the next time # the file is generated. from __future__ import print_function, division, absolute_import, unicode_literals from grako.parsing import graken, Parser from grako.util import re, RE_FLAGS __version__ = (2015, 12, 26, 22, 15, 59, 5) __all__ = [ 'BParser', 'BSemantics', 'main' ] class BParser(Parser): def __init__(self, whitespace=None, nameguard=None, comments_re='/\\*((?:[^\\*]|\\*[^/]|\\n)*?)\\*+/', eol_comments_re=None, ignorecase=None, left_recursion=False, **kwargs): super(BParser, self).__init__( whitespace=whitespace, nameguard=nameguard, comments_re=comments_re, eol_comments_re=eol_comments_re, ignorecase=ignorecase, left_recursion=left_recursion, **kwargs ) @graken() def _program_(self): def block1(): self._definition_() self._cut() self._closure(block1) self.ast['@'] = self.last_node self._check_eof() @graken() def _definition_(self): with self._choice(): with self._option(): self._simpledef_() with self._option(): self._vectordef_() with self._option(): self._functiondef_() self._error('no available options') @graken() def _simpledef_(self): self._name_() self.ast['name'] = self.last_node with self._optional(): self._ival_() self.ast['init'] = self.last_node self._token(';') self.ast._define( ['name', 'init'], [] ) @graken() def _vectordef_(self): self._name_() self.ast['name'] = self.last_node self._token('[') with self._optional(): self._constantexpr_() self.ast['maxidx'] = self.last_node self._token(']') with self._optional(): self._ivallist_() self.ast['ivals'] = self.last_node self._token(';') self.ast._define( ['name', 'maxidx', 'ivals'], [] ) @graken() def _ivallist_(self): self._ival_() self.ast.setlist('@', self.last_node) def block1(): self._token(',') self._ival_() self.ast.setlist('@', self.last_node) self._closure(block1) @graken() def _ival_(self): with self._choice(): with self._option(): self._numericexpr_() with self._option(): self._characterexpr_() with self._option(): self._stringexpr_() self._error('no available options') @graken() def _functiondef_(self): self._name_() self.ast['name'] = self.last_node self._token('(') with self._optional(): self._namelist_() self.ast['args'] = self.last_node self._token(')') self._cut() self._statement_() self.ast['body'] = self.last_node self.ast._define( ['name', 'args', 'body'], [] ) @graken() def _statement_(self): with self._choice(): with self._option(): self._labelstatement_() with self._option(): self._gotostatement_() with self._option(): self._switchstatement_() with self._option(): self._casestatement_() with self._option(): self._breakstatement_() with self._option(): self._autostatement_() with self._option(): self._extrnstatement_() with self._option(): self._compoundstatement_() with self._option(): self._ifstatement_() with self._option(): self._whilestatement_() with self._option(): self._returnstatement_() with self._option(): self._exprstatement_() with self._option(): self._nullstatement_() self._error('no available options') @graken() def _labelstatement_(self): with self._ifnot(): with self._group(): self._token('default') self._name_() self.ast['label'] = self.last_node self._token(':') self._statement_() self.ast['statement'] = self.last_node self.ast._define( ['label', 'statement'], [] ) @graken() def _gotostatement_(self): 
self._token('goto') self._cut() self._name_() self.ast['label'] = self.last_node self._token(';') self.ast._define( ['label'], [] ) @graken() def _switchstatement_(self): self._token('switch') self._cut() self._expr_() self.ast['rvalue'] = self.last_node self._cut() self._statement_() self.ast['body'] = self.last_node self.ast._define( ['rvalue', 'body'], [] ) @graken() def _casestatement_(self): with self._group(): with self._choice(): with self._option(): with self._group(): self._token('case') self._constantexpr_() self.ast['cond'] = self.last_node with self._option(): self._token('default') self._error('expecting one of: default') self._cut() self._token(':') self._statement_() self.ast['then'] = self.last_node self.ast._define( ['cond', 'then'], [] ) @graken() def _breakstatement_(self): self._token('break') self._token(';') @graken() def _autostatement_(self): self._token('auto') self._cut() self._autovar_() self.ast.setlist('@', self.last_node) def block1(): self._token(',') self._autovar_() self.ast.setlist('@', self.last_node) self._closure(block1) self._token(';') @graken() def _autovar_(self): self._name_() self.ast['name'] = self.last_node with self._optional(): self._token('[') self._constantexpr_() self.ast['maxidx'] = self.last_node self._token(']') self.ast._define( ['name', 'maxidx'], [] ) @graken() def _extrnstatement_(self): self._token('extrn') self._cut() self._namelist_() self.ast['@'] = self.last_node self._token(';') @graken() def _compoundstatement_(self): self._token('{') self._cut() def block1(): self._statement_() self._cut() self._closure(block1) self.ast['@'] = self.last_node self._token('}') @graken() def _ifstatement_(self): self._token('if') self._cut() self._token('(') self._expr_() self.ast['cond'] = self.last_node self._token(')') self._statement_() self.ast['then'] = self.last_node with self._optional(): self._token('else') self._statement_() self.ast['otherwise'] = self.last_node self.ast._define( ['cond', 'then', 'otherwise'], [] ) @graken() def _whilestatement_(self): self._token('while') self._cut() self._token('(') self._expr_() self.ast['cond'] = self.last_node self._token(')') self._statement_() self.ast['body'] = self.last_node self.ast._define( ['cond', 'body'], [] ) @graken() def _returnstatement_(self): self._token('return') self._cut() with self._optional(): self._token('(') self._expr_() self.ast['return_value'] = self.last_node self._token(')') self._token(';') self.ast._define( ['return_value'], [] ) @graken() def _exprstatement_(self): self._expr_() self.ast['@'] = self.last_node self._token(';') @graken() def _nullstatement_(self): self._token(';') @graken() def _expr_(self): self._assignexpr_() @graken() def _assignexpr_(self): self._condexpr_() self.ast['lhs'] = self.last_node with self._optional(): self._assignop_() self.ast['op'] = self.last_node self._assignexpr_() self.ast['rhs'] = self.last_node self.ast._define( ['lhs', 'op', 'rhs'], [] ) @graken() def _assignop_(self): self._pattern(r'=([+\-/\*%&^|]|[=!]=|>[=>]?|<[=<]?)?') @graken() def _condexpr_(self): self._orexpr_() self.ast['cond'] = self.last_node with self._optional(): self._token('?') self._condexpr_() self.ast['then'] = self.last_node self._token(':') self._condexpr_() self.ast['otherwise'] = self.last_node self.ast._define( ['cond', 'then', 'otherwise'], [] ) @graken() def _orexpr_(self): self._xorexpr_() self.ast['lhs'] = self.last_node def block2(): self._ortail_() self._closure(block2) self.ast['tail'] = self.last_node self.ast._define( ['lhs', 'tail'], [] ) 
@graken() def _ortail_(self): self._token('|') self.ast['op'] = self.last_node self._xorexpr_() self.ast['rhs'] = self.last_node self.ast._define( ['op', 'rhs'], [] ) <|fim▁hole|> self._andexpr_() self.ast['lhs'] = self.last_node def block2(): self._xortail_() self._closure(block2) self.ast['tail'] = self.last_node self.ast._define( ['lhs', 'tail'], [] ) @graken() def _xortail_(self): self._token('^') self.ast['op'] = self.last_node self._andexpr_() self.ast['rhs'] = self.last_node self.ast._define( ['op', 'rhs'], [] ) @graken() def _andexpr_(self): self._eqexpr_() self.ast['lhs'] = self.last_node def block2(): self._andtail_() self._closure(block2) self.ast['tail'] = self.last_node self.ast._define( ['lhs', 'tail'], [] ) @graken() def _andtail_(self): self._token('&') self.ast['op'] = self.last_node self._eqexpr_() self.ast['rhs'] = self.last_node self.ast._define( ['op', 'rhs'], [] ) @graken() def _eqexpr_(self): self._relexpr_() self.ast['lhs'] = self.last_node def block2(): self._eqtail_() self._closure(block2) self.ast['tail'] = self.last_node self.ast._define( ['lhs', 'tail'], [] ) @graken() def _eqtail_(self): self._eqop_() self.ast['op'] = self.last_node self._relexpr_() self.ast['rhs'] = self.last_node self.ast._define( ['op', 'rhs'], [] ) @graken() def _eqop_(self): self._pattern(r'[!=]=') @graken() def _relexpr_(self): self._shiftexpr_() self.ast['lhs'] = self.last_node def block2(): self._reltail_() self._closure(block2) self.ast['tail'] = self.last_node self.ast._define( ['lhs', 'tail'], [] ) @graken() def _reltail_(self): self._relop_() self.ast['op'] = self.last_node self._shiftexpr_() self.ast['rhs'] = self.last_node self.ast._define( ['op', 'rhs'], [] ) @graken() def _relop_(self): self._pattern(r'[<>]={0,1}') @graken() def _shiftexpr_(self): self._addexpr_() self.ast['lhs'] = self.last_node def block2(): self._shifttail_() self._closure(block2) self.ast['tail'] = self.last_node self.ast._define( ['lhs', 'tail'], [] ) @graken() def _shifttail_(self): self._shiftop_() self.ast['op'] = self.last_node self._addexpr_() self.ast['rhs'] = self.last_node self.ast._define( ['op', 'rhs'], [] ) @graken() def _shiftop_(self): self._pattern(r'<<|>>') @graken() def _addexpr_(self): self._multexpr_() self.ast['lhs'] = self.last_node def block2(): self._addtail_() self._closure(block2) self.ast['tail'] = self.last_node self.ast._define( ['lhs', 'tail'], [] ) @graken() def _addtail_(self): self._addop_() self.ast['op'] = self.last_node self._multexpr_() self.ast['rhs'] = self.last_node self.ast._define( ['op', 'rhs'], [] ) @graken() def _addop_(self): self._pattern(r'[+-]') @graken() def _multexpr_(self): self._unaryexpr_() self.ast['lhs'] = self.last_node def block2(): self._multtail_() self._closure(block2) self.ast['tail'] = self.last_node self.ast._define( ['lhs', 'tail'], [] ) @graken() def _multtail_(self): self._multop_() self.ast['op'] = self.last_node self._unaryexpr_() self.ast['rhs'] = self.last_node self.ast._define( ['op', 'rhs'], [] ) @graken() def _multop_(self): self._pattern(r'[/%\*]') @graken() def _unaryexpr_(self): def block1(): self._leftunaryop_() self._closure(block1) self.ast['leftops'] = self.last_node self._primaryexpr_() self.ast['rhs'] = self.last_node def block4(): self._rightunaryop_() self._closure(block4) self.ast['rightops'] = self.last_node self.ast._define( ['leftops', 'rhs', 'rightops'], [] ) @graken() def _leftunaryop_(self): self._pattern(r'[\*&!\~]|--?|\+\+') @graken() def _rightunaryop_(self): with self._choice(): with self._option(): 
self._token('++') with self._option(): self._token('--') self._error('expecting one of: ++ --') @graken() def _primaryexpr_(self): self._primaryexprhead_() self.ast['head'] = self.last_node def block2(): self._primaryexprtail_() self._closure(block2) self.ast['tail'] = self.last_node self.ast._define( ['head', 'tail'], [] ) @graken() def _primaryexprhead_(self): with self._choice(): with self._option(): self._token('(') self._expr_() self.ast['@'] = self.last_node self._token(')') with self._option(): self._builtinexpr_() with self._option(): self._variableexpr_() with self._option(): self._constantexpr_() with self._option(): self._stringexpr_() self._error('no available options') @graken() def _primaryexprtail_(self): with self._choice(): with self._option(): self._token('(') with self._optional(): self._exprlist_() self.ast['args'] = self.last_node self._token(')') with self._option(): self._token('[') self._expr_() self.ast['index'] = self.last_node self._token(']') self._error('expecting one of: (') self.ast._define( ['args', 'index'], [] ) @graken() def _variableexpr_(self): with self._ifnot(): self._builtinexpr_() self._name_() @graken() def _constantexpr_(self): with self._choice(): with self._option(): self._numericexpr_() with self._option(): self._characterexpr_() self._error('no available options') @graken() def _builtinexpr_(self): self._token('__bytes_per_word') @graken() def _numericexpr_(self): def block0(): self._NUMERIC_() self._positive_closure(block0) @graken() def _characterexpr_(self): self._token("'") def block1(): self._CHARACTERCONSTCHAR_() self._closure(block1) self.ast['@'] = self.last_node self._token("'") @graken() def _stringexpr_(self): self._token('"') def block1(): self._STRINGCONSTCHAR_() self._closure(block1) self.ast['@'] = self.last_node self._token('"') @graken() def _name_(self): self._ALPHA_() self.ast['head'] = self.last_node def block2(): with self._choice(): with self._option(): self._ALPHA_() with self._option(): self._NUMERIC_() self._error('no available options') self._closure(block2) self.ast['tail'] = self.last_node self.ast._define( ['head', 'tail'], [] ) @graken() def _ALPHA_(self): self._pattern(r'[A-Za-z_\.\b]') @graken() def _NUMERIC_(self): self._pattern(r'[0-9]') @graken() def _CHARACTERCONSTCHAR_(self): self._pattern(r"([^'\*])|(\*.)") @graken() def _STRINGCONSTCHAR_(self): self._pattern(r'([^"\*])|(\*.)') @graken() def _exprlist_(self): self._expr_() self.ast.setlist('@', self.last_node) def block1(): self._token(',') self._expr_() self.ast.setlist('@', self.last_node) self._closure(block1) @graken() def _namelist_(self): self._name_() self.ast.setlist('@', self.last_node) def block1(): self._token(',') self._name_() self.ast.setlist('@', self.last_node) self._closure(block1) class BSemantics(object): def program(self, ast): return ast def definition(self, ast): return ast def simpledef(self, ast): return ast def vectordef(self, ast): return ast def ivallist(self, ast): return ast def ival(self, ast): return ast def functiondef(self, ast): return ast def statement(self, ast): return ast def labelstatement(self, ast): return ast def gotostatement(self, ast): return ast def switchstatement(self, ast): return ast def casestatement(self, ast): return ast def breakstatement(self, ast): return ast def autostatement(self, ast): return ast def autovar(self, ast): return ast def extrnstatement(self, ast): return ast def compoundstatement(self, ast): return ast def ifstatement(self, ast): return ast def whilestatement(self, ast): return ast 
def returnstatement(self, ast): return ast def exprstatement(self, ast): return ast def nullstatement(self, ast): return ast def expr(self, ast): return ast def assignexpr(self, ast): return ast def assignop(self, ast): return ast def condexpr(self, ast): return ast def orexpr(self, ast): return ast def ortail(self, ast): return ast def xorexpr(self, ast): return ast def xortail(self, ast): return ast def andexpr(self, ast): return ast def andtail(self, ast): return ast def eqexpr(self, ast): return ast def eqtail(self, ast): return ast def eqop(self, ast): return ast def relexpr(self, ast): return ast def reltail(self, ast): return ast def relop(self, ast): return ast def shiftexpr(self, ast): return ast def shifttail(self, ast): return ast def shiftop(self, ast): return ast def addexpr(self, ast): return ast def addtail(self, ast): return ast def addop(self, ast): return ast def multexpr(self, ast): return ast def multtail(self, ast): return ast def multop(self, ast): return ast def unaryexpr(self, ast): return ast def leftunaryop(self, ast): return ast def rightunaryop(self, ast): return ast def primaryexpr(self, ast): return ast def primaryexprhead(self, ast): return ast def primaryexprtail(self, ast): return ast def variableexpr(self, ast): return ast def constantexpr(self, ast): return ast def builtinexpr(self, ast): return ast def numericexpr(self, ast): return ast def characterexpr(self, ast): return ast def stringexpr(self, ast): return ast def name(self, ast): return ast def ALPHA(self, ast): return ast def NUMERIC(self, ast): return ast def CHARACTERCONSTCHAR(self, ast): return ast def STRINGCONSTCHAR(self, ast): return ast def exprlist(self, ast): return ast def namelist(self, ast): return ast def main(filename, startrule, trace=False, whitespace=None, nameguard=None): import json with open(filename) as f: text = f.read() parser = BParser(parseinfo=False) ast = parser.parse( text, startrule, filename=filename, trace=trace, whitespace=whitespace, nameguard=nameguard) print('AST:') print(ast) print() print('JSON:') print(json.dumps(ast, indent=2)) print() if __name__ == '__main__': import argparse import string import sys class ListRules(argparse.Action): def __call__(self, parser, namespace, values, option_string): print('Rules:') for r in BParser.rule_list(): print(r) print() sys.exit(0) parser = argparse.ArgumentParser(description="Simple parser for B.") parser.add_argument('-l', '--list', action=ListRules, nargs=0, help="list all rules and exit") parser.add_argument('-n', '--no-nameguard', action='store_true', dest='no_nameguard', help="disable the 'nameguard' feature") parser.add_argument('-t', '--trace', action='store_true', help="output trace information") parser.add_argument('-w', '--whitespace', type=str, default=string.whitespace, help="whitespace specification") parser.add_argument('file', metavar="FILE", help="the input file to parse") parser.add_argument('startrule', metavar="STARTRULE", help="the start rule for parsing") args = parser.parse_args() main( args.file, args.startrule, trace=args.trace, whitespace=args.whitespace, nameguard=not args.no_nameguard )<|fim▁end|>
@graken() def _xorexpr_(self):
<|file_name|>MqttContext.java<|end_file_name|><|fim▁begin|>/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.iot.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * Specifies the MQTT context to use for the test authorizer request * </p> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class MqttContext implements Serializable, Cloneable, StructuredPojo { /** * <p> * The value of the <code>username</code> key in an MQTT authorization request. * </p> */ private String username; /** * <p> * The value of the <code>password</code> key in an MQTT authorization request. * </p> */ private java.nio.ByteBuffer password; /** * <p> * The value of the <code>clientId</code> key in an MQTT authorization request. * </p> */ private String clientId; /** * <p> * The value of the <code>username</code> key in an MQTT authorization request. * </p> * * @param username * The value of the <code>username</code> key in an MQTT authorization request. */ public void setUsername(String username) { this.username = username; } /** * <p> * The value of the <code>username</code> key in an MQTT authorization request. * </p> * * @return The value of the <code>username</code> key in an MQTT authorization request. */ public String getUsername() { return this.username; } /** * <p> * The value of the <code>username</code> key in an MQTT authorization request. * </p> * * @param username * The value of the <code>username</code> key in an MQTT authorization request. * @return Returns a reference to this object so that method calls can be chained together. */ public MqttContext withUsername(String username) { setUsername(username); return this; } /** * <p> * The value of the <code>password</code> key in an MQTT authorization request. * </p> * <p> * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service. * Users of the SDK should not perform Base64 encoding on this field. * </p> * <p> * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer will * be seen by all objects that have a reference to this object. It is recommended to call ByteBuffer.duplicate() or * ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior will be changed in a future * major version of the SDK. * </p> * * @param password * The value of the <code>password</code> key in an MQTT authorization request. */ public void setPassword(java.nio.ByteBuffer password) { this.password = password; } /** * <p> * The value of the <code>password</code> key in an MQTT authorization request. * </p> * <p> * {@code ByteBuffer}s are stateful. Calling their {@code get} methods changes their {@code position}. 
We recommend * using {@link java.nio.ByteBuffer#asReadOnlyBuffer()} to create a read-only view of the buffer with an independent * {@code position}, and calling {@code get} methods on this rather than directly on the returned {@code ByteBuffer}. * Doing so will ensure that anyone else using the {@code ByteBuffer} will not be affected by changes to the * {@code position}. * </p> * * @return The value of the <code>password</code> key in an MQTT authorization request. */ public java.nio.ByteBuffer getPassword() { return this.password; } /** * <p> * The value of the <code>password</code> key in an MQTT authorization request. * </p> * <p> * The AWS SDK for Java performs a Base64 encoding on this field before sending this request to the AWS service. * Users of the SDK should not perform Base64 encoding on this field. * </p> * <p> * Warning: ByteBuffers returned by the SDK are mutable. Changes to the content or position of the byte buffer will * be seen by all objects that have a reference to this object. It is recommended to call ByteBuffer.duplicate() or * ByteBuffer.asReadOnlyBuffer() before using or reading from the buffer. This behavior will be changed in a future * major version of the SDK. * </p> * * @param password * The value of the <code>password</code> key in an MQTT authorization request. * @return Returns a reference to this object so that method calls can be chained together. */ public MqttContext withPassword(java.nio.ByteBuffer password) { setPassword(password); return this; } /** * <p> * The value of the <code>clientId</code> key in an MQTT authorization request. * </p> * * @param clientId * The value of the <code>clientId</code> key in an MQTT authorization request. */ public void setClientId(String clientId) { this.clientId = clientId; } /** * <p> * The value of the <code>clientId</code> key in an MQTT authorization request. * </p> * * @return The value of the <code>clientId</code> key in an MQTT authorization request. */ public String getClientId() { return this.clientId; } /** * <p> * The value of the <code>clientId</code> key in an MQTT authorization request. * </p> * * @param clientId * The value of the <code>clientId</code> key in an MQTT authorization request. * @return Returns a reference to this object so that method calls can be chained together. */ public MqttContext withClientId(String clientId) { setClientId(clientId); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getUsername() != null) sb.append("Username: ").append(getUsername()).append(","); if (getPassword() != null) sb.append("Password: ").append(getPassword()).append(","); if (getClientId() != null) sb.append("ClientId: ").append(getClientId()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof MqttContext == false) return false; MqttContext other = (MqttContext) obj; if (other.getUsername() == null ^ this.getUsername() == null) return false; if (other.getUsername() != null && other.getUsername().equals(this.getUsername()) == false) return false; if (other.getPassword() == null ^ this.getPassword() == null) return false; if (other.getPassword() != null && other.getPassword().equals(this.getPassword()) == false) return false; if (other.getClientId() == null ^ this.getClientId() == null) return false; if (other.getClientId() != null && other.getClientId().equals(this.getClientId()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getUsername() == null) ? 0 : getUsername().hashCode()); hashCode = prime * hashCode + ((getPassword() == null) ? 0 : getPassword().hashCode()); hashCode = prime * hashCode + ((getClientId() == null) ? 0 : getClientId().hashCode()); return hashCode; } @Override public MqttContext clone() {<|fim▁hole|> } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.iot.model.transform.MqttContextMarshaller.getInstance().marshall(this, protocolMarshaller); } }<|fim▁end|>
try { return (MqttContext) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
<|file_name|>TempConv.py<|end_file_name|><|fim▁begin|># TempConv.py # Celcius to Fahreinheit<|fim▁hole|>def Fahreinheit(temp): temp = float(temp) temp = (temp*9/5)+32 return temp # Fahreinheit to Celcius def Celcius(temp): temp = float(temp) temp = (temp-32)*5/9 return temp<|fim▁end|>
<|file_name|>main.py<|end_file_name|><|fim▁begin|>import gi gi.require_version('Gtk', '3.0') from gi.repository import Gtk, Gdk import os from subprocess import call, Popen, PIPE, STDOUT class GtkPassWindow(Gtk.Window): def __init__(self): self.search_text = '' self.search_result_text = ''<|fim▁hole|> self.build_data_structures() def get_pass_path(self): self.pass_path = os.path.expanduser('~/.password-store') def build_gui(self): Gtk.Window.__init__(self, title='pass') self.set_border_width(10) self.set_default_size(300, -1) self.text_view = Gtk.Entry() self.text_view.set_editable(False) self.text_view.set_can_focus(False) self.text_entry = Gtk.Entry() self.text_entry.connect('key-release-event', self.on_key_release) self.text_entry.connect('activate', self.on_activate) self.text_entry.set_icon_from_icon_name(Gtk.EntryIconPosition.PRIMARY, 'system-search-symbolic') self.box = Gtk.Box(orientation=Gtk.Orientation.VERTICAL, spacing=6) self.box.pack_start(self.text_view, True, True, 0) self.box.pack_start(self.text_entry, True, True, 0) self.add(self.box) self.text_entry.grab_focus() def build_data_structures(self): self.pass_list = [] for root, dirs, files in os.walk(self.pass_path): for file_ in files: file_ = os.path.join(root, file_) if os.path.splitext(file_)[1] == '.gpg': pass_list_item = os.path.relpath(file_, self.pass_path) pass_list_item = os.path.splitext(pass_list_item)[0] self.pass_list.append(pass_list_item) def fuzzy_find(self): env = os.environ.copy() fzf_bin = os.path.expanduser('~/.fzf/bin') if fzf_bin not in env['PATH']: env['PATH'] += '{}:{}'.format(env['PATH'], fzf_bin) p = Popen(['fzf', '-f', self.search_text], env=env, stdin=PIPE, stdout=PIPE, stderr=STDOUT) fzf_in = '\n'.join(self.pass_list).encode('utf-8') return p.communicate(fzf_in)[0].decode().strip().split('\n') def on_key_release(self, widget, event): if event.keyval == Gdk.KEY_Escape: Gtk.main_quit() self.search_text = self.text_entry.get_text().strip() if self.search_text == '': self.search_result_text = None else: search_result = self.fuzzy_find() if search_result == []: self.search_result_text = None else: self.search_result_text = search_result[0] if self.search_result_text: self.text_view.set_text(self.search_result_text) else: self.text_view.set_text('') def on_button_release(self, widget, event): self.copy_to_clipboard() def on_activate(self, event): self.copy_to_clipboard() def copy_to_clipboard(self): if self.search_result_text: p = call(['pass', '-c', self.search_result_text]) self.text_entry.set_icon_from_icon_name( Gtk.EntryIconPosition.SECONDARY, 'edit-paste-symbolic') def main(): win = GtkPassWindow() win.connect('delete-event', Gtk.main_quit) win.show_all() Gtk.main() if __name__ == '__main__': main()<|fim▁end|>
self.get_pass_path() self.build_gui()
<|file_name|>MediaExportParameters.java<|end_file_name|><|fim▁begin|>/** * @(#)MediaExportParameters.java * * This file is part of the Non-Linear Book project. * Copyright (c) 2012-2016 Anton P. Kolosov * Authors: Anton P. Kolosov, et al. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License version 3 * as published by the Free Software Foundation with the addition of the * following permission added to Section 15 as permitted in Section 7(a): * FOR ANY PART OF THE COVERED WORK IN WHICH THE COPYRIGHT IS OWNED BY * ANTON P. KOLOSOV. ANTON P. KOLOSOV DISCLAIMS THE WARRANTY OF NON INFRINGEMENT * OF THIRD PARTY RIGHTS * * This program is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. * See the GNU Affero General Public License for more details. * You should have received a copy of the GNU Affero General Public License * along with this program; if not, see http://www.gnu.org/licenses or write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA, 02110-1301 USA. * * The interactive user interfaces in modified source and object code versions * of this program must display Appropriate Legal Notices, as required under * Section 5 of the GNU Affero General Public License. * * You can be released from the requirements of the license by purchasing * a commercial license. Buying such a license is mandatory as soon as you * develop commercial activities involving the Non-Linear Book software without * disclosing the source code of your own applications. * * For more information, please contact Anton P. Kolosov at this * address: [email protected] * * Copyright (c) 2012 Anton P. Kolosov All rights reserved. */ package com.nlbhub.nlb.domain; import com.nlbhub.nlb.api.PropertyManager; /** * The MediaExportParameters class represents parameters used when saving media files during export of the scheme * to some end format (such as INSTEAD game). * * @author Anton P. Kolosov * @version 1.0 8/9/12 */ public class MediaExportParameters { public enum Preset {CUSTOM, DEFAULT, NOCHANGE, COMPRESSED};<|fim▁hole|> new MediaExportParameters( Preset.DEFAULT, PropertyManager.getSettings().getDefaultConfig().getExport().isConvertpng2jpg(), PropertyManager.getSettings().getDefaultConfig().getExport().getQuality() ) ); private Preset m_preset = Preset.CUSTOM; private boolean m_convertPNG2JPG; private int m_quality; public static MediaExportParameters fromPreset(Preset preset) { switch (preset) { case NOCHANGE: return MediaExportParameters.NOCHANGE; case COMPRESSED: return MediaExportParameters.COMPRESSED; default: return MediaExportParameters.DEFAULT; } } public static MediaExportParameters getDefault() { return DEFAULT; } /* public MediaExportParameters(boolean convertPNG2JPG, int quality) { m_preset = Preset.CUSTOM; m_convertPNG2JPG = convertPNG2JPG; m_quality = quality; } */ private MediaExportParameters(Preset preset, boolean convertPNG2JPG, int quality) { m_preset = preset; m_convertPNG2JPG = convertPNG2JPG; m_quality = quality; } public Preset getPreset() { return m_preset; } public boolean isConvertPNG2JPG() { return m_convertPNG2JPG; } public int getQuality() { return m_quality; } }<|fim▁end|>
private static final MediaExportParameters NOCHANGE = new MediaExportParameters(Preset.NOCHANGE, false, 0); private static final MediaExportParameters COMPRESSED = new MediaExportParameters(Preset.COMPRESSED, true, 80); private static final MediaExportParameters DEFAULT = (
<|file_name|>weatherai.py<|end_file_name|><|fim▁begin|>from sklearn.ensemble import RandomForestClassifier from sklearn.datasets import make_classification import numpy as np def predictClothesGeneral(temp): dataFile = open("data.txt") data = dataFile.read() data = data.split("\n") X = [] Y = [] Y2 = [] for i in range(0,len(data) - 1): X.append([float(data[i].split(":")[1])]) Y.append(int(data[i].split(":")[3])) Y2.append(int(data[i].split(":")[4])) clf = RandomForestClassifier(n_estimators=25) clf2 = RandomForestClassifier(n_estimators=25) clf.fit(X,Y) clf2.fit(X,Y2) pants = clf.predict([[temp]]) tops = clf2.predict([[temp]]) s = "I recommend you wear a pair of " if pants == 1: s = s + "jeans" else: s = s + "khaki shorts" s = s + " and a " if tops == 1: s = s + "shirt, its a nice day out!" elif tops == 2: s = s + "sweat shirt." else: s = s + "jacket, it will be chilly today." return s def predictFromFileGeneral(fileName): fi = open(fileName) data = fi.read().split("\n") for i in range(0,len(data) - 1): data2 = data[i].split(":") print "At " + data2[1].split(",")[0] + " degrees... " + predictClothesGeneral(float(data2[1].split(",")[0])) def addToKnownList(shirt, temp): dataFile = open("userAdded.txt", 'a') dataFile.write(str(shirt + ":" + str(temp)) + '\n') def predictClothesData(temp): dataFile = open("userAdded.txt") data = dataFile.read() data = data.split("\n") X = []<|fim▁hole|> X.append([float(data[i].split(":")[1])]) Y.append(data[i].split(":")[0]) clf = RandomForestClassifier(n_estimators=25) clf.fit(X,Y) predict = clf.predict([[temp]]) return predict def predictFromFileData(fileName): fi = open(fileName) data = fi.read().split("\n") for i in range(0,len(data) - 1): data2 = data[i].split(":") print "At " + data2[1].split(",")[0] + " degrees... I would recommend a " + predictClothesData(float(data2[1].split(",")[0]))[0]<|fim▁end|>
Y = [] for i in range(0,len(data) - 1):
<|file_name|>utils.go<|end_file_name|><|fim▁begin|>package main import ( "fmt" "os" "github.com/Sirupsen/logrus" "github.com/opencontainers/runtime-spec/specs-go" "github.com/urfave/cli" ) // fatal prints the error's details if it is a libcontainer specific error type // then exits the program with an exit status of 1. func fatal(err error) { // make sure the error is written to the logger logrus.Error(err) fmt.Fprintln(os.Stderr, err) os.Exit(1) } // setupSpec performs initial setup based on the cli.Context for the container func setupSpec(context *cli.Context) (*specs.Spec, error) { bundle := context.String("bundle") if bundle != "" { if err := os.Chdir(bundle); err != nil { return nil, err } } spec, err := loadSpec(specConfig) if err != nil { return nil, err } notifySocket := os.Getenv("NOTIFY_SOCKET") if notifySocket != "" { setupSdNotify(spec, notifySocket) }<|fim▁hole|> return spec, nil }<|fim▁end|>
if os.Geteuid() != 0 { return nil, fmt.Errorf("runc should be run as root") }
<|file_name|>bitcoin_fa.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="fa" version="2.0"> <defaultcodec>UTF-8</defaultcodec> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About OMGCoin</source> <translation>در مورد OMGCoin</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;OMGCoin&lt;/b&gt; version</source> <translation>نسخه OMGCoin</translation> </message> <message> <location line="+57"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation>⏎ ⏎ این نسخه نرم افزار آزمایشی است⏎ ⏎ نرم افزار تحت لیسانس MIT/X11 منتشر شده است. به فایل coping یا آدرس http://www.opensource.org/licenses/mit-license.php. مراجعه شود⏎ ⏎ این محصول شامل نرم افزاری است که با OpenSSL برای استفاده از OpenSSL Toolkit (http://www.openssl.org/) و نرم افزار نوشته شده توسط اریک یانگ ([email protected] ) و UPnP توسط توماس برنارد طراحی شده است.</translation> </message> <message> <location filename="../aboutdialog.cpp" line="+14"/> <source>Copyright</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The OMGCoin developers</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>فهرست آدرس</translation> </message> <message> <location line="+19"/> <source>Double-click to edit address or label</source> <translation>برای ویرایش آدرس یا بر چسب دو بار کلیک کنید</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>آدرس جدید ایجاد کنید</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>آدرس انتخاب شده در سیستم تخته رسم گیره دار کپی کنید</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>آدرس جدید</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+63"/> <source>These are your OMGCoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>این آدرسها، آدرسهای omgcoin شما برای دریافت وجوه هستند. 
شما ممکن است آدرسهای متفاوت را به هر گیرنده اختصاص دهید که بتوانید مواردی که پرداخت می کنید را پیگیری نمایید</translation> </message> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>&amp;Copy Address</source> <translation>کپی آدرس</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>نمایش &amp;کد QR</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a OMGCoin address</source> <translation>پیام را برای اثبات آدرس OMGCoin خود امضا کنید</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>امضا و پیام</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>آدرس انتخاب شده در سیستم تخته رسم گیره دا حذف</translation> </message> <message> <location line="+27"/> <source>Export the data in the current tab to a file</source> <translation>داده ها نوارِ جاری را به فایل انتقال دهید</translation> </message> <message> <location line="+3"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="-44"/> <source>Verify a message to ensure it was signed with a specified OMGCoin address</source> <translation>یک پیام را برای حصول اطمینان از ورود به سیستم با آدرس omgcoin مشخص، شناسایی کنید</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>شناسایی پیام</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>حذف</translation> </message> <message> <location filename="../addressbookpage.cpp" line="-5"/> <source>These are your OMGCoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>Copy &amp;Label</source> <translation>کپی و برچسب گذاری</translation> </message> <message> <location line="+1"/> <source>&amp;Edit</source> <translation>ویرایش</translation> </message> <message> <location line="+1"/> <source>Send &amp;Coins</source> <translation type="unfinished"/> </message> <message> <location line="+260"/> <source>Export Address Book Data</source> <translation>آدرس انتخاب شده در سیستم تخته رسم گیره دار کپی کنید</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Comma separated file (*.csv)</translation> </message> <message> <location line="+13"/> <source>Error exporting</source> <translation>خطای صدور</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>تا فایل %1 نمی شود نوشت</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>بر چسب</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>آدرس</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>بدون برچسب</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>دیالوگ Passphrase </translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>وارد عبارت عبور</translation> </message> <message> 
<location line="+14"/> <source>New passphrase</source> <translation>عبارت عبور نو</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>تکرار عبارت عبور نو</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+33"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>وارد کنید..&amp;lt;br/&amp;gt عبارت عبور نو در پنجره 10 یا بیشتر کاراکتورهای تصادفی استفاده کنید &amp;lt;b&amp;gt لطفا عبارت عبور</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>رمز بندی پنجره</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>این عملیت نیاز عبارت عبور پنجره شما دارد برای رمز گشایی آن</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>تکرار عبارت عبور نو</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>این عملیت نیاز عبارت عبور شما دارد برای رمز بندی آن</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>رمز بندی پنجره</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>تغییر عبارت عبور</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>عبارت عبور نو و قدیم در پنجره وارد کنید</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>تایید رمز گذاری</translation> </message> <message> <location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR ZETACOINS&lt;/b&gt;!</source> <translation>هشدار: اگر wallet رمزگذاری شود و شما passphrase را گم کنید شما همه اطلاعات omgcoin را از دست خواهید داد.</translation> </message> <message> <location line="+0"/><|fim▁hole|> <source>Are you sure you wish to encrypt your wallet?</source> <translation>آیا اطمینان دارید که می خواهید wallet رمزگذاری شود؟</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation type="unfinished"/> </message> <message> <location line="+100"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>هشدار: Caps lock key روشن است</translation> </message> <message> <location line="-130"/> <location line="+58"/> <source>Wallet encrypted</source> <translation>تغییر عبارت عبور</translation> </message> <message> <location line="-56"/> <source>OMGCoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your omgcoins from being stolen by malware infecting your computer.</source> <translation>Biticon هم اکنون بسته می‌شود تا فرایند رمزگذاری را تمام کند. 
به خاطر داشته باشید که رمزگذاری کیف پولتان نمی‌تواند به طور کامل بیتیکون‌های شما را در برابر دزدیده شدن توسط بدافزارهایی که رایانه شما را آلوده می‌کنند، محافظت نماید.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+42"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>عبارت عبور نو و قدیم در پنجره وارد کنید</translation> </message> <message> <location line="-54"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>تنا موفق رمز بندی پنجره ناشی از خطای داخل شد. پنجره شما مرز بندی نشده است</translation> </message> <message> <location line="+7"/> <location line="+48"/> <source>The supplied passphrases do not match.</source> <translation>عبارت عبور عرضه تطابق نشد</translation> </message> <message> <location line="-37"/> <source>Wallet unlock failed</source> <translation>نجره رمز گذار شد</translation> </message> <message> <location line="+1"/> <location line="+11"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>اموفق رمز بندی پنجر</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>ناموفق رمز بندی پنجره</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>wallet passphrase با موفقیت تغییر یافت</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+233"/> <source>Sign &amp;message...</source> <translation>امضا و پیام</translation> </message> <message> <location line="+280"/> <source>Synchronizing with network...</source> <translation>همگام سازی با شبکه ...</translation> </message> <message> <location line="-349"/> <source>&amp;Overview</source> <translation>بررسی اجمالی</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>نمای کلی پنجره نشان بده</translation> </message> <message> <location line="+20"/> <source>&amp;Transactions</source> <translation>&amp;معاملات</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>نمایش تاریخ معاملات</translation> </message> <message> <location line="+7"/> <source>Edit the list of stored addresses and labels</source> <translation>ویرایش لیست آدرسها و بر چسب های ذخیره ای</translation> </message> <message> <location line="-14"/> <source>Show the list of addresses for receiving payments</source> <translation>نمایش لیست آدرس ها برای در یافت پر داخت ها</translation> </message> <message> <location line="+31"/> <source>E&amp;xit</source> <translation>خروج</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>خروج از برنامه </translation> </message> <message> <location line="+4"/> <source>Show information about OMGCoin</source> <translation>نمایش اطلاعات در مورد بیتکویین</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>درباره &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>نمایش اطلاعات درباره Qt</translation> </message> <message> <location line="+2"/> <source>&amp;Options...</source> <translation>تنظیمات...</translation> </message> <message> <location line="+6"/> <source>&amp;Encrypt Wallet...</source> <translation>رمزگذاری wallet</translation> </message> 
<message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>پشتیبان گیری از wallet</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>تغییر Passphrase</translation> </message> <message> <location line="+285"/> <source>Importing blocks from disk...</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Reindexing blocks on disk...</source> <translation type="unfinished"/> </message> <message> <location line="-347"/> <source>Send coins to a OMGCoin address</source> <translation>سکه ها را به آدرس bitocin ارسال کن</translation> </message> <message> <location line="+49"/> <source>Modify configuration options for OMGCoin</source> <translation>انتخابهای پیکربندی را برای omgcoin اصلاح کن</translation> </message> <message> <location line="+9"/> <source>Backup wallet to another location</source> <translation>نسخه پیشتیبان wallet را به محل دیگر انتقال دهید</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for wallet encryption</source> <translation>عبارت عبور رمز گشایی پنجره تغییر کنید</translation> </message> <message> <location line="+6"/> <source>&amp;Debug window</source> <translation>اشکال زدایی از صفحه</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>کنسول اشکال زدایی و تشخیص را باز کنید</translation> </message> <message> <location line="-4"/> <source>&amp;Verify message...</source> <translation>بازبینی پیام</translation> </message> <message> <location line="-165"/> <location line="+530"/> <source>OMGCoin</source> <translation>یت کویین </translation> </message> <message> <location line="-530"/> <source>Wallet</source> <translation>wallet</translation> </message> <message> <location line="+101"/> <source>&amp;Send</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>&amp;Receive</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>&amp;Addresses</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>&amp;About OMGCoin</source> <translation>در مورد omgcoin</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;نمایش/ عدم نمایش</translation> </message> <message> <location line="+1"/> <source>Show or hide the main Window</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Encrypt the private keys that belong to your wallet</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Sign messages with your OMGCoin addresses to prove you own them</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Verify messages to ensure they were signed with specified OMGCoin addresses</source> <translation type="unfinished"/> </message> <message> <location line="+28"/> <source>&amp;File</source> <translation>فایل</translation> </message> <message> <location line="+7"/> <source>&amp;Settings</source> <translation>تنظیمات</translation> </message> <message> <location line="+6"/> <source>&amp;Help</source> <translation>کمک</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>نوار ابزار زبانه ها</translation> </message> <message> <location line="+17"/> <location line="+10"/> <source>[testnet]</source> <translation>آزمایش شبکه</translation> </message> 
<message> <location line="+47"/> <source>OMGCoin client</source> <translation>مشتری OMGCoin</translation> </message> <message numerus="yes"> <location line="+141"/> <source>%n active connection(s) to OMGCoin network</source> <translation><numerusform>در صد ارتباطات فعال بیتکویین با شبکه %n</numerusform></translation> </message> <message> <location line="+22"/> <source>No block source available...</source> <translation type="unfinished"/> </message> <message> <location line="+12"/> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Processed %1 blocks of transaction history.</source> <translation type="unfinished"/> </message> <message numerus="yes"> <location line="+20"/> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message> <location line="+4"/> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Transactions after this will not yet be visible.</source> <translation type="unfinished"/> </message> <message> <location line="+22"/> <source>Error</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+70"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation type="unfinished"/> </message> <message> <location line="-140"/> <source>Up to date</source> <translation>تا تاریخ</translation> </message> <message> <location line="+31"/> <source>Catching up...</source> <translation>ابتلا به بالا</translation> </message> <message> <location line="+113"/> <source>Confirm transaction fee</source> <translation>هزینه تراکنش را تایید کنید</translation> </message> <message> <location line="+8"/> <source>Sent transaction</source> <translation>معامله ارسال شده</translation> </message> <message> <location line="+0"/> <source>Incoming transaction</source> <translation>معامله در یافت شده</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>تاریخ %1 مبلغ%2 نوع %3 آدرس %4</translation> </message> <message> <location line="+33"/> <location line="+23"/> <source>URI handling</source> <translation>مدیریت URI</translation> </message> <message> <location line="-23"/> <location line="+23"/> <source>URI can not be parsed! This can be caused by an invalid OMGCoin address or malformed URI parameters.</source> <translation>URI قابل تحلیل نیست. 
این خطا ممکن است به دلیل ادرس ZETACOIN اشتباه یا پارامترهای اشتباه URI رخ داده باشد</translation> </message> <message> <location line="+17"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>زمایش شبکهه</translation> </message> <message> <location line="+8"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>زمایش شبکه</translation> </message> <message> <location filename="../bitcoin.cpp" line="+111"/> <source>A fatal error occurred. OMGCoin can no longer continue safely and will quit.</source> <translation>خطا روی داده است. OMGCoin نمی تواند بدون مشکل ادامه دهد و باید بسته شود</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+104"/> <source>Network Alert</source> <translation>پیام شبکه</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>اصلاح آدرس</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>بر چسب</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>بر چسب با دفتر آدرس ورود مرتبط است</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>آدرس</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>آدرس با دفتر آدرس ورودی مرتبط است. این فقط در مورد آدرسهای ارسال شده است</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+21"/> <source>New receiving address</source> <translation>آدرس در یافت نو</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>آدرس ارسال نو</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>اصلاح آدرس در یافت</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>اصلاح آدرس ارسال</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>%1آدرس وارد شده دیگر در دفتر آدرس است</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid OMGCoin address.</source> <translation>آدرس وارد شده %1 یک ادرس صحیح omgcoin نیست</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>رمز گشایی پنجره امکان پذیر نیست</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>کلید نسل جدید ناموفق است</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+424"/> <location line="+12"/> <source>OMGCoin-Qt</source> <translation>OMGCoin-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>نسخه</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>ستفاده :</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>انتخابها برای خطوط دستور command line</translation> </message> 
<message> <location line="+4"/> <source>UI options</source> <translation>انتخابهای UI </translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>زبان را تنظیم کنید برای مثال &quot;de_DE&quot; (پیش فرض: system locale)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>شروع حد اقل</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>نمایش صفحه splash در STARTUP (پیش فرض:1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>اصلی</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>اصلی</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation type="unfinished"/> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>دستمزد&amp;پر داخت معامله</translation> </message> <message> <location line="+31"/> <source>Automatically start OMGCoin after logging in to the system.</source> <translation>در زمان ورود به سیستم به صورت خودکار omgcoin را اجرا کن</translation> </message> <message> <location line="+3"/> <source>&amp;Start OMGCoin on system login</source> <translation>اجرای omgcoin در زمان ورود به سیستم</translation> </message> <message> <location line="+35"/> <source>Reset all client options to default.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>&amp;Reset Options</source> <translation type="unfinished"/> </message> <message> <location line="+13"/> <source>&amp;Network</source> <translation>شبکه</translation> </message> <message> <location line="+6"/> <source>Automatically open the OMGCoin client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>اتوماتیک باز کردن بندر بیتکویین در روتر . این فقط در مواردی می باشد که روتر با کمک یو پ ن پ کار می کند</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>درگاه با استفاده از</translation> </message> <message> <location line="+7"/> <source>Connect to the OMGCoin network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>اتصال به شبکه ZETACOIN از طریق پراکسی ساکس (برای مثال وقتی از طریق نرم افزار TOR متصل می شوید)</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>اتصال با پراکسی SOCKS</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>پراکسی و آی.پی.</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>درس پروکسی</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>درگاه</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>درگاه پراکسی (مثال 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>SOCKS و نسخه</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 
5)</source> <translation>نسخه SOCKS از پراکسی (مثال 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>صفحه</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>tray icon را تنها بعد از کوچک کردن صفحه نمایش بده</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>حد اقل رساندن در جای نوار ابزار ها</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>حد اقل رساندن در جای خروج بر نامه وقتیکه پنجره بسته است.وقتیکه این فعال است برنامه خاموش می شود بعد از انتخاب دستور خاموش در منیو</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>کوچک کردن صفحه در زمان بستن</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>نمایش</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>میانجی کاربر و زبان</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting OMGCoin.</source> <translation>زبان میانجی کاربر می تواند در اینجا تنظیم شود. این تنظیمات بعد از شروع دوباره RESTART در ZETACOIN اجرایی خواهند بود.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>واحد برای نمایش میزان وجوه در:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>بخش فرعی پیش فرض را برای نمایش میانجی و زمان ارسال سکه ها مشخص و انتخاب نمایید</translation> </message> <message> <location line="+9"/> <source>Whether to show OMGCoin addresses in the transaction list or not.</source> <translation>تا آدرسهای bITCOIN در فهرست تراکنش نمایش داده شوند یا نشوند.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>نمایش آدرسها در فهرست تراکنش</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>تایید</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>رد</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>انجام</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+53"/> <source>default</source> <translation>پیش فرض</translation> </message> <message> <location line="+130"/> <source>Confirm options reset</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Some settings may require a client restart to take effect.</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Do you want to proceed?</source> <translation type="unfinished"/> </message> <message> <location line="+42"/> <location line="+9"/> <source>Warning</source> <translation>هشدار</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This setting will take effect after restarting OMGCoin.</source> <translation>این تنظیمات پس از اجرای دوباره OMGCoin اعمال می شوند</translation> 
</message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>آدرس پراکسی داده شده صحیح نیست</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>تراز</translation> </message> <message> <location line="+50"/> <location line="+166"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the OMGCoin network after a connection is established, but this process has not completed yet.</source> <translation>اطلاعات نمایش داده شده روزآمد نیستند.wallet شما به صورت خودکار با شبکه omgcoin بعد از برقراری اتصال روزآمد می شود اما این فرایند هنوز کامل نشده است.</translation> </message> <message> <location line="-124"/> <source>Balance:</source> <translation>راز:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>تایید نشده</translation> </message> <message> <location line="-78"/> <source>Wallet</source> <translation>wallet</translation> </message> <message> <location line="+107"/> <source>Immature:</source> <translation>نابالغ</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>بالانس/تتمه حساب استخراج شده، نابالغ است /تکمیل نشده است</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>اخرین معاملات&amp;lt</translation> </message> <message> <location line="-101"/> <source>Your current balance</source> <translation>تزار جاری شما</translation> </message> <message> <location line="+29"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>تعداد معاملات که تایید شده ولی هنوز در تزار جاری شما بر شمار نرفته است</translation> </message> <message> <location filename="../overviewpage.cpp" line="+116"/> <location line="+1"/> <source>out of sync</source> <translation>روزآمد نشده</translation> </message> </context> <context> <name>PaymentServer</name> <message> <location filename="../paymentserver.cpp" line="+107"/> <source>Cannot start omgcoin: click-to-pay handler</source> <translation type="unfinished"/> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>دیالوگ QR CODE</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>درخواست پرداخت</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>مقدار:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>برچسب:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>پیام</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;ذخیره به عنوان...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>خطا در زمان رمزدار کردن URI در کد QR</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>میزان وجه وارد شده صحیح نیست، لطفا بررسی نمایید</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / 
message.</source> <translation>URI ذکر شده بسیار طولانی است، متن برچسب/پیام را کوتاه کنید</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>ذخیره کد QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>تصاویر با فرمت PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>نام مشتری</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+339"/> <source>N/A</source> <translation>-</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>نسخه مشتری</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>اطلاعات</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>استفاده از نسخه OPENSSL</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>زمان آغاز STARTUP</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>شبکه</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>تعداد اتصالات</translation> </message> <message> <location line="+23"/> <source>On testnet</source> <translation>در testnetکها</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>زنجیره بلاک</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>تعداد کنونی بلاکها</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>تعداد تخمینی بلاکها</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>زمان آخرین بلاک</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>باز کردن</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>گزینه های command-line</translation> </message> <message> <location line="+7"/> <source>Show the OMGCoin-Qt help message to get a list with possible OMGCoin command-line options.</source> <translation>پیام راهنمای OMGCoin-Qt را برای گرفتن فهرست گزینه های command-line نشان بده</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>نمایش</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>کنسول</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>ساخت تاریخ</translation> </message> <message> <location line="-104"/> <source>OMGCoin - Debug window</source> <translation>صفحه اشکال زدایی OMGCoin </translation> </message> <message> <location line="+25"/> <source>OMGCoin Core</source> <translation> هسته OMGCoin </translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>فایلِ لاگِ اشکال زدایی</translation> </message> <message> <location line="+7"/> <source>Open the OMGCoin debug log file from the current data directory. 
This can take a few seconds for large log files.</source> <translation>فایلِ لاگِ اشکال زدایی OMGCoin را از دایرکتوری جاری داده ها باز کنید. این عملیات ممکن است برای فایلهای لاگِ حجیم طولانی شود.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>پاکسازی کنسول</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-30"/> <source>Welcome to the OMGCoin RPC console.</source> <translation>به کنسول OMGCoin RPC خوش آمدید</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>دکمه های بالا و پایین برای مرور تاریخچه و Ctrl-L برای پاکسازی صفحه</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>با تایپ عبارت HELP دستورهای در دسترس را مرور خواهید کرد</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+124"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>ارسال سکه ها</translation> </message> <message> <location line="+50"/> <source>Send to multiple recipients at once</source> <translation>ارسال چندین در یافت ها فورا</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>اضافه کردن دریافت کننده</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>پاک کردن تمام ستون‌های تراکنش</translation> </message> <message> <location line="+3"/> <source>Clear &amp;All</source> <translation>پاکسازی همه</translation> </message> <message> <location line="+22"/> <source>Balance:</source> <translation>تزار :</translation> </message> <message> <location line="+10"/> <source>123.456 BTC</source> <translation>123.456 بتس</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>عملیت دوم تایید کنید</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>&amp;;ارسال</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-59"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>(%3) تا &lt;b&gt;%1&lt;/b&gt; درصد%2</translation> </message> <message> <location line="+5"/> <source>Confirm send coins</source> <translation>ارسال سکه ها تایید کنید</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation> %1شما متماینید که می خواهید 1% ارسال کنید ؟</translation> </message> <message> <location line="+0"/> <source> and </source> <translation>و</translation> </message> <message> <location line="+23"/> <source>The recipient address is not valid, please recheck.</source> <translation>آدرس گیرنده نادرست است، لطفا دوباره بررسی کنید.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>مبلغ پر داخت باید از 0 بیشتر باشد </translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>میزان وجه از بالانس/تتمه حساب شما بیشتر است</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 
transaction fee is included.</source> <translation>کل میزان وجه از بالانس/تتمه حساب شما بیشتر می شود وقتی %1 هزینه تراکنش نیز به ین میزان افزوده می شود</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>آدرس تکراری یافت شده است، در زمان انجام عملیات به هر آدرس تنها یکبار می توانید اطلاعات ارسال کنید</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>خطا: تراکنش تایید نشد. این پیام زمانی روی می دهد که مقداری از سکه های WALLET شما استفاده شده اند برای مثال اگر شما از WALLET.DAT استفاده کرده اید، ممکن است سکه ها استفاده شده باشند اما در اینجا نمایش داده نشوند</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>تراز</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>A&amp;مبلغ :</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>به&amp;پر داخت :</translation> </message> <message> <location line="+34"/> <source>The address to send the payment to (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation type="unfinished"/> </message> <message> <location line="+60"/> <location filename="../sendcoinsentry.cpp" line="+26"/> <source>Enter a label for this address to add it to your address book</source> <translation>برای آدرس بر پسب وارد کنید که در دفتر آدرس اضافه شود</translation> </message> <message> <location line="-78"/> <source>&amp;Label:</source> <translation>&amp;بر چسب </translation> </message> <message> <location line="+28"/> <source>Choose address from address book</source> <translation>اآدرسن ازدفتر آدرس انتخاب کنید</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>آدرس از تخته رسم گیره دار پست کنید </translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>بر داشتن این در یافت کننده</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a OMGCoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>آدرس بیتکویین وارد کنید (bijvoorbeeld: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>امضا - امضا کردن /شناسایی یک پیام</translation> </message> <message> <location line="+13"/> <source>&amp;Sign Message</source> <translation>&amp;امضای پیام</translation> </message> <message> <location line="+6"/> <source>You can sign messages with your addresses to prove you own them. 
Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>شما می توانید پیامها را با آدرس خودتان امضا نمایید تا ثابت شود متعلق به شما هستند. مواظب باشید تا چیزی که بدان مطمئن نیستنید را امضا نکنید زیرا حملات فیشینگ در زمان ورود شما به سیستم فریبنده هستند. تنها مواردی را که حاوی اطلاعات دقیق و قابل قبول برای شما هستند را امضا کنید</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>آدرس برای امضا کردن پیام با (برای مثال 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+10"/> <location line="+213"/> <source>Choose an address from the address book</source> <translation>یک آدرس را از فهرست آدرسها انتخاب کنید</translation> </message> <message> <location line="-203"/> <location line="+213"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-203"/> <source>Paste address from clipboard</source> <translation>آدرس از تخته رسم گیره دار پست کنید </translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>پیامی را که می‌خواهید امضا کنید در اینجا وارد کنید</translation> </message> <message> <location line="+7"/> <source>Signature</source> <translation type="unfinished"/> </message> <message> <location line="+27"/> <source>Copy the current signature to the system clipboard</source> <translation>این امضا را در system clipboard کپی کن</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this OMGCoin address</source> <translation>پیام را برای اثبات آدرس ZETACOIN خود امضا کنید</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all sign message fields</source> <translation>تنظیم دوباره تمامی فیلدهای پیام</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>پاکسازی همه</translation> </message> <message> <location line="-87"/> <source>&amp;Verify Message</source> <translation>تایید پیام</translation> </message> <message> <location line="+6"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>آدرس/پیام خود را وارد کنید (مطمئن شوید که فاصله بین خطوط، فاصله ها، تب ها و ... را دقیقا کپی می کنید) و سپس امضا کنید تا پیام تایید شود. مراقب باشید که پیام را بیشتر از مطالب درون امضا مطالعه نمایید تا فریب شخص سوم/دزدان اینترنتی را نخورید.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. 
1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>آدرس برای امضا کردن پیام با (برای مثال 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified OMGCoin address</source> <translation>پیام را برای اطمنان از ورود به سیستم با آدرس ZETACOIN مشخص خود،تایید کنید</translation> </message> <message> <location line="+3"/> <source>Verify &amp;Message</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Reset all verify message fields</source> <translation>تنظیم دوباره تمامی فیلدهای پیام تایید شده</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a OMGCoin address (e.g. 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>آدرس بیتکویین وارد کنید (bijvoorbeeld: 1NS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>با کلیک بر &quot;امضای پیام&quot; شما یک امضای جدید درست می کنید</translation> </message> <message> <location line="+3"/> <source>Enter OMGCoin signature</source> <translation>امضای BITOCOIN خود را وارد کنید</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>آدرس وارد شده صحیح نیست</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>اطفا آدرس را بررسی کرده و دوباره امتحان کنید</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>آدرس وارد شده با کلید وارد شده مرتبط نیست</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>قفل کردن wallet انجام نشد</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>کلید شخصی برای آدرس وارد شده در دسترس نیست</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>پیام امضا کردن انجام نشد</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>پیام امضا شد</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>امضا نمی تواند رمزگشایی شود</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>لطفا امضا را بررسی و دوباره تلاش نمایید</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>امضا با تحلیلِ پیام مطابقت ندارد</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>عملیات شناسایی پیام انجام نشد</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>پیام شناسایی شد</translation> </message> </context> <context> <name>SplashScreen</name> <message> <location filename="../splashscreen.cpp" line="+25"/> <source>The OMGCoin developers</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>[testnet]</source> <translation>آزمایش 
شبکه</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+20"/> <source>Open until %1</source> <translation>باز کردن تا%1</translation> </message> <message> <location line="+6"/> <source>%1/offline</source> <translation>%1 آفلاین</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1 تایید نشده </translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>ایید %1 </translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>وضعیت</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>انتشار از طریق n% گره انتشار از طریق %n گره</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>تاریخ </translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>منبع</translation> </message> <message> <location line="+0"/> <source>Generated</source> <translation>تولید شده</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>فرستنده</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>گیرنده</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>آدرس شما</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>برچسب</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>بدهی </translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>بلوغ در n% از بیشتر بلاکها بلوغ در %n از بیشتر بلاکها</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>غیرقابل قبول</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>اعتبار</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>هزینه تراکنش</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>هزینه خالص</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>پیام</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>نظر</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>شناسه کاربری برای تراکنش</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>سکه های ایجاد شده باید 120 بلاک را قبل از استفاده بالغ کنند. در هنگام ایجاد بلاک، آن بلاک در شبکه منتشر می شود تا به زنجیره بلاکها بپیوندد. 
اگر در زنجیره قرار نگیرد، پیام وضعیت به غیرقابل قبول تغییر می بپیابد و قابل استفاده نیست. این مورد معمولا زمانی پیش می آید که گره دیگری به طور همزمان بلاکی را با فاصل چند ثانیه ای از شما ایجاد کند.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>اشکال زدایی طلاعات</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>تراکنش</translation> </message> <message> <location line="+3"/> <source>Inputs</source> <translation>درونداد</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>مبلغ</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>صحیح</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>نادرست</translation> </message> <message> <location line="-209"/> <source>, has not been successfully broadcast yet</source> <translation>هنوز با مو فقیت ارسال نشده</translation> </message> <message numerus="yes"> <location line="-35"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message> <location line="+70"/> <source>unknown</source> <translation>مشخص نیست </translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>جزییات معاملات</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source> <translation>در این قاب شیشه توصیف دقیق معامله نشان می شود</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+225"/> <source>Date</source> <translation>تاریخ</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>نوع</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>ایل جدا </translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>مبلغ</translation> </message> <message numerus="yes"> <location line="+57"/> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform></translation> </message> <message> <location line="+3"/> <source>Open until %1</source> <translation>از شده تا 1%1</translation> </message> <message> <location line="+3"/> <source>Offline (%1 confirmations)</source> <translation>افلایین (%1)</translation> </message> <message> <location line="+3"/> <source>Unconfirmed (%1 of %2 confirmations)</source> <translation>تایید نشده (%1/%2)</translation> </message> <message> <location line="+3"/> <source>Confirmed (%1 confirmations)</source> <translation>تایید شده (%1)</translation> </message> <message numerus="yes"> <location line="+8"/> <source>Mined balance will be available when it matures in %n more block(s)</source> <translation><numerusform>بالانس/تتمه حساب استخراج شده زمانی که %n از بیشتر بلاکها بالغ شدند در دسترس خواهد بود بالانس/تتمه حساب استخراج شده زمانی که n% از بیشتر بلاکها بالغ شدند در دسترس خواهد بود</numerusform></translation> </message> <message> <location line="+5"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>این بلوک از دیگر گره ها در یافت نشده بدین دلیل شاید قابل قابول نیست</translation> </message> <message> <location line="+3"/> <source>Generated but not 
accepted</source> <translation>تولید شده ولی قبول نشده</translation> </message> <message> <location line="+43"/> <source>Received with</source> <translation>در یافت با :</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>دریافتی از</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>ارسال به :</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>پر داخت به خودتان</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>استخراج</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(کاربرد ندارد)</translation> </message> <message> <location line="+199"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>وضعیت معالمه . عرصه که تعداد تایید نشان می دهد</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>تاریخ و ساعت در یافت معامله</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>نوع معاملات</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>آدرس مقصود معاملات </translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>مبلغ از تزار شما خارج یا وارد شده</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+52"/> <location line="+16"/> <source>All</source> <translation>همه</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>امروز</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>این هفته</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>این ماه</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>ماه گذشته</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>امسال</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>محدوده </translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>در یافت با</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>ارسال به</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>به خودتان </translation> </message> <message> <location line="+1"/> <source>Mined</source> <translation>استخراج</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>یگر </translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>برای جست‌‌وجو نشانی یا برچسب را وارد کنید</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>حد اقل مبلغ </translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>کپی آدرس </translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>کپی بر چسب</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>روگرفت مقدار</translation> </message> <message> <location line="+1"/> <source>Copy 
transaction ID</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>اصلاح بر چسب</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>جزئیات تراکنش را نمایش بده</translation> </message> <message> <location line="+139"/> <source>Export Transaction Data</source> <translation>صادرات تاریخ معامله</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Comma فایل جدا </translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>تایید شده</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>تاریخ </translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>نوع </translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>ر چسب</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>ایل جدا </translation> </message> <message> <location line="+1"/> <source>Amount</source> <translation>مبلغ</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>آی دی</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>خطای صادرت</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>تا فایل %1 نمی شود نوشت</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>&gt;محدوده</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>به</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+193"/> <source>Send Coins</source> <translation>ارسال سکه ها</translation> </message> </context> <context> <name>WalletView</name> <message> <location filename="../walletview.cpp" line="+42"/> <source>&amp;Export</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Export the data in the current tab to a file</source> <translation>داده ها نوارِ جاری را به فایل انتقال دهید</translation> </message> <message> <location line="+193"/> <source>Backup Wallet</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Backup Successful</source> <translation type="unfinished"/> </message> <message> <location line="+0"/> <source>The wallet data was successfully saved to the new location.</source> <translation type="unfinished"/> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+94"/> <source>OMGCoin version</source> <translation>سخه بیتکویین</translation> </message> <message> <location line="+102"/> <source>Usage:</source> <translation>ستفاده :</translation> </message> <message> <location line="-29"/> <source>Send command to -server or omgcoind</source> <translation>ارسال فرمان به سرور یا باتکویین</translation> </message> <message> <location line="-23"/> <source>List 
commands</source> <translation>لیست فومان ها</translation> </message> <message> <location line="-12"/> <source>Get help for a command</source> <translation>کمک برای فرمان </translation> </message> <message> <location line="+24"/> <source>Options:</source> <translation>تنظیمات</translation> </message> <message> <location line="+24"/> <source>Specify configuration file (default: omgcoin.conf)</source> <translation>(: omgcoin.confپیش فرض: )فایل تنظیمی خاص </translation> </message> <message> <location line="+3"/> <source>Specify pid file (default: omgcoind.pid)</source> <translation>(omgcoind.pidپیش فرض : ) فایل پید خاص</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>دایرکتور اطلاعاتی خاص</translation> </message> <message> <location line="-9"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>سایز کَش بانک داده را بر حسب مگابایت تنظیم کنید (پیش فرض:25)</translation> </message> <message> <location line="-28"/> <source>Listen for connections on &lt;port&gt; (default: 8333 or testnet: 18333)</source> <translation>برای اتصالات به &lt;port&gt; (پیش‌فرض: 8333 یا تست‌نت: 18333) گوش کنید</translation> </message> <message> <location line="+5"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>حداکثر &lt;n&gt; اتصال با همکاران برقرار داشته باشید (پیش‌فرض: 125)</translation> </message> <message> <location line="-48"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>اتصال به گره برای دریافت آدرسهای قرینه و قطع اتصال</translation> </message> <message> <location line="+82"/> <source>Specify your own public address</source> <translation>آدرس عمومی خود را ذکر کنید</translation> </message> <message> <location line="+3"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>آستانه برای قطع ارتباط با همکاران بدرفتار (پیش‌فرض: 100)</translation> </message> <message> <location line="-134"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>مدت زمان به ثانیه برای جلوگیری از همکاران بدرفتار برای اتصال دوباره (پیش‌فرض: 86400)</translation> </message> <message> <location line="-29"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>در زمان تنظیم درگاه RPX %u در فهرست کردن %s اشکالی رخ داده است</translation> </message> <message> <location line="+27"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 8332 or testnet: 18332)</source> <translation>( 8332پیش فرض :) &amp;lt;poort&amp;gt; JSON-RPC شنوایی برای ارتباطات</translation> </message> <message> <location line="+37"/> <source>Accept command line and JSON-RPC commands</source> <translation>JSON-RPC قابل فرمانها و</translation> </message> <message> <location line="+76"/> <source>Run in the background as a daemon and accept commands</source> <translation>اجرای در پس زمینه به عنوان شبح و قبول فرمان ها</translation> </message> <message> <location line="+37"/> <source>Use the test network</source> <translation>استفاده شبکه آزمایش</translation> </message> <message> <location line="-112"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>پذیرش اتصالات از بیرون (پیش فرض:1 بدون پراکسی یا اتصال)</translation> </message> <message> <location line="-80"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: 
rpcuser=omgcoinrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;OMGCoin Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <location line="+17"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Cannot obtain a lock on data directory %s. OMGCoin is probably already running.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <location line="+11"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>حجم حداکثر تراکنشهای با/کم اهمیت را به بایت تنظیم کنید (پیش فرض:27000)</translation> </message> <message> <location line="+6"/> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation type="unfinished"/> </message> <message> <location line="+5"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>هشدار:paytxfee بسیار بالا تعریف شده است! این هزینه تراکنش است که باید در زمان ارسال تراکنش بپردازید</translation> </message> <message> <location line="+3"/> <source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source> <translation>هشدار: تراکنش نمایش داده شده ممکن است صحیح نباشد! شما/یا یکی از گره ها به روزآمد سازی نیاز دارید </translation> </message> <message> <location line="+3"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong OMGCoin will not work properly.</source> <translation>هشدار: لطفا زمان و تاریخ رایانه خود را تصحیح نمایید! اگر ساعت رایانه شما اشتباه باشد omgcoin ممکن است صحیح کار نکند</translation> </message> <message> <location line="+3"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Warning: wallet.dat corrupt, data salvaged! 
Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation type="unfinished"/> </message> <message> <location line="+14"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Block creation options:</source> <translation>بستن گزینه ایجاد</translation> </message> <message> <location line="+5"/> <source>Connect only to the specified node(s)</source> <translation>تنها در گره (های) مشخص شده متصل شوید</translation> </message> <message> <location line="+3"/> <source>Corrupted block database detected</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>آدرس آی.پی. خود را شناسایی کنید (پیش فرض:1 در زمان when listening وno -externalip)</translation> </message> <message> <location line="+1"/> <source>Do you want to rebuild the block database now?</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error initializing block database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error loading block database</source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Error opening block database</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Error: Disk space is low!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: Wallet locked, unable to create transaction!</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Error: system error: </source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>شنیدن هر گونه درگاه انجام پذیر نیست. 
ازlisten=0 برای اینکار استفاده کیند.</translation> </message> <message> <location line="+1"/> <source>Failed to read block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to read block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to sync block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write block</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write file info</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write to coin database</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write transaction index</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Failed to write undo data</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>قرینه ها را برای جستجوی DNS بیاب (پیش فرض: 1 مگر در زمان اتصال)</translation> </message> <message> <location line="+1"/> <source>Generate coins (default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <location line="+19"/> <source>Not enough file descriptors available.</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <location line="+16"/> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation type="unfinished"/> </message> <message> <location line="+26"/> <source>Verifying blocks...</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Verifying wallet...</source> <translation type="unfinished"/> </message> <message> <location line="-69"/> <source>Imports blocks from external blk000??.dat file</source> <translation type="unfinished"/> </message> <message> <location line="-76"/> <source>Set the number of script verification threads (up to 16, 0 = auto, &lt;0 = leave that many cores free, default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+77"/> <source>Information</source> <translation type="unfinished"/> </message> <message> <location line="+3"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>آدرس نرم افزار تور غلط است %s</translation> </message> <message> <location line="+1"/> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <location line="+8"/> <source>Maintain a full transaction index 
(default: 0)</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>حداکثر بافر دریافت شده بر اساس اتصال &lt;n&gt;* 1000 بایت (پیش فرض:5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>حداکثر بافر دریافت شده بر اساس اتصال &lt;n&gt;* 1000 بایت (پیش فرض:1000)</translation> </message> <message> <location line="+2"/> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>تنها =به گره ها در شبکه متصا شوید &lt;net&gt; (IPv4, IPv6 or Tor)</translation> </message> <message> <location line="+2"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>برونداد اطلاعات اشکال زدایی اضافی. گزینه های اشکال زدایی دیگر رفع شدند</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>برونداد اطلاعات اشکال زدایی اضافی برای شبکه</translation> </message> <message> <location line="+2"/> <source>Prepend debug output with timestamp</source> <translation>به خروجی اشکال‌زدایی برچسب زمان بزنید</translation> </message> <message> <location line="+5"/> <source>SSL options: (see the OMGCoin Wiki for SSL setup instructions)</source> <translation>گزینه ssl (به ویکیomgcoin برای راهنمای راه اندازی ssl مراجعه شود)</translation> </message> <message> <location line="+1"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>نسخه ای از پراکسی ساکس را برای استفاده انتخاب کنید (4-5 پیش فرض:5)</translation> </message> <message> <location line="+3"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>اطلاعات ردگیری/اشکال‌زدایی را به جای فایل لاگ اشکال‌زدایی به کنسول بفرستید</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>اطلاعات ردگیری/اشکال‌زدایی را به اشکال‌زدا بفرستید</translation> </message> <message> <location line="+5"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>حداکثر سایز بلاک بر اساس بایت تنظیم شود (پیش فرض: 250000)</translation> </message> <message> <location line="+1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>حداقل سایز بلاک بر اساس بایت تنظیم شود (پیش فرض: 0)</translation> </message> <message> <location line="+2"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>فایل debug.log را در startup مشتری کوچک کن (پیش فرض:1 اگر اشکال زدایی روی نداد)</translation> </message> <message> <location line="+1"/> <source>Signing transaction failed</source> <translation type="unfinished"/> </message> <message> <location line="+2"/> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>(میلی ثانیه )فاصله ارتباط خاص</translation> </message> <message> <location line="+4"/> <source>System error: </source> <translation type="unfinished"/> </message> <message> <location line="+4"/> <source>Transaction amount too small</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction amounts must be positive</source> <translation 
type="unfinished"/> </message> <message> <location line="+1"/> <source>Transaction too large</source> <translation type="unfinished"/> </message> <message> <location line="+7"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>از UPnP برای شناسایی درگاه شنیداری استفاده کنید (پیش فرض:0)</translation> </message> <message> <location line="+1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>از UPnP برای شناسایی درگاه شنیداری استفاده کنید (پیش فرض:1 در زمان شنیدن)</translation> </message> <message> <location line="+1"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>برای دستیابی به سرویس مخفیانه نرم افزار تور از پراکسی استفاده کنید (پیش فرض:same as -proxy)</translation> </message> <message> <location line="+2"/> <source>Username for JSON-RPC connections</source> <translation>JSON-RPC شناسه برای ارتباطات</translation> </message> <message> <location line="+4"/> <source>Warning</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>هشدار: این نسخه قدیمی است، روزآمدسازی مورد نیاز است</translation> </message> <message> <location line="+1"/> <source>You need to rebuild the databases using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <location line="+1"/> <source>wallet.dat corrupt, salvage failed</source> <translation type="unfinished"/> </message> <message> <location line="-50"/> <source>Password for JSON-RPC connections</source> <translation>JSON-RPC عبارت عبور برای ارتباطات</translation> </message> <message> <location line="-67"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>از آدرس آی پی خاص JSON-RPC قبول ارتباطات</translation> </message> <message> <location line="+76"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>(127.0.0.1پیش فرض: ) &amp;lt;ip&amp;gt; دادن فرمانها برای استفاده گره ها روی</translation> </message> <message> <location line="-120"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>زمانی که بهترین بلاک تغییر کرد، دستور را اجرا کن (%s در cmd با block hash جایگزین شده است)</translation> </message> <message> <location line="+147"/> <source>Upgrade wallet to latest format</source> <translation>wallet را به جدیدترین فرمت روزآمد کنید</translation> </message> <message> <location line="-21"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation> (100پیش فرض:)&amp;lt;n&amp;gt; گذاشتن اندازه کلید روی </translation> </message> <message> <location line="-12"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>اسکان مجدد زنجیر بلوکها برای گم والت معامله</translation> </message> <message> <location line="+35"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>JSON-RPCبرای ارتباطات استفاده کنید OpenSSL (https)</translation> </message> <message> <location line="-26"/> <source>Server certificate file (default: server.cert)</source> <translation> (server.certپیش فرض: )گواهی نامه سرور</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>(server.pemپیش فرض: ) کلید خصوصی سرور</translation> </message> <message> <location line="-151"/> <source>Acceptable ciphers (default: 
TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>رمز های قابل قبول( TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+165"/> <source>This help message</source> <translation>پیام کمکی</translation> </message> <message> <location line="+6"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>امکان اتصال به %s از این رایانه وجود ندارد ( bind returned error %d, %s)</translation> </message> <message> <location line="-91"/> <source>Connect through socks proxy</source> <translation>اتصال از طریق پراکسی ساکس</translation> </message> <message> <location line="-10"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>به DNS اجازه بده تا برای addnode ، seednode و اتصال جستجو کند</translation> </message> <message> <location line="+55"/> <source>Loading addresses...</source> <translation>بار گیری آدرس ها</translation> </message> <message> <location line="-35"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>خطا در بارگیری wallet.dat: کیف پول خراب شده است</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat: Wallet requires newer version of OMGCoin</source> <translation>خطا در بارگیری wallet.dat: کیف پول به ویرایش جدیدتری از Biticon نیاز دارد</translation> </message> <message> <location line="+93"/> <source>Wallet needed to be rewritten: restart OMGCoin to complete</source> <translation>سلام</translation> </message> <message> <location line="-95"/> <source>Error loading wallet.dat</source> <translation>خطا در بارگیری wallet.dat</translation> </message> <message> <location line="+28"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>آدرس پراکسی اشتباه %s</translation> </message> <message> <location line="+56"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>شبکه مشخص شده غیرقابل شناسایی در onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>نسخه پراکسی ساکس غیرقابل شناسایی درخواست شده است: %i</translation> </message> <message> <location line="-96"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>آدرس قابل اتصال- شناسایی نیست %s</translation> </message> <message> <location line="+1"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>آدرس خارجی قابل اتصال- شناسایی نیست %s</translation> </message> <message> <location line="+44"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>میزان وجه اشتباه برای paytxfee=&lt;میزان وجه&gt;: %s</translation> </message> <message> <location line="+1"/> <source>Invalid amount</source> <translation>میزان وجه اشتباه</translation> </message> <message> <location line="-6"/> <source>Insufficient funds</source> <translation>بود جه نا کافی </translation> </message> <message> <location line="+10"/> <source>Loading block index...</source> <translation>بار گیری شاخص بلوک</translation> </message> <message> <location line="-57"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>به اتصال یک گره اضافه کنید و اتصال را باز نگاه دارید</translation> </message> <message> <location line="-25"/> <source>Unable to bind to %s on this computer. OMGCoin is probably already running.</source> <translation>اتصال به %s از این رایانه امکان پذیر نیست. 
OMGCoin احتمالا در حال اجراست.</translation> </message> <message> <location line="+64"/> <source>Fee per KB to add to transactions you send</source> <translation>پر داجت برای هر کیلو بیت برای اضافه به معامله ارسال</translation> </message> <message> <location line="+19"/> <source>Loading wallet...</source> <translation>بار گیری والت</translation> </message> <message> <location line="-52"/> <source>Cannot downgrade wallet</source> <translation>امکان تنزل نسخه در wallet وجود ندارد</translation> </message> <message> <location line="+3"/> <source>Cannot write default address</source> <translation>آدرس پیش فرض قابل ذخیره نیست</translation> </message> <message> <location line="+64"/> <source>Rescanning...</source> <translation>اسکان مجدد</translation> </message> <message> <location line="-57"/> <source>Done loading</source> <translation>بار گیری انجام شده است</translation> </message> <message> <location line="+82"/> <source>To use the %s option</source> <translation>برای استفاده از %s از انتخابات</translation> </message> <message> <location line="-74"/> <source>Error</source> <translation>خطا</translation> </message> <message> <location line="-31"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>%s، شما باید یک rpcpassword را در فایل پیکربندی تنظیم کنید :⏎%s⏎ اگر فایل ایجاد نشد، یک فایل فقط متنی ایجاد کنید. </translation> </message> </context> </TS><|fim▁end|>
<|file_name|>ProcessApplicationAttachments.java<|end_file_name|><|fim▁begin|>/* * Copyright © 2013-2018 camunda services GmbH and various authors ([email protected]) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.camunda.bpm.container.impl.jboss.deployment.marker; import java.util.List; import org.camunda.bpm.application.AbstractProcessApplication; import org.camunda.bpm.application.impl.metadata.spi.ProcessesXml; import org.camunda.bpm.container.impl.jboss.util.ProcessesXmlWrapper; import org.jboss.as.ee.component.ComponentDescription; import org.jboss.as.server.deployment.AttachmentKey; import org.jboss.as.server.deployment.AttachmentList; import org.jboss.as.server.deployment.DeploymentUnit; import org.jboss.jandex.AnnotationInstance; /** * * @author Daniel Meyer * */ public class ProcessApplicationAttachments { private static final AttachmentKey<Boolean> MARKER = AttachmentKey.create(Boolean.class); private static final AttachmentKey<Boolean> PART_OF_MARKER = AttachmentKey.create(Boolean.class); private static final AttachmentKey<AttachmentList<ProcessesXmlWrapper>> PROCESSES_XML_LIST = AttachmentKey.createList(ProcessesXmlWrapper.class); private static final AttachmentKey<ComponentDescription> PA_COMPONENT = AttachmentKey.create(ComponentDescription.class); private static final AttachmentKey<AnnotationInstance> POST_DEPLOY_METHOD = AttachmentKey.create(AnnotationInstance.class); private static final AttachmentKey<AnnotationInstance> PRE_UNDEPLOY_METHOD = AttachmentKey.create(AnnotationInstance.class); /** * Attach the parsed ProcessesXml file to a deployment unit. * */ public static void addProcessesXml(DeploymentUnit unit, ProcessesXmlWrapper processesXmlWrapper) { unit.addToAttachmentList(PROCESSES_XML_LIST, processesXmlWrapper); } /** * Returns the attached {@link ProcessesXml} marker or null; * */ public static List<ProcessesXmlWrapper> getProcessesXmls(DeploymentUnit deploymentUnit) {<|fim▁hole|> } /** * marks a a {@link DeploymentUnit} as a process application */ public static void mark(DeploymentUnit unit) { unit.putAttachment(MARKER, Boolean.TRUE); } /** * marks a a {@link DeploymentUnit} as part of a process application */ public static void markPartOfProcessApplication(DeploymentUnit unit) { if(unit.getParent() != null && unit.getParent() != unit) { unit.getParent().putAttachment(PART_OF_MARKER, Boolean.TRUE); } } /** * return true if the deployment unit is either itself a process * application or part of a process application. 
*/ public static boolean isPartOfProcessApplication(DeploymentUnit unit) { if(isProcessApplication(unit)) { return true; } if(unit.getParent() != null && unit.getParent() != unit) { return unit.getParent().hasAttachment(PART_OF_MARKER); } return false; } /** * Returns true if the {@link DeploymentUnit} itself is a process application (carries a processes.xml) * */ public static boolean isProcessApplication(DeploymentUnit deploymentUnit) { return deploymentUnit.hasAttachment(MARKER); } /** * Returns the {@link ComponentDescription} for the {@link AbstractProcessApplication} component */ public static ComponentDescription getProcessApplicationComponent(DeploymentUnit deploymentUnit) { return deploymentUnit.getAttachment(PA_COMPONENT); } /** * Attach the {@link ComponentDescription} for the {@link AbstractProcessApplication} component */ public static void attachProcessApplicationComponent(DeploymentUnit deploymentUnit, ComponentDescription componentDescription){ deploymentUnit.putAttachment(PA_COMPONENT, componentDescription); } /** * Attach the {@link AnnotationInstance}s for the PostDeploy methods */ public static void attachPostDeployDescription(DeploymentUnit deploymentUnit, AnnotationInstance annotation){ deploymentUnit.putAttachment(POST_DEPLOY_METHOD, annotation); } /** * Attach the {@link AnnotationInstance}s for the PreUndeploy methods */ public static void attachPreUndeployDescription(DeploymentUnit deploymentUnit, AnnotationInstance annotation){ deploymentUnit.putAttachment(PRE_UNDEPLOY_METHOD, annotation); } /** * @return the description of the PostDeploy method */ public static AnnotationInstance getPostDeployDescription(DeploymentUnit deploymentUnit) { return deploymentUnit.getAttachment(POST_DEPLOY_METHOD); } /** * @return the description of the PreUndeploy method */ public static AnnotationInstance getPreUndeployDescription(DeploymentUnit deploymentUnit) { return deploymentUnit.getAttachment(PRE_UNDEPLOY_METHOD); } private ProcessApplicationAttachments() { } }<|fim▁end|>
return deploymentUnit.getAttachmentList(PROCESSES_XML_LIST);
<|file_name|>empty.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>/// don't applying any check to the passed data. /// use std::collections::HashMap; use std::sync::Arc; use futures::future::lazy; use crate::engine::middleware::base::{Middleware, MiddlewareFuture}; use crate::engine::serializer::JsonMessage; use crate::rabbitmq::RabbitMQContext; /// A middleware that used for reverse proxy for cases when /// not necessary to do validating tokens or permissions. pub struct EmptyMiddleware; impl EmptyMiddleware { pub fn new() -> EmptyMiddleware { EmptyMiddleware {} } } impl Middleware for EmptyMiddleware { /// Returns an empty future which is doesn't doing anything. fn process_request(&self, _message: JsonMessage, _rabbitmq_context: Arc<RabbitMQContext>) -> MiddlewareFuture { Box::new(lazy(move || Ok(HashMap::new()))) } }<|fim▁end|>
/// The following module contains the simple middleware that