text
stringlengths
2
99.9k
meta
dict
{ "images" : [ { "idiom" : "iphone", "size" : "29x29", "scale" : "2x" }, { "idiom" : "iphone", "size" : "40x40", "scale" : "2x" }, { "idiom" : "iphone", "size" : "60x60", "scale" : "2x" }, { "idiom" : "ipad", "size" : "29x29", "scale" : "1x" }, { "idiom" : "ipad", "size" : "29x29", "scale" : "2x" }, { "idiom" : "ipad", "size" : "40x40", "scale" : "1x" }, { "idiom" : "ipad", "size" : "40x40", "scale" : "2x" }, { "idiom" : "ipad", "size" : "76x76", "scale" : "1x" }, { "idiom" : "ipad", "size" : "76x76", "scale" : "2x" } ], "info" : { "version" : 1, "author" : "xcode" } }
{ "pile_set_name": "Github" }
//
//     Generated by class-dump 3.5 (64 bit) (Debug version compiled Sep 17 2017 16:24:48).
//
//     class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2015 by Steve Nygard.
//

#import <MMCommon/WXPBGeneratedMessage.h>

// Reverse-engineered (class-dump) declaration of a protobuf message that
// carries one self-monitoring report item. All properties are @dynamic:
// storage and accessors are provided at runtime by the WXPBGeneratedMessage
// protobuf machinery, which is why no ivars are declared here.
// NOTE(review): field semantics below are inferred from names only — confirm
// against the owning .proto definition.
@interface SmcSelfMonitorItem : WXPBGeneratedMessage
{
}

+ (void)initialize;

// Remaining properties
@property(nonatomic) unsigned int action; // @dynamic action;
@property(nonatomic) unsigned int collectdate; // @dynamic collectdate;
@property(nonatomic) unsigned int datacount; // @dynamic datacount;
@property(nonatomic) unsigned int dataid; // @dynamic dataid;
@property(nonatomic) unsigned int expand1; // @dynamic expand1;
@property(nonatomic) unsigned int expand2; // @dynamic expand2;
@property(nonatomic) unsigned int reportdate; // @dynamic reportdate;
@property(nonatomic) unsigned int uuid; // @dynamic uuid;

@end
{ "pile_set_name": "Github" }
MIIF4AoBAKCCBdkwggXVBgkrBgEFBQcwAQEEggXGMIIFwjCBmaIWBBTrlwecTarB yVdbHxANRLCFYj1mqBgPMjAxMjEwMTExMTI1MjVaMG4wbDBEMAkGBSsOAwIaBQAE FLdXtbacB/gWIxOOkMkqDr4yAaoxBBRge2YaRQ2XyolQL30EzTSo//z9SwILBAAA AAABL07hRxCAABgPMjAxMjEwMDEwNjAwMDBaoBEYDzIwMTMwNDE1MDYwMDAwWjAL BgkqhkiG9w0BAQUDggEBAHThkPoy6eA7qX9y5C5b1ElRSwdjzsd15OJSqP2yjQbS Ol1K8DWtX0UhTfRH+CrIPoWL40g2HjXtIVeMD6s3hakYimZUenIJ/IRRSVWp+EXU MewgTVPz/wJN/9dJIkSbOI/BmpIGlaaBaLwcb39nJjZMq0sXj8jRI5i0isotOAFz Zc0R20viBEH099KuGktB2fKKEpVbbWPljTxKzkIBs9SXZBIqd/X2MWzQWcLKzhL0 oynkvqxTFqNVjjZKcKSXPS/XEUufLrv/E3xQZYAfTJr778kFkyA8JzrXiH6W5DX6 UbqsnO5DaPZvMDfvlQWETkoS1j+Qgu2mIWzdiw7sPrOgggQQMIIEDDCCBAgwggLw oAMCAQICCwQAAAAAAThXovYBMA0GCSqGSIb3DQEBBQUAMFcxCzAJBgNVBAYTAkJF MRkwFwYDVQQKExBHbG9iYWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRsw GQYDVQQDExJHbG9iYWxTaWduIFJvb3QgQ0EwHhcNMTIwNzA1MTgwMDAwWhcNMTMw NzA1MTgwMDAwWjBZMQswCQYDVQQGEwJCRTEZMBcGA1UEChMQR2xvYmFsU2lnbiBu di1zYTEvMC0GA1UEAxMmR2xvYmFsU2lnbiBPQ1NQIGZvciBSb290IFIxIC0gQnJh bmNoIDEwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDP2QF8p0+Fb7ID MwwD1gEr2oazjqbW28EZr3YEyMPk+7VFaGePSO1xjBGIE48Q7m7d6p6ZXCzlBZEi oudrHSr3WDqdIVKLDrZIDkgEgdjJE72Hq6Pf5CEGXyebbODm4sV96EfewSvOOYLL 866g3aoVhLDK02ny+Q5OsokW7nhnmGMMh10tZqR5VmdQTiw8MgeqUxBEaEO4WH2J ltgSsgNJBNBYuDgnn5ryzVqhvmCJvYZMYeN6qZFKy1MgHcR+wEpGLPlRL4ttu6e5 MJrVta7dVFobHUHoFog97LtQT1PY0Ubaihswjge5O04bYeCrgSSjr1e4xH/KDxRw yyhoscaFAgMBAAGjgdIwgc8wDgYDVR0PAQH/BAQDAgeAMB0GA1UdDgQWBBTqlwec TarByVdbHxANRLCFYj1mqDBMBgNVHSAERTBDMEEGCSsGAQQBoDIBXzA0MDIGCCsG AQUFBwIBFiZodHRwczovL3d3dy5nbG9iYWxzaWduLmNvbS9yZXBvc2l0b3J5LzAJ BgNVHRMEAjAAMBMGA1UdJQQMMAoGCCsGAQUFBwMJMB8GA1UdIwQYMBaAFGB7ZhpF DZfKiVAvfQTNNKj//P1LMA8GCSsGAQUFBzABBQQCBQAwDQYJKoZIhvcNAQEFBQAD ggEBAHiC6N1uF29d7CmiVapA8Nr1xLSVeIkBd4A8yHsUTQ7ATI7bwT14QUV4awe7 8cvmO5ZND8YG1ViwN162WFm9ivSoWBzvWDbU2JhQFb+XzrzCcdn0YbNiTxJh/vYm uDuxto00dpBgujSOAQv8B90iDEJ+sZpYRzDRj62qStRey0zpq5eX+pA+gdppMUFb 4QvJf0El8TbLCWLN4TjrFe6ju7ZaN9zmgVYGQ2fMHKIGNScLuIA950nYwzRkIfHa 
YW6HqP1rCR1EiYmstEeCQyDxJx+RUlh+q8L1BKzaMYhS6s63MZzQuGseYStaCmbC fBIRKjnK621vAWvc7UR+0hqnZ+U=
{ "pile_set_name": "Github" }
// Babel-style helper: shallow-merges the own enumerable properties of every
// source argument into `target`, delegating to Object.assign when available.
function _extends() {
  _extends = Object.assign || function (target) {
    for (var argIndex = 1; argIndex < arguments.length; argIndex++) {
      var sourceObject = arguments[argIndex];
      for (var propName in sourceObject) {
        if (Object.prototype.hasOwnProperty.call(sourceObject, propName)) {
          target[propName] = sourceObject[propName];
        }
      }
    }
    return target;
  };
  return _extends.apply(this, arguments);
}

import CalendarLocale from "rc-picker/es/locale/es_ES";
import TimePickerLocale from '../../time-picker/locale/es_ES';

// Merge into a locale object: Spanish (es_ES) date-picker strings layered
// over the base rc-picker calendar locale, plus the time-picker locale.
var locale = {
  lang: _extends({
    placeholder: 'Seleccionar fecha',
    rangePlaceholder: ['Fecha inicial', 'Fecha final']
  }, CalendarLocale),
  timePickerLocale: _extends({}, TimePickerLocale)
};

// All settings at:
// https://github.com/ant-design/ant-design/blob/master/components/date-picker/locale/example.json
export default locale;
{ "pile_set_name": "Github" }
/*
 * Copyright (C) 2017 Álvaro Fernández Rojas <[email protected]>
 *
 * SPDX-License-Identifier: GPL-2.0+
 */

/dts-v1/;

#include "brcm,bcm6358.dtsi"

/* Board device tree for the Huawei EchoLife HG556a (BCM6358 SoC). */
/ {
	model = "Huawei EchoLife HG556a";
	compatible = "huawei,hg556a", "brcm,bcm6358";

	aliases {
		serial0 = &uart0;
	};

	chosen {
		stdout-path = "serial0:115200n8";
	};

	/* Front-panel LEDs; all are wired active-low on GPIO bank 0. */
	gpio-leds {
		compatible = "gpio-leds";

		message_red {
			label = "HG556a:red:message";
			gpios = <&gpio0 0 GPIO_ACTIVE_LOW>;
		};
		hspa_red {
			label = "HG556a:red:hspa";
			gpios = <&gpio0 1 GPIO_ACTIVE_LOW>;
		};
		dsl_red {
			label = "HG556a:red:dsl";
			gpios = <&gpio0 2 GPIO_ACTIVE_LOW>;
		};
		power_red {
			label = "HG556a:red:power";
			gpios = <&gpio0 3 GPIO_ACTIVE_LOW>;
		};
		all_red {
			label = "HG556a:red:all";
			gpios = <&gpio0 6 GPIO_ACTIVE_LOW>;
		};
		lan1_green {
			label = "HG556a:green:lan1";
			gpios = <&gpio0 12 GPIO_ACTIVE_LOW>;
		};
		lan1_red {
			label = "HG556a:red:lan1";
			gpios = <&gpio0 13 GPIO_ACTIVE_LOW>;
		};
		lan2_green {
			label = "HG556a:green:lan2";
			gpios = <&gpio0 15 GPIO_ACTIVE_LOW>;
		};
		lan2_red {
			label = "HG556a:red:lan2";
			gpios = <&gpio0 22 GPIO_ACTIVE_LOW>;
		};
		lan3_green {
			label = "HG556a:green:lan3";
			gpios = <&gpio0 23 GPIO_ACTIVE_LOW>;
		};
		lan3_red {
			label = "HG556a:red:lan3";
			gpios = <&gpio0 26 GPIO_ACTIVE_LOW>;
		};
		lan4_green {
			label = "HG556a:green:lan4";
			gpios = <&gpio0 27 GPIO_ACTIVE_LOW>;
		};
		lan4_red {
			label = "HG556a:red:lan4";
			gpios = <&gpio0 28 GPIO_ACTIVE_LOW>;
		};
	};
};

/* Enable the peripherals this board actually uses. */
&gpio0 {
	status = "okay";
};

&pflash {
	status = "okay";
};

/* Console UART; needed before relocation for U-Boot's early output. */
&uart0 {
	u-boot,dm-pre-reloc;
	status = "okay";
};
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<!--
  WebObjects component API definition for ERD2WPick: declares the bindings
  the component accepts. wocomponentcontent="false" means the component
  renders no nested content of its own.
-->
<wodefinitions>
    <wo class="ERD2WPick" wocomponentcontent="false">
        <binding name="dataSource"/>
        <binding name="list"/>
        <binding name="selectedObjects"/>
        <binding name="action"/>
        <binding name="displayKeys"/>
        <binding name="entityName"/>
        <binding name="pageConfiguration"/>
        <binding name="branchDelegate"/>
    </wo>
</wodefinitions>
{ "pile_set_name": "Github" }
# Migration: moves the stacking_id column from katello_subscriptions down onto
# katello_pools, copying each subscription's stacking_id to all of its pools
# before dropping the source column. Irreversible (the source column is gone).
class MoveStackingId < ActiveRecord::Migration[5.1]
  # Lightweight stand-in models so the migration does not depend on the
  # application's current (and possibly future-incompatible) model classes.
  class StackSubscription < ApplicationRecord
    self.table_name = 'katello_subscriptions'
    has_many :pools, :class_name => "StackPool", :inverse_of => :subscription,
             :dependent => :destroy, :foreign_key => 'subscription_id'
  end

  class StackPool < ApplicationRecord
    self.table_name = 'katello_pools'
    belongs_to :subscription, :inverse_of => :pools, :class_name => "StackSubscription"
  end

  def up
    add_column :katello_pools, :stacking_id, :string
    # find_each batches subscriptions; update_all issues one UPDATE per
    # subscription rather than instantiating every pool record.
    StackSubscription.find_each do |sub|
      sub.pools.update_all(:stacking_id => sub.stacking_id)
    end
    remove_column :katello_subscriptions, :stacking_id
  end

  def down
    # The original column (and its data) is destroyed in #up, so there is
    # nothing to restore.
    fail ActiveRecord::IrreversibleMigration
  end
end
{ "pile_set_name": "Github" }
// Copyright (c) 2013 Blake Gentry. All rights reserved. Use of // this source code is governed by an MIT license that can be // found in the LICENSE file. package heroku import ( "bytes" "encoding/json" "errors" "fmt" "io" "log" "net/http" "net/http/httputil" "os" "reflect" "runtime" "strings" "github.com/heroku/hk/Godeps/_workspace/src/code.google.com/p/go-uuid/uuid" ) const ( Version = "0.10.2" DefaultAPIURL = "https://api.heroku.com" DefaultUserAgent = "heroku-go/" + Version + " (" + runtime.GOOS + "; " + runtime.GOARCH + ")" ) // A Client is a Heroku API client. Its zero value is a usable client that uses // default settings for the Heroku API. The Client has an internal HTTP client // (HTTP) which defaults to http.DefaultClient. // // As with all http.Clients, this Client's Transport has internal state (cached // HTTP connections), so Clients should be reused instead of created as needed. // Clients are safe for use by multiple goroutines. type Client struct { // HTTP is the Client's internal http.Client, handling HTTP requests to the // Heroku API. HTTP *http.Client // The URL of the Heroku API to communicate with. Defaults to // "https://api.heroku.com". URL string // Username is the HTTP basic auth username for API calls made by this Client. Username string // Password is the HTTP basic auth password for API calls made by this Client. Password string // UserAgent to be provided in API requests. Set to DefaultUserAgent if not // specified. UserAgent string // Debug mode can be used to dump the full request and response to stdout. Debug bool // AdditionalHeaders are extra headers to add to each HTTP request sent by // this Client. AdditionalHeaders http.Header // Path to the Unix domain socket or a running heroku-agent. 
HerokuAgentSocket string } func (c *Client) Get(v interface{}, path string) error { return c.APIReq(v, "GET", path, nil) } func (c *Client) Patch(v interface{}, path string, body interface{}) error { return c.APIReq(v, "PATCH", path, body) } func (c *Client) Post(v interface{}, path string, body interface{}) error { return c.APIReq(v, "POST", path, body) } func (c *Client) Put(v interface{}, path string, body interface{}) error { return c.APIReq(v, "PUT", path, body) } func (c *Client) Delete(path string) error { return c.APIReq(nil, "DELETE", path, nil) } // Generates an HTTP request for the Heroku API, but does not // perform the request. The request's Accept header field will be // set to: // // Accept: application/vnd.heroku+json; version=3 // // The Request-Id header will be set to a random UUID. The User-Agent header // will be set to the Client's UserAgent, or DefaultUserAgent if UserAgent is // not set. // // The type of body determines how to encode the request: // // nil no body // io.Reader body is sent verbatim // else body is encoded as application/json func (c *Client) NewRequest(method, path string, body interface{}) (*http.Request, error) { var ctype string var rbody io.Reader switch t := body.(type) { case nil: case string: rbody = bytes.NewBufferString(t) case io.Reader: rbody = t default: v := reflect.ValueOf(body) if !v.IsValid() { break } if v.Type().Kind() == reflect.Ptr { v = reflect.Indirect(v) if !v.IsValid() { break } } j, err := json.Marshal(body) if err != nil { log.Fatal(err) } rbody = bytes.NewReader(j) ctype = "application/json" } apiURL := strings.TrimRight(c.URL, "/") if apiURL == "" { apiURL = DefaultAPIURL } req, err := http.NewRequest(method, apiURL+path, rbody) if err != nil { return nil, err } // If we're talking to heroku-agent over a local Unix socket, downgrade to // HTTP; heroku-agent will establish a secure connection between itself and // the Heroku API. 
if c.HerokuAgentSocket != "" { req.URL.Scheme = "http" } req.Header.Set("Accept", "application/vnd.heroku+json; version=3") req.Header.Set("Request-Id", uuid.New()) useragent := c.UserAgent if useragent == "" { useragent = DefaultUserAgent } req.Header.Set("User-Agent", useragent) if ctype != "" { req.Header.Set("Content-Type", ctype) } req.SetBasicAuth("", c.Password) for k, v := range c.AdditionalHeaders { req.Header[k] = v } return req, nil } // Sends a Heroku API request and decodes the response into v. As // described in NewRequest(), the type of body determines how to // encode the request body. As described in DoReq(), the type of // v determines how to handle the response body. func (c *Client) APIReq(v interface{}, meth, path string, body interface{}) error { req, err := c.NewRequest(meth, path, body) if err != nil { return err } return c.DoReq(req, v) } // Submits an HTTP request, checks its response, and deserializes // the response into v. The type of v determines how to handle // the response body: // // nil body is discarded // io.Writer body is copied directly into v // else body is decoded into v as json // func (c *Client) DoReq(req *http.Request, v interface{}) error { if c.Debug { dump, err := httputil.DumpRequestOut(req, true) if err != nil { log.Println(err) } else { os.Stderr.Write(dump) os.Stderr.Write([]byte{'\n', '\n'}) } } httpClient := c.HTTP if httpClient == nil { httpClient = http.DefaultClient } res, err := httpClient.Do(req) if err != nil { return err } defer res.Body.Close() if c.Debug { dump, err := httputil.DumpResponse(res, true) if err != nil { log.Println(err) } else { os.Stderr.Write(dump) os.Stderr.Write([]byte{'\n'}) } } if err = checkResp(res); err != nil { return err } switch t := v.(type) { case nil: case io.Writer: _, err = io.Copy(t, res.Body) default: err = json.NewDecoder(res.Body).Decode(v) } return err } // An Error represents a Heroku API error. 
type Error struct { error Id string URL string } type errorResp struct { Message string Id string URL string `json:"url"` } func checkResp(res *http.Response) error { if res.StatusCode/100 != 2 { // 200, 201, 202, etc var e errorResp err := json.NewDecoder(res.Body).Decode(&e) if err != nil { return errors.New("Unexpected error: " + res.Status) } return Error{error: errors.New(e.Message), Id: e.Id, URL: e.URL} } if msg := res.Header.Get("X-Heroku-Warning"); msg != "" { fmt.Fprintln(os.Stderr, strings.TrimSpace(msg)) } return nil } type ListRange struct { Field string Max int Descending bool FirstId string LastId string } func (lr *ListRange) SetHeader(req *http.Request) { var hdrval string if lr.Field != "" { hdrval += lr.Field + " " } hdrval += lr.FirstId + ".." + lr.LastId if lr.Max != 0 { hdrval += fmt.Sprintf("; max=%d", lr.Max) if lr.Descending { hdrval += ", " } } if lr.Descending { hdrval += ", order=desc" } req.Header.Set("Range", hdrval) return }
{ "pile_set_name": "Github" }
--- title: BaselineSavedDate Property, Project [vbapj.chm131584] keywords: vbapj.chm131584 f1_keywords: - vbapj.chm131584 ms.prod: office ms.assetid: 7b50ec09-aad4-4358-9f70-54d594aea861 ms.date: 06/08/2017 localization_priority: Normal --- # BaselineSavedDate Property, Project [vbapj.chm131584] Hi there! You have landed on one of our F1 Help redirector pages. Please select the topic you were looking for below. [Project.BaselineSavedDate Property (Project)](https://msdn.microsoft.com/library/780c5190-68bb-1c10-0dbb-612e5606184e%28Office.15%29.aspx) [Task.CPI Property (Project)](https://msdn.microsoft.com/library/91988461-bcde-3b5a-d051-71596b76597e%28Office.15%29.aspx) [!include[Support and feedback](~/includes/feedback-boilerplate.md)]
{ "pile_set_name": "Github" }
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* ***** BEGIN LICENSE BLOCK ***** * Version: MPL 1.1/GPL 2.0/LGPL 2.1 * * The contents of this file are subject to the Mozilla Public License Version * 1.1 (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * http://www.mozilla.org/MPL/ * * Software distributed under the License is distributed on an "AS IS" basis, * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License * for the specific language governing rights and limitations under the * License. * * The Original Code is Mozilla Communicator client code. * * The Initial Developer of the Original Code is * Netscape Communications Corporation. * Portions created by the Initial Developer are Copyright (C) 1998 * the Initial Developer. All Rights Reserved. * * Contributor(s): * * Alternatively, the contents of this file may be used under the terms of * either of the GNU General Public License Version 2 or later (the "GPL"), * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), * in which case the provisions of the GPL or the LGPL are applicable instead * of those above. If you wish to allow use of your version of this file only * under the terms of either the GPL or the LGPL, and not to allow others to * use your version of this file under the terms of the MPL, indicate your * decision by deleting the provisions above and replace them with the notice * and other provisions required by the GPL or the LGPL. If you do not delete * the provisions above, a recipient may use your version of this file under * the terms of any one of the MPL, the GPL or the LGPL. * * ***** END LICENSE BLOCK ***** */ #ifndef nsIHTMLContentSink_h___ #define nsIHTMLContentSink_h___ /** * This file declares the concrete HTMLContentSink class. 
* This class is used during the parsing process as the * primary interface between the parser and the content * model. * * After the tokenizer completes, the parser iterates over * the known token list. As the parser identifies valid * elements, it calls the contentsink interface to notify * the content model that a new node or child node is being * created and added to the content model. * * The HTMLContentSink interface assumes 4 underlying * containers: HTML, HEAD, BODY and FRAMESET. Before * accessing any these, the parser will call the appropriate * OpennsIHTMLContentSink method: OpenHTML,OpenHead,OpenBody,OpenFrameSet; * likewise, the ClosensIHTMLContentSink version will be called when the * parser is done with a given section. * * IMPORTANT: The parser may Open each container more than * once! This is due to the irregular nature of HTML files. * For example, it is possible to encounter plain text at * the start of an HTML document (that preceeds the HTML tag). * Such text is treated as if it were part of the body. * In such cases, the parser will Open the body, pass the text- * node in and then Close the body. The body will likely be * re-Opened later when the actual <BODY> tag has been seen. * * Containers within the body are Opened and Closed * using the OpenContainer(...) and CloseContainer(...) calls. * It is assumed that the document or contentSink is * maintaining its state to manage where new content should * be added to the underlying document. * * NOTE: OpenHTML() and OpenBody() may get called multiple times * in the same document. That's fine, and it doesn't mean * that we have multiple bodies or HTML's. * * NOTE: I haven't figured out how sub-documents (non-frames) * are going to be handled. Stay tuned. 
 */

#include "nsIParserNode.h"
#include "nsIContentSink.h"
#include "nsHTMLTags.h"

// IID for the nsIHTMLContentSink interface:
// d19e6730-5e2f-4131-89db-8a918515097d
#define NS_IHTML_CONTENT_SINK_IID \
{ 0xd19e6730, 0x5e2f, 0x4131, \
  { 0x89, 0xdb, 0x8a, 0x91, 0x85, 0x15, 0x09, 0x7d } }

// Platform-dependent cap on content-model nesting depth.
#if defined(XP_MAC)
#define MAX_REFLOW_DEPTH  75  //setting to 75 to prevent layout from crashing on mac. Bug 55095.
#else
#define MAX_REFLOW_DEPTH  200 //windows and linux (etc) can do much deeper structures.
#endif

// Interface implemented by the HTML content sink; the parser drives it as it
// recognizes tokens (see the file-level comment above for the overall protocol).
class nsIHTMLContentSink : public nsIContentSink
{
public:

  NS_DECLARE_STATIC_IID_ACCESSOR(NS_IHTML_CONTENT_SINK_IID)

  /**
   * This method is used to open the HEAD container. It is useful if a tag
   * is forcing us to open the head (probably again), like if we find a <meta>
   * tag in the body.
   */
  NS_IMETHOD OpenHead() = 0;

  /**
   * This gets called when handling illegal contents, especially
   * in dealing with tables. This method creates a new context.
   *
   * @update 04/04/99 harishd
   * @param aPosition - The position from where the new context begins.
   */
  NS_IMETHOD BeginContext(PRInt32 aPosition) = 0;

  /**
   * This method terminates any new context that got created by
   * BeginContext and switches back to the main context.
   *
   * @update 04/04/99 harishd
   * @param aPosition - Validates the end of a context.
   */
  NS_IMETHOD EndContext(PRInt32 aPosition) = 0;

  /**
   * Asks the sink whether the given tag is currently enabled.
   *
   * @update 01/09/2003 harishd
   * @param aTag - Check if this tag is enabled or not.
   * @param aReturn - Out parameter; receives the enabled state.
   */
  NS_IMETHOD IsEnabled(PRInt32 aTag, PRBool* aReturn) = 0;

  /**
   * This method is called when parser has
   * completed processing a chunk of tokens. The processing of the
   * tokens may be interrupted by returning NS_ERROR_HTMLPARSER_INTERRUPTED from
   * DidProcessAToken.
   */
  NS_IMETHOD DidProcessTokens() = 0;

  /**
   * This method is called when parser is about to
   * process a single token
   */
  NS_IMETHOD WillProcessAToken(void) = 0;

  /**
   * This method is called when parser has completed
   * the processing for a single token.
   * @return NS_OK if processing should not be interrupted
   *         NS_ERROR_HTMLPARSER_INTERRUPTED if the parsing should be interrupted
   */
  NS_IMETHOD DidProcessAToken(void) = 0;

  /**
   * This method is used to open a generic container in the sink.
   *
   * @update 4/1/98 gess
   * @param  nsIParserNode reference to parser node interface
   */
  NS_IMETHOD OpenContainer(const nsIParserNode& aNode) = 0;

  /**
   * This method gets called by the parser when a close
   * container tag has been consumed and needs to be closed.
   *
   * @param  aTag - The tag to be closed.
   */
  NS_IMETHOD CloseContainer(const nsHTMLTag aTag) = 0;

  /**
   * This method is used when we're closing a tag that was malformed
   * in some way. This way, the content sink can do special processing
   * (e.g., not execute a malformed script tag). Defaults to a plain
   * CloseContainer for sinks that don't care.
   *
   * @param aTag The tag to be closed.
   */
  NS_IMETHOD CloseMalformedContainer(const nsHTMLTag aTag)
  {
    return CloseContainer(aTag);
  }

  /**
   * This gets called by the parser when you want to add
   * a leaf node to the current container in the content
   * model.
   *
   * @update 4/1/98 gess
   * @param  nsIParserNode reference to parser node interface
   */
  NS_IMETHOD AddLeaf(const nsIParserNode& aNode) = 0;

  /**
   * This gets called by the parser when a comment has been
   * consumed and should be added to the current container in
   * the content model.
   *
   * @update 4/1/98 gess
   * @param  nsIParserNode reference to parser node interface
   */
  NS_IMETHOD AddComment(const nsIParserNode& aNode) = 0;

  /**
   * This gets called by the parser when a processing instruction
   * has been consumed and should be added to the current container
   * in the content model.
   *
   * @update 4/1/98 gess
   * @param  nsIParserNode reference to parser node interface
   */
  NS_IMETHOD AddProcessingInstruction(const nsIParserNode& aNode) = 0;

  /**
   * This method is called by the parser when it encounters
   * a document type declaration.
   *
   * XXX Should the parser also parse the internal subset?
   *
   * @param  nsIParserNode reference to parser node interface
   */
  NS_IMETHOD AddDocTypeDecl(const nsIParserNode& aNode) = 0;

  /**
   * This gets called by the parser to notify observers of
   * the tag
   *
   * @param aNode the tag node that observers should be notified about
   */
  NS_IMETHOD NotifyTagObservers(nsIParserNode* aNode) = 0;

  /**
   * Call this method to determnine if a FORM is on the sink's stack
   *
   * @return PR_TRUE if found else PR_FALSE
   */
  NS_IMETHOD_(PRBool) IsFormOnStack() = 0;

};

NS_DEFINE_STATIC_IID_ACCESSOR(nsIHTMLContentSink, NS_IHTML_CONTENT_SINK_IID)

#endif /* nsIHTMLContentSink_h___ */
{ "pile_set_name": "Github" }
#!/bin/sh
#
# Recipe for creating the pcp-dstat archive
#

# Pull in the standard PCP environment (PCP_DIR, PCP_BINADM_DIR, PCP_VAR_DIR).
. $PCP_DIR/etc/pcp.env

here=`pwd`
tmp=/tmp/$$
rm -rf $tmp

# PCP tool paths resolved from the installed binadm directory.
PMLOGCONF=$PCP_BINADM_DIR/pmlogconf
PMLOGGER=$PCP_BINADM_DIR/pmlogger
PMSLEEP=$PCP_BINADM_DIR/pmsleep
MKAF=$PCP_BINADM_DIR/mkaf

# curl is needed below to generate the network traffic captured in the archive.
if which curl >/dev/null 2>&1
then
    :
else
    echo "Arrgh, curl binary is apparently not installed"
    exit 1
fi

# Clean up the scratch directory on exit or interruption.
trap "rm -rf $tmp; exit" 0 1 2 3 15

mkdir -p $tmp/config
cp $PCP_VAR_DIR/config/pmlogconf/tools/dstat* $tmp/config

# create an empty pmlogconf configuration
echo "#pmlogconf 2.0" > $tmp.config
echo "#+ groupdir $tmp/config" >> $tmp.config

# interactive - set 1 second interval, and log everything!
$PMLOGCONF -d $tmp/config $tmp.config

# Record 5 samples at 1s intervals into archive "pcp-dstat" in the background.
rm -f pcp-dstat.*
$PMLOGGER -t 1 -s 5 -c $tmp.config -l $tmp.log pcp-dstat &

#
# Do some work to make kernel stats move ...
#
# misc net traffic
curl http://localhost:80/status >/dev/null 2>&1
$PMSLEEP 0.2
curl http://localhost:80/status >/dev/null 2>&1
$PMSLEEP 0.8
curl http://www.google.com/ >/dev/null 2>&1
$PMSLEEP 0.5
# some disk I/O and cpu time
find /var/log >/dev/null 2>&1 &
$PMSLEEP 0.05
sum /usr/bin/bash >/dev/null &
$PMSLEEP 1.5
sum /usr/bin/ls >/dev/null &
# wait for pmlogger and the background workloads to finish
wait

echo "pmlogger log:"
cat $tmp.log

# compress the first (only) archive data volume
xz pcp-dstat.0
{ "pile_set_name": "Github" }
// Copyright 2020 The Grin Developers // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Build hooks to spit out version+build time info extern crate built; fn main() { let mut opts = built::Options::default(); opts.set_dependencies(true); let manifest_location = std::env::var("CARGO_MANIFEST_DIR").unwrap(); let dst = std::path::Path::new(&std::env::var("OUT_DIR").unwrap()).join("built.rs"); built::write_built_file_with_opts(&opts, manifest_location.as_ref(), &dst) .expect("Failed to acquire build-time information"); }
{ "pile_set_name": "Github" }
/*
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 */

package mozilla.lockbox.uiTests

import androidx.test.rule.ActivityTestRule
import androidx.test.ext.junit.runners.AndroidJUnit4
import mozilla.lockbox.robots.fingerprintDialog
import mozilla.lockbox.robots.uiComponents
import mozilla.lockbox.view.UITestActivity
import org.junit.Ignore
import org.junit.Rule
import org.junit.Test
import org.junit.runner.RunWith

/**
 * This is an example test to demonstrate a test activity and some UI Components from the main app.
 */
@RunWith(AndroidJUnit4::class)
open class FingerprintDialogTest {
    // Launches the lightweight UITestActivity before each test method.
    @Rule @JvmField
    val activityRule = ActivityTestRule(UITestActivity::class.java)

    // Ignored pending reliable UI test runs on CI (bitrise); see #620.
    @Ignore("619-ui-tests-bitrise (#620)")
    @Test
    fun testLaunchRobot() {
        uiComponents { launchFingerprintDialog() }
        fingerprintDialog {
            exists()
            // touchFingerprint() // does not work.
            tapCancel()
        }
        // Dismissing the dialog should land us back on the components screen.
        uiComponents { exists() }
    }

    // Same flow as above but for the "enable fingerprint" variant of the dialog.
    @Ignore("619-ui-tests-bitrise (#620)")
    @Test
    fun testLaunchEnableFingerprintRobot() {
        uiComponents { launchEnableFingerprintDialog() }
        fingerprintDialog {
            exists()
            // touchFingerprint() // does not work.
            tapCancel()
        }
        uiComponents { exists() }
    }
}
{ "pile_set_name": "Github" }
package com.slack.api.app_backend.events.payload;

import com.slack.api.model.event.StarAddedEvent;
import lombok.Data;

import java.util.List;

/**
 * Events API payload envelope carrying a {@link StarAddedEvent}.
 * Lombok's {@code @Data} generates the getters/setters, equals/hashCode and toString
 * for the fields below, which mirror the standard Slack Events API envelope.
 */
@Data
public class StarAddedPayload implements EventsApiPayload<StarAddedEvent> {
    private String token;
    private String enterpriseId;
    private String teamId;
    private String apiAppId;
    private String type;
    private List<String> authedUsers;
    private List<String> authedTeams;
    private String eventId;
    // NOTE(review): presumably a Unix epoch timestamp in seconds — confirm against Slack docs.
    private Integer eventTime;
    // The inner event object this envelope wraps.
    private StarAddedEvent event;
}
{ "pile_set_name": "Github" }
# ******************************************************************************
# Copyright 2017-2018 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ******************************************************************************
import logging
import os
import re
import sys

from nlp_architect.data.cdc_resources.data_types.wiki.wikipedia_page import WikipediaPage
from nlp_architect.data.cdc_resources.data_types.wiki.wikipedia_page_extracted_relations import (
    WikipediaPageExtractedRelations,
)
from nlp_architect.data.cdc_resources.wikipedia.wiki_search_page_result import (
    WikipediaSearchPageResult,
)
from nlp_architect.utils.text import SpacyInstance

# Run pywikibot without a user-config.py file.
os.environ["PYWIKIBOT_NO_USER_CONFIG"] = "1"

DISAMBIGUATE_PAGE = ["wikimedia disambiguation page", "wikipedia disambiguation page"]
NAME_DESCRIPTIONS = ["given name", "first name", "family name"]

logger = logging.getLogger(__name__)


class WikiOnline(object):
    """Online Wikipedia/Wikidata lookup.

    Resolves surface phrases to Wikipedia pages via pywikibot and extracts
    relations (aliases, descriptions, "be-comp" copular relations) used for
    cross-document coreference resolution. Results are memoized per phrase.
    """

    def __init__(self):
        # Imported lazily so a missing optional dependency fails with a clear
        # message instead of an import-time crash of the whole package.
        try:
            import pywikibot
        except (AttributeError, ImportError):
            logger.error(
                "pywikibot is not installed, please install nlp_architect with [all] package. "
                + "for example: pip install nlp_architect[all]"
            )
            sys.exit()
        self.spacy = SpacyInstance()
        self.pywikibot = pywikibot
        self.cache = dict()  # phrase -> set(WikipediaSearchPageResult)
        self.site = pywikibot.Site("en", "wikipedia")  # The site we want to run our bot on

    def get_pages(self, phrase):
        """Return the set of candidate Wikipedia pages for ``phrase``.

        Tries several capitalization variants of the phrase; lookups are
        best-effort, so per-variant failures are logged and skipped.
        """
        if phrase in self.cache:
            return self.cache[phrase]

        ret_pages = set()
        word_clean = phrase.replace("-", " ")
        word_lower = word_clean.lower()
        word_upper = word_clean.upper()
        word_title = word_clean.title()
        words_set = {phrase, word_clean, word_lower, word_upper, word_title}
        for appr in words_set:
            try:
                page_result = self.get_page_redirect(appr)
                # pageid == 0 means the page does not exist.
                if page_result.pageid != 0:
                    full_page = self.get_wiki_page_with_items(phrase, page_result)
                    ret_pages.add(WikipediaSearchPageResult(appr, full_page))
            except Exception as e:
                # Best-effort: network/API errors for one variant must not
                # abort the remaining lookups.
                logger.error(e)

        self.cache[phrase] = ret_pages
        return ret_pages

    # pylint: disable=protected-access
    def get_wiki_page_with_items(self, phrase, page):
        """Build a :class:`WikipediaPage` with extracted relations for ``page``."""
        item = self.get_wiki_page_item(page)
        pageid = page.pageid
        aliases = self.get_aliases(item)
        description = self.get_description(item)
        text = page.text
        page_title = page._link._title

        relations = WikipediaPageExtractedRelations()
        relations.is_disambiguation = self.is_disambiguation_page(item)
        relations.is_part_name = self.is_name_description(text, item, relations.is_disambiguation)
        relations.aliases = aliases
        relations.be_comp, relations.be_comp_norm = self.extract_be_comp(text)
        relations.extract_relations_from_text_v0(text)

        ret_page = WikipediaPage(phrase, None, page_title, None, 0, pageid, description, relations)
        logger.debug("Page: {}. Extracted successfully".format(ret_page))
        return ret_page

    def get_wiki_page_item(self, page):
        """Return the Wikidata item for ``page``, or ``None`` if unavailable."""
        if page is not None:
            try:
                item = self.pywikibot.ItemPage.fromPage(
                    page
                )  # this can be used for any page object
                item.get()  # need to call it to access any data.
                return item
            except (self.pywikibot.NoPage, AttributeError, TypeError, NameError):
                pass
        return None

    def get_page_redirect(self, word):
        """Return the page for ``word``, following a redirect if present."""
        page = self.pywikibot.Page(self.site, word)
        if page.pageid != 0 and page.isRedirectPage():
            return page.getRedirectTarget()
        return page

    @staticmethod
    def get_aliases(item):
        """Return the English aliases of a Wikidata item, or ``None``."""
        if item is not None and item.aliases is not None:
            if "en" in item.aliases:
                aliases = item.aliases["en"]
                return aliases
        return None

    @staticmethod
    def get_description(item):
        """Return ``{"descriptions": {"en": ...}}`` for ``item`` (empty dict if none).

        BUG FIX: the key was misspelled ``"desctiptions"`` in the guard, so the
        English description was never extracted; a dead ``dict([("age", 25)])``
        statement was also removed.
        """
        description = {}
        if item is not None:
            item_desc = item.get()
            if "descriptions" in item_desc and "en" in item_desc["descriptions"]:
                description["descriptions"] = dict([("en", item_desc["descriptions"]["en"])])
        return description

    @staticmethod
    def is_disambiguation_page(item):
        """True when the item's English description marks a disambiguation page."""
        if item is not None:
            dic = item.get()
            if dic is not None and "descriptions" in dic:
                desc = dic["descriptions"]
                if desc is not None and "en" in desc:
                    return desc["en"].lower() in DISAMBIGUATE_PAGE
        return False

    @staticmethod
    def is_name_description(text, item, is_disambiguation):
        """True when the page describes a personal-name part (given/family name)."""
        if item is not None:
            if is_disambiguation:
                if WikipediaPageExtractedRelations.is_name_part(text):
                    return True
            else:
                dic = item.get()
                if dic is not None and "descriptions" in dic:
                    desc = dic["descriptions"]
                    if desc is not None and "en" in desc:
                        if [s for s in NAME_DESCRIPTIONS if s in desc["en"].lower()]:
                            return True
        return False

    def extract_be_comp(self, text):
        """Extract copular ("X is a Y") relations from the first real paragraph.

        BUG FIX: the original used ``text.index`` (which raises ``ValueError``
        when ``'''`` is absent) while guarding with ``>= 0`` as if ``find`` had
        been called, and compared ``extract_be_a_index`` (which returns ``None``
        or a non-negative index) against ``-1``, so the paragraph-skipping
        recursion could never trigger. Both are corrected here.
        """
        first_sentence_start_index = text.find("'''")
        if first_sentence_start_index == -1:
            return None, None

        last_temp_index = text.find("\n", first_sentence_start_index)
        if last_temp_index == -1:
            last_temp_index = len(text)
        first_paragraph = text[first_sentence_start_index:last_temp_index]

        if WikiOnline.extract_be_a_index(first_paragraph) is None:
            # No copular phrase in this paragraph; try the next one if any.
            if last_temp_index != len(text):
                return self.extract_be_comp(text[last_temp_index:])
            return None, None

        # Strip wiki markup: parentheticals, HTML tags, templates, links,
        # quote marks and non-breaking spaces.
        first_paragraph_clean = re.sub(r"\([^)]*\)", "", first_paragraph)
        first_paragraph_clean = re.sub(r"<[^>]*>", "", first_paragraph_clean)
        first_paragraph_clean = re.sub(r"{[^}]*}", "", first_paragraph_clean)
        first_paragraph_clean = re.sub(r"\[\[[^]]*\]\]", "", first_paragraph_clean)
        first_paragraph_clean = re.sub(r"[\']", "", first_paragraph_clean)
        first_paragraph_clean = re.sub(r"&nbsp;", " ", first_paragraph_clean)

        return self.extract_be_comp_relations(first_paragraph_clean)

    # pylint: disable=not-callable
    def extract_be_comp_relations(self, first_paragraph):
        """Parse ``first_paragraph`` with spaCy and collect copular arguments.

        Returns two sets: surface forms and lemmatized forms.
        """
        be_comp = set()
        be_comp_norm = set()
        if first_paragraph:
            doc = self.spacy.parser(first_paragraph)
            for token in doc:
                target = token.text
                target_lemma = token.lemma_
                relation = token.dep_
                governor = token.head.text
                governor_lemma = token.head.lemma_
                if relation == "acl":
                    break
                if relation == "punct" and target == ".":
                    # End of the first sentence.
                    break
                elif relation == "cop":
                    be_comp.add(governor)
                    be_comp_norm.add(governor_lemma)
                elif relation == "nsubj":
                    be_comp.add(target)
                    be_comp_norm.add(target_lemma)
                elif relation == "dep":
                    be_comp.add(governor)
                    be_comp_norm.add(governor_lemma)
                elif relation == "compound":
                    be_comp.add(target + " " + governor)
                    be_comp_norm.add(target_lemma + " " + governor_lemma)
                elif relation == "amod":
                    be_comp.add(target + " " + governor)
                    be_comp_norm.add(target_lemma + " " + governor_lemma)
                elif relation in ["conj", "appos"]:
                    be_comp.add(target)
                    be_comp_norm.add(target_lemma)

        return be_comp, be_comp_norm

    @staticmethod
    def extract_be_a_index(sentence):
        """Return the index of the first copular phrase in ``sentence``, else ``None``.

        The order below preserves the original priority ("... a" forms before
        "... the" forms).
        """
        for phrase in (
            "is a", "are a", "was a", "were a", "be a",
            "is the", "are the", "was the", "were the", "be the",
        ):
            if phrase in sentence:
                return sentence.index(phrase)
        return None
{ "pile_set_name": "Github" }
# This file is distributed under the same license as the Django package. # # Translators: # Filip Dupanović <[email protected]>, 2011. msgid "" msgstr "" "Project-Id-Version: Django\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2012-03-23 02:35+0100\n" "PO-Revision-Date: 2012-03-08 10:42+0000\n" "Last-Translator: Filip Dupanović <[email protected]>\n" "Language-Team: Bosnian (http://www.transifex.com/projects/p/django/language/" "bs/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: bs\n" "Plural-Forms: nplurals=3; plural=(n%10==1 && n%100!=11 ? 0 : n%10>=2 && n" "%10<=4 && (n%100<10 || n%100>=20) ? 1 : 2);\n" #: static/admin/js/SelectFilter2.js:45 #, c-format msgid "Available %s" msgstr "Dostupno %s" #: static/admin/js/SelectFilter2.js:46 #, c-format msgid "" "This is the list of available %s. You may choose some by selecting them in " "the box below and then clicking the \"Choose\" arrow between the two boxes." msgstr "" #: static/admin/js/SelectFilter2.js:53 #, c-format msgid "Type into this box to filter down the list of available %s." msgstr "" #: static/admin/js/SelectFilter2.js:57 msgid "Filter" msgstr "Filter" #: static/admin/js/SelectFilter2.js:61 msgid "Choose all" msgstr "Odaberi sve" #: static/admin/js/SelectFilter2.js:61 #, c-format msgid "Click to choose all %s at once." msgstr "" #: static/admin/js/SelectFilter2.js:67 msgid "Choose" msgstr "" #: static/admin/js/SelectFilter2.js:69 msgid "Remove" msgstr "Ukloni" #: static/admin/js/SelectFilter2.js:75 #, c-format msgid "Chosen %s" msgstr "Odabrani %s" #: static/admin/js/SelectFilter2.js:76 #, c-format msgid "" "This is the list of chosen %s. You may remove some by selecting them in the " "box below and then clicking the \"Remove\" arrow between the two boxes." msgstr "" #: static/admin/js/SelectFilter2.js:80 msgid "Remove all" msgstr "" #: static/admin/js/SelectFilter2.js:80 #, c-format msgid "Click to remove all chosen %s at once." 
msgstr "" #: static/admin/js/actions.js:18 static/admin/js/actions.min.js:1 msgid "%(sel)s of %(cnt)s selected" msgid_plural "%(sel)s of %(cnt)s selected" msgstr[0] "Izabran %(sel)s od %(cnt)s" msgstr[1] "Izabrano %(sel)s od %(cnt)s" msgstr[2] "Izabrano %(sel)s od %(cnt)s" #: static/admin/js/actions.js:109 static/admin/js/actions.min.js:5 msgid "" "You have unsaved changes on individual editable fields. If you run an " "action, your unsaved changes will be lost." msgstr "" "Imate nespašene izmjene na pojedinim uređenim poljima. Ako pokrenete ovu " "akciju, te izmjene će biti izgubljene." #: static/admin/js/actions.js:121 static/admin/js/actions.min.js:6 msgid "" "You have selected an action, but you haven't saved your changes to " "individual fields yet. Please click OK to save. You'll need to re-run the " "action." msgstr "" #: static/admin/js/actions.js:123 static/admin/js/actions.min.js:6 msgid "" "You have selected an action, and you haven't made any changes on individual " "fields. You're probably looking for the Go button rather than the Save " "button." msgstr "" #: static/admin/js/calendar.js:26 msgid "" "January February March April May June July August September October November " "December" msgstr "" #: static/admin/js/calendar.js:27 msgid "S M T W T F S" msgstr "" #: static/admin/js/collapse.js:8 static/admin/js/collapse.js.c:19 #: static/admin/js/collapse.min.js:1 msgid "Show" msgstr "" #: static/admin/js/collapse.js:15 static/admin/js/collapse.min.js:1 msgid "Hide" msgstr "" #: static/admin/js/admin/DateTimeShortcuts.js:49 #: static/admin/js/admin/DateTimeShortcuts.js:85 msgid "Now" msgstr "" #: static/admin/js/admin/DateTimeShortcuts.js:53 msgid "Clock" msgstr "" #: static/admin/js/admin/DateTimeShortcuts.js:81 msgid "Choose a time" msgstr "" #: static/admin/js/admin/DateTimeShortcuts.js:86 msgid "Midnight" msgstr "" #: static/admin/js/admin/DateTimeShortcuts.js:87 msgid "6 a.m." 
msgstr "" #: static/admin/js/admin/DateTimeShortcuts.js:88 msgid "Noon" msgstr "" #: static/admin/js/admin/DateTimeShortcuts.js:92 #: static/admin/js/admin/DateTimeShortcuts.js:204 msgid "Cancel" msgstr "" #: static/admin/js/admin/DateTimeShortcuts.js:144 #: static/admin/js/admin/DateTimeShortcuts.js:197 msgid "Today" msgstr "Danas" #: static/admin/js/admin/DateTimeShortcuts.js:148 msgid "Calendar" msgstr "" #: static/admin/js/admin/DateTimeShortcuts.js:195 msgid "Yesterday" msgstr "" #: static/admin/js/admin/DateTimeShortcuts.js:199 msgid "Tomorrow" msgstr ""
{ "pile_set_name": "Github" }
export { default } from 'ember-bootstrap/components/bs-modal/footer';
{ "pile_set_name": "Github" }
{ "$and": [ { "device/custom_data/string_qwerty": { "$starts_with": "qwert" } }, { "device/custom_data/string_qwerty": { "$starts_with": "QWERT" } }, { "device/custom_data/string_qwerty": { "$starts_with": "qwerty" } }, { "device/custom_data/string_qwerty": { "$starts_with": "QWERTY" } }, { "$not": { "device/custom_data/string_qwerty": { "$starts_with": "werty" } } }, { "$not": { "device/custom_data/string_qwerty": { "$starts_with": "WERTY" } } }, { "$not": { "device/custom_data/string_qwerty": { "$starts_with": "foo" } } }, { "$not": { "device/custom_data/string_qwerty": { "$starts_with": null } } }, { "$not": { "device/custom_data/string_qwerty": { "$starts_with": 5 } } }, { "$not": { "device/custom_data/string_qwerty": { "$starts_with": { "_type": "version", "version": "1.0.0" } } } }, { "$not": { "device/custom_data/number_5": { "$starts_with": "five" } } }, { "$not": { "device/custom_data/number_5": { "$starts_with": 5 } } }, { "$not": { "device/custom_data/key_with_null_value": { "$starts_with": null } } }, { "$not": { "device/custom_data/key_with_null_value": { "$starts_with": "" } } }, { "$not": { "device/custom_data/key_with_null_value": { "$starts_with": 5 } } }, { "$not": { "device/custom_data/non_existent_key": { "$starts_with": 5 } } }, { "$not": { "device/custom_data/non_existent_key": { "$starts_with": "" } } } ] }
{ "pile_set_name": "Github" }
import ntn_input
import ntn
import scipy.io as sio
import numpy as np

# Dataset selector: 0 - Wordnet, 1 - Freebase
data_number = 0
if data_number == 0:
    data_name = 'Wordnet'
else:
    data_name = 'Freebase'

# Dimensionality of the pre-trained word embeddings in initEmbed.mat.
embedding_size = 100

# NOTE(review): these are Windows-style paths ('\\'); confirm the target
# platform before switching to os.path.join, since callers may rely on the
# exact strings.
data_path = 'data\\' + data_name
output_path = 'data\\output\\' + data_name + '\\'

entities_string = '/entities.txt'
relations_string = '/relations.txt'
embeds_string = '/initEmbed.mat'
training_string = '/train.txt'
test_string = '/test.txt'
dev_string = '/dev.txt'


def load_entities(data_path=data_path):
    """Return the list of entity names, one per line of entities.txt."""
    # FIX: context manager guarantees the file handle is closed.
    with open(data_path + entities_string) as entities_file:
        return entities_file.read().strip().split('\n')


def load_relations(data_path=data_path):
    """Return the list of relation names, one per line of relations.txt."""
    with open(data_path + relations_string) as relations_file:
        return relations_file.read().strip().split('\n')


def load_init_embeds(data_path=data_path):
    """Load the initial word embeddings shipped with the dataset."""
    embeds_path = data_path + embeds_string
    return load_embeds(embeds_path)


def load_embeds(file_path):
    """Load word vectors and per-entity word-index lists from a MATLAB file.

    Returns a tuple ``(word_vecs, entity_words)`` where ``word_vecs`` is a
    list of ``embedding_size``-dim vectors and ``entity_words`` maps each
    entity to the list of word indices composing it.
    """
    mat_contents = sio.loadmat(file_path)
    words = mat_contents['words']
    we = mat_contents['We']
    tree = mat_contents['tree']
    word_vecs = [[we[j][i] for j in range(embedding_size)] for i in range(len(words[0]))]
    # FIX: materialize map() so the result is a list under Python 3 as well.
    entity_words = [list(map(int, tree[i][0][0][0][0][0])) for i in range(len(tree))]
    return (word_vecs, entity_words)


def load_training_data(data_path=data_path):
    """Load train.txt as an array of tab-separated triples."""
    # FIX: the file was previously opened and never closed (resource leak).
    with open(data_path + training_string) as training_file:
        training_data = [line.split('\t') for line in training_file.read().strip().split('\n')]
    return np.array(training_data)


def load_dev_data(data_path=data_path):
    """Load dev.txt as an array of tab-separated examples."""
    # FIX: resource leak — use a context manager.
    with open(data_path + dev_string) as dev_file:
        dev_data = [line.split('\t') for line in dev_file.read().strip().split('\n')]
    return np.array(dev_data)


def load_test_data(data_path=data_path):
    """Load test.txt as an array of tab-separated examples."""
    # FIX: resource leak — use a context manager.
    with open(data_path + test_string) as test_file:
        test_data = [line.split('\t') for line in test_file.read().strip().split('\n')]
    return np.array(test_data)


def data_to_indexed(data, entities, relations):
    """Map (e1, R, e2, label) string rows to index tuples.

    ``entities``/``relations`` give the vocabulary; the label is kept as a
    float.
    """
    entity_to_index = {entities[i]: i for i in range(len(entities))}
    relation_to_index = {relations[i]: i for i in range(len(relations))}
    indexed_data = [(entity_to_index[data[i][0]], relation_to_index[data[i][1]],
                     entity_to_index[data[i][2]], float(data[i][3])) for i in range(len(data))]
    return indexed_data


# dataset is in the form (e1, R, e2, label)
def data_to_relation_sets(data_batch, num_relations):
    """Bucket an indexed batch by relation id.

    Returns ``(batches, labels)`` where ``batches[r]`` holds ``(e1, e2, 1)``
    triples for relation ``r`` and ``labels[r]`` the matching ``[label]``
    singletons.
    """
    batches = [[] for i in range(num_relations)]
    labels = [[] for i in range(num_relations)]
    for e1, r, e2, label in data_batch:
        batches[r].append((e1, e2, 1))
        labels[r].append([label])
    return (batches, labels)
{ "pile_set_name": "Github" }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package http2;

import java.io.IOException;
import java.io.PrintWriter;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.PushBuilder;

/**
 * Demonstrates HTTP/2 server push: the referenced image is pushed to the
 * client before the page that embeds it is written.
 */
public class SimpleImagePush extends HttpServlet {

    private static final long serialVersionUID = 1L;

    @Override
    protected void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws ServletException, IOException {

        resp.setCharacterEncoding("UTF-8");
        resp.setContentType("text/html");
        PrintWriter writer = resp.getWriter();

        PushBuilder pushBuilder = req.newPushBuilder();
        if (pushBuilder == null) {
            // Server push is unavailable (e.g. plain HTTP/1.1 connection).
            writer.println("<html>");
            writer.println("<body>");
            writer.println("<p>Server push requests are not supported by this protocol.</p>");
            writer.println("</body>");
            writer.println("</html>");
            return;
        }

        // Push the image first so it is already in flight when the page loads.
        pushBuilder.path("servlets/images/code.gif");
        pushBuilder.push();

        writer.println("<html>");
        writer.println("<body>");
        writer.println("<p>The following image was provided via a push request.</p>");
        writer.println("<img src=\"" + req.getContextPath() + "/servlets/images/code.gif\"/>");
        writer.println("</body>");
        writer.println("</html>");
        writer.flush();
    }
}
{ "pile_set_name": "Github" }
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json

from alipay.aop.api.response.AlipayResponse import AlipayResponse
from alipay.aop.api.domain.PromoItemInfo import PromoItemInfo


class KoubeiMarketingCampaignItemBatchqueryResponse(AlipayResponse):
    """Response wrapper for the koubei.marketing.campaign.item.batchquery API."""

    def __init__(self):
        super(KoubeiMarketingCampaignItemBatchqueryResponse, self).__init__()
        self._items = None

    @property
    def items(self):
        return self._items

    @items.setter
    def items(self, value):
        # Normalize every element to PromoItemInfo; plain dicts are converted.
        if isinstance(value, list):
            self._items = [
                element if isinstance(element, PromoItemInfo)
                else PromoItemInfo.from_alipay_dict(element)
                for element in value
            ]

    def parse_response_content(self, response_content):
        # Let the base class parse the raw payload, then lift the items field.
        parsed = super(
            KoubeiMarketingCampaignItemBatchqueryResponse, self
        ).parse_response_content(response_content)
        if 'items' in parsed:
            self.items = parsed['items']
{ "pile_set_name": "Github" }
/** * Copyright (C) 2010-2018 Gordon Fraser, Andrea Arcuri and EvoSuite * contributors * * This file is part of EvoSuite. * * EvoSuite is free software: you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published * by the Free Software Foundation, either version 3.0 of the License, or * (at your option) any later version. * * EvoSuite is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with EvoSuite. If not, see <http://www.gnu.org/licenses/>. */ package com.examples.with.different.packagename.inheritance; /** * Created by Andrea Arcuri on 16/07/15. */ public class TheSuperClass { public void foo(){ System.out.println("ASuperClass"); } }
{ "pile_set_name": "Github" }
- if @registries.empty? .panel.panel-default .panel-heading h5 | Registries .panel-body p No registry configured. Please configure one. = link_to "Create registry", new_admin_registry_path, {class: 'btn btn-primary', role: 'button'} - else .panel.panel-default .panel-heading h5 | Registries .panel-body .table-responsive table.table.table-stripped.table-hover col.col-40 col.col-40 col.col-10 col.col-10 thead tr th Name th Hostname th SSL th tbody - @registries.each do |registry| tr id="registry_#{registry.id}" td= registry.name td= registry.hostname td i.fa.fa-lg class="fa-toggle-#{registry.use_ssl? ? 'on': 'off'}" title="SSL in the comunication between Portus and this registry is #{registry.use_ssl? ? "enabled" : "disabled"}" td a href="#{edit_admin_registry_path(registry.id)}" Edit .alert.alert-info strong Note well: | right now Portus is designed to handle only a single private Registry.
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <PropertyGroup> <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration> <Platform Condition=" '$(Platform)' == '' ">AnyCPU</Platform> <ProjectTypeGuids>{786C830F-07A1-408B-BD7F-6EE04809D6DB};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids> <ProjectGuid>{EE919791-32BE-472B-A111-4A171BC65FA0}</ProjectGuid> <OutputType>Library</OutputType> <RootNamespace>FormsNativeVideoPlayer</RootNamespace> <AssemblyName>FormsNativeVideoPlayer</AssemblyName> <TargetFrameworkVersion>v4.5</TargetFrameworkVersion> <TargetFrameworkProfile>Profile78</TargetFrameworkProfile> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' "> <DebugSymbols>true</DebugSymbols> <DebugType>full</DebugType> <Optimize>false</Optimize> <OutputPath>bin\Debug</OutputPath> <DefineConstants>DEBUG;</DefineConstants> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> <ConsolePause>false</ConsolePause> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' "> <DebugType>full</DebugType> <Optimize>true</Optimize> <OutputPath>bin\Release</OutputPath> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> <ConsolePause>false</ConsolePause> </PropertyGroup> <ItemGroup> <Compile Include="FormsNativeVideoPlayer.cs" /> <Compile Include="Properties\AssemblyInfo.cs" /> <Compile Include="LandingPage.cs" /> </ItemGroup> <Import Project="$(MSBuildExtensionsPath32)\Microsoft\Portable\$(TargetFrameworkVersion)\Microsoft.Portable.CSharp.targets" /> <Import Project="..\packages\Xamarin.Forms.1.4.2.6359\build\portable-win+net45+wp80+win81+wpa81+MonoAndroid10+MonoTouch10+Xamarin.iOS10\Xamarin.Forms.targets" 
Condition="Exists('..\packages\Xamarin.Forms.1.4.2.6359\build\portable-win+net45+wp80+win81+wpa81+MonoAndroid10+MonoTouch10+Xamarin.iOS10\Xamarin.Forms.targets')" /> <ItemGroup> <Reference Include="Xamarin.Forms.Core"> <HintPath>..\packages\Xamarin.Forms.1.4.2.6359\lib\portable-win+net45+wp80+win81+wpa81+MonoAndroid10+MonoTouch10+Xamarin.iOS10\Xamarin.Forms.Core.dll</HintPath> </Reference> <Reference Include="Xamarin.Forms.Xaml"> <HintPath>..\packages\Xamarin.Forms.1.4.2.6359\lib\portable-win+net45+wp80+win81+wpa81+MonoAndroid10+MonoTouch10+Xamarin.iOS10\Xamarin.Forms.Xaml.dll</HintPath> </Reference> <Reference Include="Xamarin.Forms.Platform"> <HintPath>..\packages\Xamarin.Forms.1.4.2.6359\lib\portable-win+net45+wp80+win81+wpa81+MonoAndroid10+MonoTouch10+Xamarin.iOS10\Xamarin.Forms.Platform.dll</HintPath> </Reference> </ItemGroup> <ItemGroup> <None Include="packages.config" /> </ItemGroup> </Project>
{ "pile_set_name": "Github" }
import debug_me # noqa from flask import Flask from flask import render_template app = Flask(__name__) @app.route("/") def home(): content = "Flask-Jinja-Test" print("break here") # @bphome return render_template("hello.html", title="Hello", content=content) @app.route("/handled") def bad_route_handled(): try: raise ArithmeticError("Hello") # @exc_handled except Exception: pass return render_template("hello.html", title="Hello", content="Flask-Jinja-Test") @app.route("/unhandled") def bad_route_unhandled(): raise ArithmeticError("Hello") # @exc_unhandled return render_template("hello.html", title="Hello", content="Flask-Jinja-Test") @app.route("/badtemplate") def bad_template(): return render_template("bad.html", title="Hello", content="Flask-Jinja-Test") @app.route("/exit") def exit_app(): from flask import request func = request.environ.get("werkzeug.server.shutdown") if func is None: raise RuntimeError("No shutdown") func() return "Done"
{ "pile_set_name": "Github" }
// Copyright (c) 2016 Uber Technologies, Inc. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. package zap import "time" func timeToMillis(t time.Time) int64 { return t.UnixNano() / int64(time.Millisecond) }
{ "pile_set_name": "Github" }
/*

                          Firewall Builder

                 Copyright (C) 2001 NetCitadel, LLC

  Author:  Vadim Kurland     [email protected]

  $Id$

  This program is free software which we release under the GNU General Public
  License. You may redistribute and/or modify this program under the terms
  of that license as published by the Free Software Foundation; either
  version 2 of the License, or (at your option) any later version.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
  GNU General Public License for more details.

  To get a copy of the GNU General Public License, write to the Free Software
  Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA

*/

#include "InterfaceData.h"
#include "fwbuilder/Resources.h"
#include "fwbuilder/InetAddr.h"
#include "fwbuilder/IPv4.h"
#include "fwbuilder/IPv6.h"

#include <iostream>

using namespace libfwbuilder;
using namespace std;

// Default constructor: plain value-initialized interface record.
InterfaceData::InterfaceData() : addr_mask()
{
    ext = false;
    isDyn = false;
    isUnnumbered = false;
    isBridgePort = false;
    securityLevel = 0;
    snmp_type = 0;
    ostatus = 0;
}

// Copy constructor: performs a deep copy of the owned InetAddrMask
// objects, preserving v4/v6-ness of each address.
InterfaceData::InterfaceData(const InterfaceData& other) : addr_mask()
{
    id = other.id;
    name = other.name;
    label = other.label;
    if (other.addr_mask.size())
    {
        for (list<InetAddrMask*>::const_iterator i=other.addr_mask.begin();
             i!=other.addr_mask.end(); ++i)
        {
            InetAddrMask *am;
            const InetAddr *ad = (*i)->getAddressPtr();
            const InetAddr *nm = (*i)->getNetmaskPtr();
            if (ad==nullptr) continue;
            if (ad->isV6())
            {
                am = new Inet6AddrMask();
                am->setAddress(*(ad));
                am->setNetmask(*(nm));
            } else
                am = new InetAddrMask(*(*i));
            addr_mask.push_back(am);
        }
    }
    ext = other.ext;
    isDyn = other.isDyn;
    isUnnumbered = other.isUnnumbered;
    isBridgePort = other.isBridgePort;
    securityLevel = other.securityLevel;
    mac_addr = other.mac_addr;
    snmp_type = other.snmp_type;
    ostatus = other.ostatus;
    // BUG FIX: networkZone was set by the Interface-based constructor but
    // silently dropped by the copy constructor.
    networkZone = other.networkZone;
}

// Construct from a libfwbuilder Interface object, taking the first IPv4
// and first IPv6 address (if any) and the interface's flags/metadata.
InterfaceData::InterfaceData(const Interface &iface) : addr_mask()
{
    id = iface.getId();
    name = iface.getName();
    IPv4 *addr = IPv4::cast(iface.getFirstByType(IPv4::TYPENAME));
    if (addr)
    {
        addr_mask.push_back(new InetAddrMask(*(addr->getAddressPtr()),
                                             *(addr->getNetmaskPtr())));
    }
    IPv6 *addr6 = IPv6::cast(iface.getFirstByType(IPv6::TYPENAME));
    if (addr6)
    {
        addr_mask.push_back(new Inet6AddrMask(*(addr6->getAddressPtr()),
                                              *(addr6->getNetmaskPtr())));
    }
    securityLevel = iface.getSecurityLevel();
    isDyn = iface.isDyn();
    isUnnumbered = iface.isUnnumbered();
    isBridgePort = iface.isBridgePort();
    libfwbuilder::physAddress *pa = iface.getPhysicalAddress();
    if (pa!=nullptr) mac_addr = pa->getPhysAddress();
    label = iface.getLabel();
    networkZone = iface.getStr("network_zone");
}

InterfaceData::~InterfaceData()
{
    // BUG FIX: the heap-allocated InetAddrMask objects were never freed
    // (the old code even carried a "TODO: memory leak!" comment).
    for (list<InetAddrMask*>::iterator i=addr_mask.begin();
         i!=addr_mask.end(); ++i)
    {
        delete *i;
    }
    addr_mask.clear();
}
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html> <head> <meta charset="utf-8"> <title>TableFilter tfoot tests</title> <link rel="stylesheet" href="libs/qunit/qunit.css"> <script src="libs/qunit/qunit.js"></script> <script src="libs/polyfill.js"></script> </head> <body> <table id="demo"> <thead> <tr> <th>From</th> <th>Destination</th> <th>Road Distance (km)</th> <th>By Air (hrs)</th> <th>By Rail (hrs)</th> </tr> </thead> <tfoot> <tr> <td><strong>footer</strong></td> <td></td> <td></td> <td></td> <td></td> </tr> <tr> <td><strong>footer</strong></td> <td></td> <td></td> <td></td> <td></td> </tr> </tfoot> <tbody> <tr> <td><strong>Sydney</strong></td> <td>Adelaide</td> <td>1412</td> <td>1.4</td> <td>25.3</td> </tr> <tr> <td><strong>Sydney</strong></td> <td>Brisbane</td> <td>982</td> <td>1.5</td> <td>16</td> </tr> <tr> <td><strong>Sydney</strong></td> <td>Canberra</td> <td>286</td> <td>.6</td> <td>4.3</td> </tr> <tr> <td><strong>Sydney</strong></td> <td>Melbourne</td> <td>872</td> <td>1.1</td> <td>10.5</td> </tr> <tr> <td><strong>Adelaide</strong></td> <td>Perth</td> <td>2781</td> <td>3.1</td> <td>38</td> </tr> <tr> <td><strong>Adelaide</strong></td> <td>Alice Springs</td> <td>1533</td> <td>2</td> <td>20.25</td> </tr> <tr> <td><strong>Adelaide</strong></td> <td>Brisbane</td> <td>2045</td> <td>2.15</td> <td>40</td> </tr> </tbody> </table> <script src="../dist/tablefilter/tablefilter.js"></script> <script src="test-tfoot.js"></script> <div id="qunit"></div> <div id="qunit-fixture"></div> </body> </html>
{ "pile_set_name": "Github" }
.comment-content a { word-wrap: break-word; } .bypostauthor { display: block; }
{ "pile_set_name": "Github" }
# SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR THE PACKAGE'S COPYRIGHT HOLDER # This file is distributed under the same license as the openmediavault-forkeddaapd package. # # Translators: # Nelson Rosado <[email protected]>, 2014 # Nelson Rosado <[email protected]>, 2012 msgid "" msgstr "" "Project-Id-Version: openmediavault\n" "Report-Msgid-Bugs-To: \n" "POT-Creation-Date: 2020-01-07 17:31+0100\n" "PO-Revision-Date: 2017-09-19 13:03+0000\n" "Last-Translator: Nelson Rosado <[email protected]>\n" "Language-Team: Portuguese (Portugal) (http://www.transifex.com/votdev/openmediavault/language/pt_PT/)\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "Language: pt_PT\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" msgid "A password is required to access the library." msgstr "É necessária a senha de acesso para aceder à biblioteca." msgid "Authentication" msgstr "Autenticação" msgid "Enable" msgstr "Ativo" msgid "General settings" msgstr "Definições Gerais" msgid "Library name" msgstr "Nome da biblioteca" msgid "Password" msgstr "Senha de acesso" msgid "Port" msgstr "Porta" msgid "Port to listen on." msgstr "Porta para escutar." msgid "Settings" msgstr "Definições" msgid "Shared folder" msgstr "Pasta partilhada" msgid "The location of the media files to share." msgstr "A localização dos ficheiros multimédia para partilha." msgid "The name of the library as displayed by the clients." msgstr "O nome da biblioteca que aparece aos clientes." msgid "iTunes/DAAP" msgstr "iTunes/DAAP"
{ "pile_set_name": "Github" }
// MainDlg.cpp : implementation of the CMainDlg class
//
/////////////////////////////////////////////////////////////////////////////

#include "stdafx.h"
#include "ChooseAvatar.h"
#include "FileHelper.h"
#include "SProfilePicture.h"

// Subscribes handler z of this object to event y fired by SOUI widget x.
#define SUBSCRIBE(x,y,z) (x)->GetEventSet()->subscribeEvent(y,Subscriber(&z,this))

// Dialog for choosing and cropping a user avatar; the window tree is loaded
// from the dlg_choose_avatar layout resource.
ChooseAvatarDlg::ChooseAvatarDlg() : SHostDialog(UIRES.LAYOUT.dlg_choose_avatar)
{
	m_bLayoutInited = FALSE;
}

ChooseAvatarDlg::~ChooseAvatarDlg()
{
}

int ChooseAvatarDlg::OnCreate(LPCREATESTRUCT lpCreateStruct)
{
	// Pass the message on so default creation handling still runs.
	SetMsgHandled(FALSE);
	return 0;
}

// Caches the child controls and subscribes to selection-frame changes of the
// picture-cropping control so the previews can track the selection.
BOOL ChooseAvatarDlg::OnInitDialog(HWND hWnd, LPARAM lParam)
{
	m_bLayoutInited = TRUE;

	m_pHeadPic = FindChildByName2<SProfilePicture>(L"picchoose");
	m_img_HeadPic_Large = FindChildByName2<SImageWnd>(L"HeadPic_Large");
	m_img_HeadPic_Middle = FindChildByName2<SImageWnd>(L"HeadPic_Middle");
	m_img_HeadPic_Small = FindChildByName2<SImageWnd>(L"HeadPic_Small");

	SUBSCRIBE(m_pHeadPic, SProfilePicture::EventSelFrameChange::EventID, ChooseAvatarDlg::OnPicFrameChange);
	return 0;
}

// Mirrors the currently selected crop frame into the three preview images
// (large/middle/small).
bool ChooseAvatarDlg::OnPicFrameChange(SOUI::EventArgs *pEvt)
{
	SProfilePicture::EventSelFrameChange* pFrameEvt = (SProfilePicture::EventSelFrameChange*)pEvt;
	if (pFrameEvt->iBmp)
	{
		m_img_HeadPic_Large->SetImage(pFrameEvt->iBmp);
		m_img_HeadPic_Middle->SetImage(pFrameEvt->iBmp);
		m_img_HeadPic_Small->SetImage(pFrameEvt->iBmp);
	}
	return true;
}

// Opens a file-picker for an image and, if the file exists, hands it to the
// cropping control. The _T("...") literals are user-facing Chinese strings
// (filter: "Images (*.jpg;*.jpeg;*.png)", title: "Choose avatar") and are
// deliberately left unchanged.
void ChooseAvatarDlg::OnChooseHeadPic()
{
	CFileDialogEx openDlg(TRUE, _T("gif"), 0, OFN_HIDEREADONLY | OFN_OVERWRITEPROMPT, _T("图片(*.jpg;*.jpeg;*.png)\0*.jpg;*.jpeg;*.png\0\0"));
	openDlg.m_ofn.lpstrTitle = _T("选择头像");

	if (openDlg.DoModal() == IDOK)
	{
		SStringT ttt(openDlg.m_szFileName);
		BOOL res = openDlg.m_bOpenFileDialog;  // NOTE(review): res and ooo are unused
		SStringT ooo(openDlg.m_szFileTitle);
		if (!PathFileExists(ttt))
		{
			// "Image does not exist" warning box.
			SMessageBox(m_hWnd, _T("图片不存在"), _T("提示"), MB_ICONWARNING);
			return;
		}
		// Disabled file-size check ("image size must not be empty") kept for reference:
// 		struct _stat info;
// 		_stat(S_CT2A(ttt), &info);
// 		long filesize = info.st_size;
// 
// 		if (filesize <= 0)
// 		{
// 			SMessageBox(m_hWnd, _T("图片大小不能为空"), _T("提示"), MB_ICONWARNING);
// 			return;
// 		}

		m_pHeadPic->SetHeadPic(ttt);
	}
}

void ChooseAvatarDlg::OnSaveHeadPic()
{
	// Save the cropped avatar (not implemented yet).
}

// TODO: message map
void ChooseAvatarDlg::OnClose()
{
	EndDialog(IDCANCEL);
}
void ChooseAvatarDlg::OnMaximize()
{
	SendMessage(WM_SYSCOMMAND, SC_MAXIMIZE);
}
void ChooseAvatarDlg::OnRestore()
{
	SendMessage(WM_SYSCOMMAND, SC_RESTORE);
}
void ChooseAvatarDlg::OnMinimize()
{
	SendMessage(WM_SYSCOMMAND, SC_MINIMIZE);
}

// Swaps the maximize/restore caption buttons to match the new window state.
void ChooseAvatarDlg::OnSize(UINT nType, CSize size)
{
	SetMsgHandled(FALSE);
	if (!m_bLayoutInited) return;

	SWindow *pBtnMax = FindChildByName(L"btn_max");
	SWindow *pBtnRestore = FindChildByName(L"btn_restore");
	if (!pBtnMax || !pBtnRestore) return;

	if (nType == SIZE_MAXIMIZED)
	{
		pBtnRestore->SetVisible(TRUE);
		pBtnMax->SetVisible(FALSE);
	}
	else if (nType == SIZE_RESTORED)
	{
		pBtnRestore->SetVisible(FALSE);
		pBtnMax->SetVisible(TRUE);
	}
}
{ "pile_set_name": "Github" }
auto: REMOVE_FILE,CREATE_FILE,MUNGE,COMPARE_FILE %TESTID%.stdout %TESTID%.stderr pre action: SET_ENV SWAKS_OPT_6 '' test action: CMD_CAPTURE %SWAKS% --dump TRANSPORT --to [email protected] --server ser.ver \ --no-6
{ "pile_set_name": "Github" }
--- Makefile.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) Index: libsndfile-1.0.18/Makefile.in =================================================================== --- libsndfile-1.0.18.orig/Makefile.in +++ libsndfile-1.0.18/Makefile.in @@ -260,7 +260,7 @@ top_srcdir = @top_srcdir@ DISTCHECK_CONFIGURE_FLAGS = --enable-gcc-werror @BUILD_OCTAVE_MOD_TRUE@octave_dir = Octave -SUBDIRS = M4 man doc Win32 src $(octave_dir) examples regtest tests programs +SUBDIRS = src DIST_SUBDIRS = M4 man doc Win32 src Octave examples regtest tests programs EXTRA_DIST = libsndfile.spec.in sndfile.pc.in Mingw-make-dist.sh pkgconfigdir = $(libdir)/pkgconfig
{ "pile_set_name": "Github" }
// Copyright 2014 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. #include "chromeos/login/auth/auth_attempt_state_resolver.h" namespace chromeos { AuthAttemptStateResolver::AuthAttemptStateResolver() = default; AuthAttemptStateResolver::~AuthAttemptStateResolver() = default; } // namespace chromeos
{ "pile_set_name": "Github" }
% This is almost same with cowboy_stream_h, but cowboy has two problems:
% 1. cowboy_req:read_body's length can't be auto and period can't be infinity together
% 2. cowboy_stream_h can't handle when body is empty and stream is fin
-module(grpc_stream_h).
-behavior(cowboy_stream).

-ifdef(OTP_RELEASE).
%% erlang:get_stacktrace/0 is deprecated on modern OTP; request_process/3
%% still calls it, so silence the deprecation warning on those releases.
-compile({nowarn_deprecated_function, [{erlang, get_stacktrace, 0}]}).
-endif.

%% cowboy_stream callbacks.
-export([init/3]).
-export([data/4]).
-export([info/3]).
-export([terminate/3]).
-export([early_error/5]).

%% Internal entry points, invoked via proc_lib spawn/hibernate.
-export([request_process/3]).
-export([resume/5]).

%% Per-stream state threaded through the cowboy_stream callbacks.
-record(state, {
	next :: any(),                                     %% state of the next handler in the stream chain
	ref = undefined :: ranch:ref(),                    %% listener ref, used in crash logs
	pid = undefined :: pid(),                          %% request process
	expect = undefined :: undefined | continue,        %% parsed "expect" header, if any
	read_body_pid = undefined :: pid() | undefined,    %% process waiting on read_body
	read_body_ref = undefined :: reference() | undefined,
	read_body_timer_ref = undefined :: reference() | undefined,
	read_body_length = 0 :: non_neg_integer() | infinity | auto,
	read_body_is_fin = nofin :: cowboy_stream:fin(),
	read_body_buffer = <<>> :: binary(),               %% body bytes received but not yet delivered
	body_length = 0 :: non_neg_integer(),              %% total body bytes seen so far
	stream_body_pid = undefined :: pid() | undefined,  %% process blocked on data_ack backpressure
	stream_body_status = normal :: normal | blocking | blocked
}).

-spec init(cowboy_stream:streamid(), cowboy_req:req(), cowboy:opts())
	-> {[{spawn, pid(), timeout()}], #state{}}.
%% Spawns the linked request process that runs the middleware chain, then
%% initializes the rest of the stream handler chain.
init(StreamID, Req=#{ref := Ref}, Opts) ->
	Env = maps:get(env, Opts, #{}),
	Middlewares = maps:get(middlewares, Opts, [cowboy_router, cowboy_handler]),
	Shutdown = maps:get(shutdown_timeout, Opts, 5000),
	Pid = proc_lib:spawn_link(?MODULE, request_process, [Req, Env, Middlewares]),
	Expect = expect(Req),
	{Commands, Next} = cowboy_stream:init(StreamID, Req, Opts),
	{[{spawn, Pid, Shutdown}|Commands], #state{next=Next, ref=Ref, pid=Pid, expect=Expect}}.

%% Ignore the expect header in HTTP/1.0.
expect(#{version := 'HTTP/1.0'}) ->
	undefined;
expect(Req) ->
	%% An unparsable expect header is treated as absent rather than an error.
	try cowboy_req:parse_header(<<"expect">>, Req) of
		Expect ->
			Expect
	catch _:_ ->
		undefined
	end.
%% If we receive data and stream is waiting for data: %% If we accumulated enough data or IsFin=fin, send it. %% If we are in auto mode, send it and update flow control. %% If not, buffer it. %% If not, buffer it. %% %% We always reset the expect field when we receive data, %% since the client started sending the request body before %% we could send a 100 continue response. -spec data(cowboy_stream:streamid(), cowboy_stream:fin(), cowboy_req:resp_body(), State) -> {cowboy_stream:commands(), State} when State::#state{}. %% Stream isn't waiting for data. data(StreamID, IsFin, Data, State=#state{ read_body_ref=undefined, read_body_buffer=Buffer, body_length=BodyLen}) -> Commands = case byte_size(Data) of 0 -> []; Size -> [{flow, Size}] end, do_data(StreamID, IsFin, Data, Commands, State#state{ expect=undefined, read_body_is_fin=IsFin, read_body_buffer= << Buffer/binary, Data/binary >>, body_length=BodyLen + byte_size(Data) }); %% Stream is waiting for data using auto mode. % GRPC: We don't pass auto, but treat it as auto data(StreamID, IsFin, Data, State=#state{read_body_pid=Pid, read_body_ref=Ref, body_length=BodyLen}) -> send_request_body(Pid, Ref, IsFin, BodyLen, Data), Commands = case byte_size(Data) of 0 -> []; Size -> [{flow, Size}] end, do_data(StreamID, IsFin, Data, Commands, State#state{ read_body_ref=undefined, body_length=BodyLen }). %% % %% There is no buffering done in auto mode. % data(StreamID, IsFin, Data, State=#state{read_body_pid=Pid, read_body_ref=Ref, % read_body_length=auto, body_length=BodyLen}) -> % send_request_body(Pid, Ref, IsFin, BodyLen, Data), % do_data(StreamID, IsFin, Data, [{flow, byte_size(Data)}], State#state{ % read_body_ref=undefined, % body_length=BodyLen % }); % %% Stream is waiting for data but we didn't receive enough to send yet. 
% data(StreamID, IsFin=nofin, Data, State=#state{ % read_body_length=ReadLen, read_body_buffer=Buffer, body_length=BodyLen}) % when byte_size(Data) + byte_size(Buffer) < ReadLen -> % do_data(StreamID, IsFin, Data, [], State#state{ % expect=undefined, % read_body_buffer= << Buffer/binary, Data/binary >>, % body_length=BodyLen + byte_size(Data) % }); % %% Stream is waiting for data and we received enough to send. % data(StreamID, IsFin, Data, State=#state{read_body_pid=Pid, read_body_ref=Ref, % read_body_timer_ref=TRef, read_body_buffer=Buffer, body_length=BodyLen0}) -> % BodyLen = BodyLen0 + byte_size(Data), % ok = erlang:cancel_timer(TRef, [{async, true}, {info, false}]), % send_request_body(Pid, Ref, IsFin, BodyLen, <<Buffer/binary, Data/binary>>), % do_data(StreamID, IsFin, Data, [], State#state{ % expect=undefined, % read_body_ref=undefined, % read_body_timer_ref=undefined, % read_body_buffer= <<>>, % body_length=BodyLen % }). do_data(StreamID, IsFin, Data, Commands1, State=#state{next=Next0}) -> {Commands2, Next} = cowboy_stream:data(StreamID, IsFin, Data, Next0), {Commands1 ++ Commands2, State#state{next=Next}}. -spec info(cowboy_stream:streamid(), any(), State) -> {cowboy_stream:commands(), State} when State::#state{}. info(StreamID, Info={'EXIT', Pid, normal}, State=#state{pid=Pid}) -> do_info(StreamID, Info, [stop], State); info(StreamID, Info={'EXIT', Pid, {{request_error, Reason, _HumanReadable}, _}}, State=#state{pid=Pid}) -> Status = case Reason of timeout -> 408; payload_too_large -> 413; _ -> 400 end, %% @todo Headers? Details in body? Log the crash? More stuff in debug only? 
do_info(StreamID, Info, [ {error_response, Status, #{<<"content-length">> => <<"0">>}, <<>>}, stop ], State); info(StreamID, Exit={'EXIT', Pid, {Reason, Stacktrace}}, State=#state{ref=Ref, pid=Pid}) -> Commands0 = [{internal_error, Exit, 'Stream process crashed.'}], Commands = case Reason of normal -> Commands0; shutdown -> Commands0; {shutdown, _} -> Commands0; _ -> [{log, error, "Ranch listener ~p, connection process ~p, stream ~p " "had its request process ~p exit with reason " "~999999p and stacktrace ~999999p~n", [Ref, self(), StreamID, Pid, Reason, Stacktrace]} |Commands0] end, do_info(StreamID, Exit, [ {error_response, 500, #{<<"content-length">> => <<"0">>}, <<>>} |Commands], State); % GRPC: We don't pass auto, but treat it as auto info(StreamID, Info={read_body, Pid, Ref, _, _}, State=#state{ read_body_is_fin=fin, read_body_buffer= <<>>, body_length=BodyLen}) -> send_request_body(Pid, Ref, fin, BodyLen, <<>>), do_info(StreamID, Info, [], State); info(StreamID, Info={read_body, Pid, Ref, _, _}, State=#state{read_body_buffer= <<>>}) -> do_info(StreamID, Info, [], State#state{ read_body_pid=Pid, read_body_ref=Ref }); %% Request body, auto mode, body buffered or complete. info(StreamID, Info={read_body, Pid, Ref, _, _}, State=#state{ read_body_is_fin=IsFin, read_body_buffer=Buffer, body_length=BodyLen}) -> send_request_body(Pid, Ref, IsFin, BodyLen, Buffer), do_info(StreamID, Info, [{flow, byte_size(Buffer)}], State#state{read_body_buffer= <<>>}); % GRPC end %% Request body, auto mode, no body buffered. info(StreamID, Info={read_body, Pid, Ref, auto, infinity}, State=#state{read_body_buffer= <<>>}) -> do_info(StreamID, Info, [], State#state{ read_body_pid=Pid, read_body_ref=Ref, read_body_length=auto }); %% Request body, auto mode, body buffered or complete. 
info(StreamID, Info={read_body, Pid, Ref, auto, infinity}, State=#state{ read_body_is_fin=IsFin, read_body_buffer=Buffer, body_length=BodyLen}) -> send_request_body(Pid, Ref, IsFin, BodyLen, Buffer), do_info(StreamID, Info, [{flow, byte_size(Buffer)}], State#state{read_body_buffer= <<>>}); %% Request body, body buffered large enough or complete. %% %% We do not send a 100 continue response if the client %% already started sending the body. info(StreamID, Info={read_body, Pid, Ref, Length, _}, State=#state{ read_body_is_fin=IsFin, read_body_buffer=Buffer, body_length=BodyLen}) when IsFin =:= fin; byte_size(Buffer) >= Length -> send_request_body(Pid, Ref, IsFin, BodyLen, Buffer), do_info(StreamID, Info, [], State#state{read_body_buffer= <<>>}); %% Request body, not enough to send yet. info(StreamID, Info={read_body, Pid, Ref, Length, Period}, State=#state{expect=Expect}) -> Commands = case Expect of continue -> [{inform, 100, #{}}, {flow, Length}]; undefined -> [{flow, Length}] end, TRef = erlang:send_after(Period, self(), {{self(), StreamID}, {read_body_timeout, Ref}}), do_info(StreamID, Info, Commands, State#state{ read_body_pid=Pid, read_body_ref=Ref, read_body_timer_ref=TRef, read_body_length=Length }); %% Request body reading timeout; send what we got. info(StreamID, Info={read_body_timeout, Ref}, State=#state{read_body_pid=Pid, read_body_ref=Ref, read_body_is_fin=IsFin, read_body_buffer=Buffer, body_length=BodyLen}) -> send_request_body(Pid, Ref, IsFin, BodyLen, Buffer), do_info(StreamID, Info, [], State#state{ read_body_ref=undefined, read_body_timer_ref=undefined, read_body_buffer= <<>> }); info(StreamID, Info={read_body_timeout, _}, State) -> do_info(StreamID, Info, [], State); %% Response. %% %% We reset the expect field when a 100 continue response %% is sent or when any final response is sent. 
info(StreamID, Inform={inform, Status, _}, State0) -> State = case cow_http:status_to_integer(Status) of 100 -> State0#state{expect=undefined}; _ -> State0 end, do_info(StreamID, Inform, [Inform], State); info(StreamID, Response={response, _, _, _}, State) -> do_info(StreamID, Response, [Response], State#state{expect=undefined}); info(StreamID, Headers={headers, _, _}, State) -> do_info(StreamID, Headers, [Headers], State#state{expect=undefined}); %% Sending data involves the data message, the stream_buffer_full alarm %% and the connection_buffer_full alarm. We stop sending acks when an alarm is on. %% %% We only apply backpressure when the message includes a pid. Otherwise %% it is a message from Cowboy, or the user circumventing the backpressure. %% %% We currently do not support sending data from multiple processes concurrently. info(StreamID, Data={data, _, _}, State) -> do_info(StreamID, Data, [Data], State); info(StreamID, Data0={data, Pid, _, _}, State0=#state{stream_body_status=Status}) -> State = case Status of normal -> Pid ! {data_ack, self()}, State0; blocking -> State0#state{stream_body_pid=Pid, stream_body_status=blocked}; blocked -> State0 end, Data = erlang:delete_element(2, Data0), do_info(StreamID, Data, [Data], State); info(StreamID, Alarm={alarm, Name, on}, State) when Name =:= connection_buffer_full; Name =:= stream_buffer_full -> do_info(StreamID, Alarm, [], State#state{stream_body_status=blocking}); info(StreamID, Alarm={alarm, Name, off}, State=#state{stream_body_pid=Pid, stream_body_status=Status}) when Name =:= connection_buffer_full; Name =:= stream_buffer_full -> _ = case Status of normal -> ok; blocking -> ok; blocked -> Pid ! 
{data_ack, self()} end, do_info(StreamID, Alarm, [], State#state{stream_body_pid=undefined, stream_body_status=normal}); info(StreamID, Trailers={trailers, _}, State) -> do_info(StreamID, Trailers, [Trailers], State); info(StreamID, Push={push, _, _, _, _, _, _, _}, State) -> do_info(StreamID, Push, [Push], State); info(StreamID, SwitchProtocol={switch_protocol, _, _, _}, State) -> do_info(StreamID, SwitchProtocol, [SwitchProtocol], State#state{expect=undefined}); %% Convert the set_options message to a command. info(StreamID, SetOptions={set_options, _}, State) -> do_info(StreamID, SetOptions, [SetOptions], State); %% Unknown message, either stray or meant for a handler down the line. info(StreamID, Info, State) -> do_info(StreamID, Info, [], State). do_info(StreamID, Info, Commands1, State0=#state{next=Next0}) -> {Commands2, Next} = cowboy_stream:info(StreamID, Info, Next0), {Commands1 ++ Commands2, State0#state{next=Next}}. -spec terminate(cowboy_stream:streamid(), cowboy_stream:reason(), #state{}) -> ok. terminate(StreamID, Reason, #state{next=Next}) -> cowboy_stream:terminate(StreamID, Reason, Next). -spec early_error(cowboy_stream:streamid(), cowboy_stream:reason(), cowboy_stream:partial_req(), Resp, cowboy:opts()) -> Resp when Resp::cowboy_stream:resp_command(). early_error(StreamID, Reason, PartialReq, Resp, Opts) -> cowboy_stream:early_error(StreamID, Reason, PartialReq, Resp, Opts). send_request_body(Pid, Ref, nofin, _, Data) -> Pid ! {request_body, Ref, nofin, Data}, ok; send_request_body(Pid, Ref, fin, BodyLen, Data) -> Pid ! {request_body, Ref, fin, BodyLen, Data}, ok. %% Request process. %% We catch all exceptions in order to add the stacktrace to %% the exit reason as it is not propagated by proc_lib otherwise %% and therefore not present in the 'EXIT' message. We want %% the stacktrace in order to simplify debugging of errors. 
%%
%% This + the behavior in proc_lib means that we will get a
%% {Reason, Stacktrace} tuple for every exceptions, instead of
%% just for errors and throws.
%%
%% @todo Better spec.
-spec request_process(cowboy_req:req(), cowboy_middleware:env(), [module()]) -> ok.
%% Entry point of the request process: runs the middleware chain and
%% rethrows any exception as exit({Reason, Stacktrace}) so the stream
%% handler can log the stacktrace from the resulting 'EXIT' message.
request_process(Req, Env, Middlewares) ->
	OTP = erlang:system_info(otp_release),
	try
		execute(Req, Env, Middlewares)
	catch
		exit:Reason ->
			Stacktrace = erlang:get_stacktrace(),
			erlang:raise(exit, {Reason, Stacktrace}, Stacktrace);
		%% OTP 19 does not propagate any exception stacktraces,
		%% we therefore add it for every class of exception.
		_:Reason when OTP =:= "19" ->
			Stacktrace = erlang:get_stacktrace(),
			erlang:raise(exit, {Reason, Stacktrace}, Stacktrace);
		%% @todo I don't think this clause is necessary.
		Class:Reason ->
			erlang:raise(Class, Reason, erlang:get_stacktrace())
	end.

%% Runs each middleware in turn. A middleware may continue ({ok, ...}),
%% suspend this process ({suspend, MFA} -> hibernate until resume/5), or
%% stop the request entirely ({stop, Req}).
execute(_, _, []) ->
	ok;
execute(Req, Env, [Middleware|Tail]) ->
	case Middleware:execute(Req, Env) of
		{ok, Req2, Env2} ->
			execute(Req2, Env2, Tail);
		{suspend, Module, Function, Args} ->
			proc_lib:hibernate(?MODULE, resume, [Env, Tail, Module, Function, Args]);
		{stop, _Req2} ->
			ok
	end.

-spec resume(cowboy_middleware:env(), [module()], module(), atom(), [any()]) -> ok.
%% Continuation point after hibernation: reapplies the suspended MFA and
%% feeds its result back into the middleware loop.
resume(Env, Tail, Module, Function, Args) ->
	case apply(Module, Function, Args) of
		{ok, Req2, Env2} ->
			execute(Req2, Env2, Tail);
		{suspend, Module2, Function2, Args2} ->
			proc_lib:hibernate(?MODULE, resume, [Env, Tail, Module2, Function2, Args2]);
		{stop, _Req2} ->
			ok
	end.
{ "pile_set_name": "Github" }
package org.aspectj.aopalliance.tests; import junit.framework.Test; import junit.framework.TestSuite; public class AllTests { public static Test suite() { TestSuite suite = new TestSuite( "Test for org.aspectj.aopalliance.tests"); //$JUnit-BEGIN$ suite.addTestSuite(JoinPointClosureTest.class); suite.addTestSuite(InvocationJoinPointClosureTest.class); suite.addTestSuite(MethodInvocationClosureTest.class); suite.addTestSuite(ConstructorInvocationClosureTest.class); suite.addTestSuite(AOPAllianceAdapterTest.class); //$JUnit-END$ return suite; } }
{ "pile_set_name": "Github" }
/** * @license * Copyright 2013 Google Inc. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * @fileoverview DSA for OpenPGP as described in RFC 4880 and FIPS 186. * @author [email protected] (Thai Duong) */ goog.provide('e2e.signer.Dsa'); goog.require('e2e.AlgorithmImpl'); goog.require('e2e.BigNum'); goog.require('e2e.BigPrimeNum'); goog.require('e2e.async.Result'); goog.require('e2e.hash.Algorithm'); goog.require('e2e.hash.factory'); goog.require('e2e.openpgp.error.InvalidArgumentsError'); goog.require('e2e.openpgp.error.UnsupportedError'); goog.require('e2e.random'); goog.require('e2e.signer.Algorithm'); goog.require('e2e.signer.Signer'); goog.require('e2e.signer.factory'); goog.require('goog.array'); goog.require('goog.asserts'); /** * Representation of a DSA public or private key. * @param {e2e.signer.Algorithm} algorithm The algorithm to retrieve. * It must be e2e.signer.Algorithm.DSA. * @param {e2e.signer.key.Key=} opt_key The DSA key as specified in * RFC 4480. * @constructor * @implements {e2e.signer.Signer} * @extends {e2e.AlgorithmImpl} */ e2e.signer.Dsa = function(algorithm, opt_key) { goog.asserts.assert(algorithm == e2e.signer.Algorithm.DSA, 'Algorithm must be DSA.'); goog.base(this, e2e.signer.Algorithm.DSA, opt_key); }; goog.inherits(e2e.signer.Dsa, e2e.AlgorithmImpl); /** * List of Hash algorithms that are allowed to be used for DSA for a given q * bitlength. See {@link https://tools.ietf.org/html/rfc4880#section-13.6}. 
* @type {!Object<number,!Array<!e2e.hash.Algorithm>>} * @private */ e2e.signer.Dsa.ALLOWED_HASHES_ = { 160: [ e2e.hash.Algorithm.SHA1, e2e.hash.Algorithm.SHA224, e2e.hash.Algorithm.SHA256, e2e.hash.Algorithm.SHA384, e2e.hash.Algorithm.SHA512 ], 224: [ e2e.hash.Algorithm.SHA224, e2e.hash.Algorithm.SHA256, e2e.hash.Algorithm.SHA384, e2e.hash.Algorithm.SHA512 ], 256: [ e2e.hash.Algorithm.SHA256, e2e.hash.Algorithm.SHA384, e2e.hash.Algorithm.SHA512 ] }; /** * The prime modulus. This must be a prime number. * @private {e2e.BigPrimeNum} */ e2e.signer.Dsa.prototype.p_; /** * The prime order of the subgroup. This is a prime divisor of (p - 1). * @private {e2e.BigPrimeNum} */ e2e.signer.Dsa.prototype.q_; /** * The generator of the subgroup of order q. 1 < g < p. * @private {e2e.BigNum} */ e2e.signer.Dsa.prototype.g_; /** * The private key. x is a randomly or pseudorandomly generated integer, * such that x is in [1, q - 1]. * @private {e2e.BigNum} */ e2e.signer.Dsa.prototype.x_; /** * The public key, where y = g^x (mod p). * @private {e2e.BigNum} */ e2e.signer.Dsa.prototype.y_; /** * The hash function that should be used. This is selected based on the bit * lengths p and q. * @private {!e2e.hash.Hash} */ e2e.signer.Dsa.prototype.hash_; /** @override */ e2e.signer.Dsa.prototype.getHash = function() { return this.hash_; }; /** @override */ e2e.signer.Dsa.prototype.getHashAlgorithm = function() { return this.hash_.algorithm; }; /** @override */ e2e.signer.Dsa.prototype.setHash = function(hash) { var lenQ = this.q_.getBitLength(); var algorithm = hash.algorithm; if (!e2e.signer.Dsa.ALLOWED_HASHES_[lenQ] || !goog.array.contains(e2e.signer.Dsa.ALLOWED_HASHES_[lenQ], algorithm)) { throw new e2e.openpgp.error.InvalidArgumentsError( 'Given hash algorithm is disallowed for this DSA key: ' + algorithm); } this.hash_ = hash; }; /** * Sets the DSA public key and/or private key. 
* @override */ e2e.signer.Dsa.prototype.setKey = function(keyArg, opt_keySize) { var key = /** @type {!e2e.signer.key.Dsa} */ (keyArg); goog.asserts.assertArray(key['p'], 'The prime modulus should be defined.'); this.p_ = new e2e.BigPrimeNum(key['p']); var lenP = this.p_.getBitLength(); goog.asserts.assertArray(key['q'], 'The prime order should be defined.'); this.q_ = new e2e.BigPrimeNum(key['q']); var lenQ = this.q_.getBitLength(); switch (lenP) { case 1024: if (lenQ != 160) { throw new e2e.openpgp.error.InvalidArgumentsError( 'q must be 160-bit when p is 1024-bit.'); } this.hash_ = e2e.hash.factory.require( e2e.hash.Algorithm.SHA1); break; case 2048: if (lenQ == 224) { this.hash_ = e2e.hash.factory.require( e2e.hash.Algorithm.SHA224); } else if (lenQ == 256) { this.hash_ = e2e.hash.factory.require( e2e.hash.Algorithm.SHA256); } else { throw new e2e.openpgp.error.InvalidArgumentsError( 'q must be 224-bit or 256-bit when p is 2048-bit.'); } break; case 3072: if (lenQ != 256) { throw new e2e.openpgp.error.InvalidArgumentsError( 'q must be 256-bit when p is 3072-bit.'); } this.hash_ = e2e.hash.factory.require( e2e.hash.Algorithm.SHA256); break; default: throw new e2e.openpgp.error.UnsupportedError( 'The bit lengths of p and q are not supported.'); break; } var pminus1 = this.p_.subtract(e2e.BigNum.ONE); // q should be a divisor of p - 1. if (this.q_.mod(pminus1).isEqual( e2e.BigNum.ZERO)) { throw new e2e.openpgp.error.InvalidArgumentsError( 'q must be a divisor of p - 1.'); } goog.asserts.assertArray(key['g'], 'The generator should be defined.'); this.g_ = new e2e.BigNum(key['g']); // 1 < g < p and g^q (mod p) == 1. 
if (!this.g_.isBetween(e2e.BigNum.ONE, this.p_) || !this.p_.modPower(this.g_, key['q']).isEqual(e2e.BigNum.ONE)) { throw new e2e.openpgp.error.InvalidArgumentsError( 'Invalid generator.'); } if (!goog.isDefAndNotNull(key['x']) && !goog.isDefAndNotNull(key['y'])) { goog.asserts.fail('Either public key or private key should be defined.'); } if (goog.isDefAndNotNull(key['x'])) { this.x_ = new e2e.BigNum(key['x']); if (!this.x_.isBetween(e2e.BigNum.ZERO, this.q_)) { throw new e2e.openpgp.error.InvalidArgumentsError( 'x must be in range (0, q).'); } } if (goog.isDefAndNotNull(key['y'])) { this.y_ = new e2e.BigNum(key['y']); // NIST SP 800-89 checks for DSA. // 1 < y < p-1 if (!this.y_.isBetween(e2e.BigNum.ONE, pminus1)) { throw new e2e.openpgp.error.InvalidArgumentsError( 'y must be in the range(1, p-1).'); } // y^q = 1 (mod p). if (!this.p_.modPower(this.y_, this.q_).isEqual(e2e.BigNum.ONE)) { throw new e2e.openpgp.error.InvalidArgumentsError( 'Invalid public key.'); } if (goog.isDefAndNotNull(key['x'])) { // y == g^x (mod p). if (!this.p_.modPower(this.g_, key['x']).isEqual(this.y_)) { throw new e2e.openpgp.error.InvalidArgumentsError( 'Invalid public key.'); } } } // Save key material to serialize later the key. goog.base(this, 'setKey', key); }; /** @inheritDoc */ e2e.signer.Dsa.prototype.sign = function(m) { /** @type {!e2e.signer.signature.Signature} */ var sig; do { var k = this.generatePerMessageSecret_(); sig = this.signWithNonce_(m, k); var r = new e2e.BigNum(sig['r']); var s = new e2e.BigNum(sig['s']); } while (r.isEqual(e2e.BigNum.ZERO) || s.isEqual(e2e.BigNum.ZERO)); return e2e.async.Result.toResult(sig); }; /** * Exports the sign function for testing. * @param {!e2e.ByteArray} m The message to be signed. * @param {!e2e.BigNum} k The per-message secret. * @return {!e2e.async.Result.<!e2e.signer.signature.Signature>} The result of * signing. 
*/ e2e.signer.Dsa.prototype.signForTestingOnly = function(m, k) { return e2e.async.Result.toResult(this.signWithNonce_(m, k)); }; /** @inheritDoc */ e2e.signer.Dsa.prototype.verify = function(m, sig) { goog.asserts.assertObject(this.p_, 'The prime modulus should be defined.'); goog.asserts.assertObject(this.q_, 'The prime order should be defined.'); goog.asserts.assertObject(this.g_, 'The order should be defined.'); goog.asserts.assertObject(this.y_, 'The public key should be defined.'); var r = new e2e.BigNum(sig['r']); var s = new e2e.BigNum(sig['s']); if (!r.isBetween(e2e.BigNum.ZERO, this.q_) || !s.isBetween(e2e.BigNum.ZERO, this.q_)) { return e2e.async.Result.toResult(false); } var w = this.q_.modInverse(s); var z = new e2e.BigNum(this.hashWithTruncation_(m)); var u1 = this.q_.modMultiply(z.mod(this.q_), w); // z may be >= q_ var u2 = this.q_.modMultiply(r, w); var v = this.p_.modMultiply(this.p_.modPower(this.g_, u1), this.p_.modPower(this.y_, u2)).mod(this.q_); return e2e.async.Result.toResult(v.isEqual(r)); }; /** * Generates the DSA signature using the provided per-message secret. * @param {!e2e.ByteArray} m The message to be signed. * @param {!e2e.BigNum} k The per-message secret. * @return {!e2e.signer.signature.Signature} * @private */ e2e.signer.Dsa.prototype.signWithNonce_ = function(m, k) { goog.asserts.assertObject(this.p_, 'The prime modulus should be defined.'); goog.asserts.assertObject(this.q_, 'The prime order should be defined.'); goog.asserts.assertObject(this.g_, 'The order should be defined.'); goog.asserts.assertObject(this.x_, 'The private key should be defined.'); // Sanity check on the per-message nonce that it's in [1, q-1]. if (!k.isBetween(e2e.BigNum.ZERO, this.q_)) { throw new e2e.openpgp.error.InvalidArgumentsError( 'Failed to sign message: invalid per-message nonce.'); } // r = (g^k mod p) mod q. 
var r = this.p_.modPower(this.g_, k).mod(this.q_); var hashValue = this.hashWithTruncation_(m); var z = new e2e.BigNum(hashValue); // s = (k^{-1} (z + xr)) mod q. var tmp = z.add(this.q_.modMultiply(this.x_, r)).mod(this.q_); var s = this.q_.modMultiply(this.q_.modInverse(k), tmp); return { 'r': r.toByteArray(), 's': s.toByteArray(), 'hashValue': hashValue }; }; /** * Generates a random number used as the per-message secret in DSA. * @return {!e2e.BigNum} * @private */ e2e.signer.Dsa.prototype.generatePerMessageSecret_ = function() { goog.asserts.assertObject(this.q_, 'Cannot generate random per-message secret: q should be defined.'); // 64 more bits are requested from the PRNG than are needed for this nonce // to avoid bias in the modular reduction in the last step of this function. // Otherwise this might leak a fraction of a bit of the nonce, and that's // enough for Bleichenbacher to steal the private key. var nonceLength = Math.ceil((this.q_.getBitLength() + 64) / 8); // OpenPGP supports only 3 key sizes in bits: 160, 224 and 256, so double // check nonce length to ensure enough entropy shall be requested from the // PRNG. if (nonceLength != 28 /* 160-bit q */ && nonceLength != 36 /* 224-bit q */ && nonceLength != 40 /* 256-bit q */) { throw new e2e.openpgp.error.InvalidArgumentsError( 'Cannot generate random nonce: invalid nonce length.'); } var nonce = new e2e.BigNum( e2e.random.getRandomBytes(nonceLength)); // nonce is [1, q - 1]. return nonce.mod(this.q_.subtract(e2e.BigNum.ONE)).add( e2e.BigNum.ONE); }; /** * Creates a message digest, truncating it to the bit-length of the prime order. * @param {!e2e.ByteArray} message The message to hash. * @return {!Array.<number>} The digest. 
* @private */ e2e.signer.Dsa.prototype.hashWithTruncation_ = function(message) { var hash = this.hash_.hash(message); var requiredLength = Math.ceil(this.q_.getBitLength() / 8); if (requiredLength > hash.length) { throw new e2e.openpgp.error.InvalidArgumentsError( 'Digest algorithm is too short for given DSA parameters.'); } return goog.array.slice(hash, 0, requiredLength); }; e2e.signer.factory.add(e2e.signer.Dsa, e2e.signer.Algorithm.DSA);
{ "pile_set_name": "Github" }
// RUN: %clang_cc1 -fsyntax-only -verify %s // PR clang/3175 void bar(int*); class c { int var; static int svar; void foo() { bar(&var); bar(&svar); } static void wibble() { bar(&var); // expected-error{{invalid use of member 'var' in static member function}} bar(&svar); } }; enum E { Enumerator }; void test() { (void)&Enumerator; // expected-error{{address expression must be an lvalue or a function designator}} } template<int N> void test2() { (void)&N; // expected-error{{address expression must be an lvalue or a function designator}} } // PR clang/3222 void xpto(); void (*xyz)(void) = &xpto; struct PR11066 { static int foo(short); static int foo(float); void test(); }; void PR11066::test() { int (PR11066::*ptr)(int) = & &PR11066::foo; // expected-error{{address expression must be an lvalue or a function designator}} }
{ "pile_set_name": "Github" }
// // Copyright 2017 gRPC authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // #include "src/core/ext/filters/workarounds/workaround_utils.h" #include <grpc/support/alloc.h> #include <grpc/support/log.h> user_agent_parser ua_parser[GRPC_MAX_WORKAROUND_ID]; static void destroy_user_agent_md(void *user_agent_md) { gpr_free(user_agent_md); } grpc_workaround_user_agent_md *grpc_parse_user_agent(grpc_mdelem md) { grpc_workaround_user_agent_md *user_agent_md = (grpc_workaround_user_agent_md *)grpc_mdelem_get_user_data( md, destroy_user_agent_md); if (NULL != user_agent_md) { return user_agent_md; } user_agent_md = (grpc_workaround_user_agent_md *)gpr_malloc( sizeof(grpc_workaround_user_agent_md)); for (int i = 0; i < GRPC_MAX_WORKAROUND_ID; i++) { if (ua_parser[i]) { user_agent_md->workaround_active[i] = ua_parser[i](md); } } grpc_mdelem_set_user_data(md, destroy_user_agent_md, (void *)user_agent_md); return user_agent_md; } void grpc_register_workaround(uint32_t id, user_agent_parser parser) { GPR_ASSERT(id < GRPC_MAX_WORKAROUND_ID); ua_parser[id] = parser; }
{ "pile_set_name": "Github" }
/* * Copyright 2015 Stormpath, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.stormpath.sdk.impl.ds; import com.stormpath.sdk.impl.http.CanonicalUri; import com.stormpath.sdk.impl.http.HttpHeaders; import com.stormpath.sdk.resource.Resource; import java.util.Map; public interface ResourceMessage { ResourceAction getAction(); CanonicalUri getUri(); Map<String,Object> getData(); Class<? extends Resource> getResourceClass(); HttpHeaders getHttpHeaders(); }
{ "pile_set_name": "Github" }
(ns jepsen.faunadb.monotonic "Verifies that clients observe monotonic state and timestamps when performing current reads, and that reads of past timestamps observe monotonic state. For our monotonic state, we'll use a register, implemented as an instance with a single value. That register will be incremented by `inc` calls, starting at 0. {:type :invoke, :f :inc, :value nil} which returns {:type :invoke, :f inc, :value [ts, v]} Meaning that we set the value to v at time ts. Meanwhile, we'll execute reads like: {:type :invoke, :f :read, :value [ts, nil]} which means we should read the register at time `ts`, returning {:type :ok, :f :read, :value [ts, v]}. If the timestamp is nil, we read at the current time, and return the timestamp we executed at." (:refer-clojure :exclude [test]) (:require [clojure.tools.logging :refer :all] [clojure.set :as set] [clojure.core.reducers :as r] [dom-top.core :as dt] [knossos.op :as op] [jepsen [client :as client] [checker :as checker] [generator :as gen] [independent :as independent] [util :as util] [store :as store]] [jepsen.checker.perf :as perf] [gnuplot.core :as g] [jepsen.faunadb [query :as q] [client :as f]]) (:import (java.time Instant) (java.time.temporal ChronoUnit))) (def registers-name "registers") (def registers (q/class registers-name)) (def k 0) (defn strip-time "Timestamps like 2018-12-05T22:15:09Z and 2018-12-05T22:15:09.143Z won't compare properly as strings; we strip off the trailing Z so we can sort them. Also converts instants to strings." [ts] (let [s (str ts) i (dec (count s))] (assert (= \Z (.charAt s i))) (subs s 0 i))) (defn stripped-time->instant "Converts a stripped time string (without a Z) to an Instant." [s] (Instant/parse (str s "Z"))) (defn restrict-history "FaunaDB appears to return infinite sequences of events for the history of single instances. This function takes a sequence of those events and cuts it off when it starts to loop over itself." 
[original-events] (loop [events original-events i 0 seen (transient #{})] (if-not (seq events) ; Finite sequence; done original-events (let [event (first events)] (if (seen event) ; This is a dup (take i original-events) (recur (next events) (inc i) (conj! seen event))))))) (defrecord Client [conn] client/Client (open! [this test node] (assoc this :conn (f/client node))) (setup! [this test] (f/with-retry (f/upsert-class! conn {:name registers-name}))) (invoke! [this test op] (try (f/with-errors op #{:read-at :read} (let [v (:value op) r (q/ref registers k) res (case (:f op) :inc (f/query conn [(q/time "now") (q/if (q/exists? r) ; Record exists, increment (q/let [v (q/select ["data" "value"] (q/get r)) v' (q/+ v 1)] (q/update r {:data {:value v'}}) v) ; Record doesn't exist, init to 1 (q/do (q/create r {:data {:value 1}}) 0))]) :read (f/query conn [(q/time "now") (q/if (q/exists? r) (q/select ["data" "value"] (q/get r)) 0)]) :read-at (let [ts (or (first v) (f/jitter-time (f/now conn) (:at-query-jitter test)))] (f/query conn [ts (q/at ts (q/if (q/exists? r) (q/select ["data" "value"] (q/get r)) 0))])) ; This is broken--the next/prev pointers for history ; traversal result in infinite loops. :events (->> (f/query-all conn (q/events r)) (take 1000) (map (juxt :ts :action (comp :value :data))) )) ; For increments and reads, convert timestamp to string v (case (:f op) (:inc :read :read-at) (update (vec res) 0 strip-time) :events res)] (assoc op :type :ok, :value v))) (catch com.faunadb.client.errors.NotFoundException e (assoc op :type :fail, :error :not-found)))) (teardown! [this test]) (close! [this test] (.close conn))) (defn non-monotonic-pairs-by-process "Given a history, and a function of an operation that extracts a numeric value, finds pairs of ops on the same process where that value decreases." [extractor history] (->> history (r/filter op/ok?) (reduce (fn [[last errs] op] ; Last is a map of process ids to the last ; operation we saw for that process. 
Errs is a ; collection of error maps. (let [p (:process op) value (extractor op) last-value (some-> (last p) extractor)] (if (or (nil? last-value) (<= (compare last-value value) 0)) ; Monotonic [(assoc last p op) errs] ; Non-monotonic! [(assoc last p op) (conj errs [(last p) op])]))) [{} []]) second)) (defn checker "This checks a single register's read and inc queries to ensure that clients observe a locally monotonic order. We generalize it using independent/checker." [] (reify checker/Checker (check [_ test history opts] (let [history (r/filter (comp #{:read :inc} :f) history) ; Check that register values are monotonic value-errs (->> history (non-monotonic-pairs-by-process (comp second :value))) ; Check that timestamps are monotonic ts-errs (->> history (non-monotonic-pairs-by-process (comp first :value)))] {:valid? (and (empty? value-errs) (empty? ts-errs)) :value-errors value-errs :ts-errors ts-errs})))) (defn non-monotonic-pairs "Given a history, and a function of an operation that extracts a comparable value, finds pairs of ops where that value decreases." [extractor history] (->> history (partition 2 1) (keep (fn [[op1 op2 :as pair]] (let [v1 (extractor op1) v2 (extractor op2)] (when-not (<= (compare v1 v2) 0) pair)))))) (defn timestamp-value-checker "Checks a single register to ensure that the relationship between timestamps and register values is globally monotonic." [] (reify checker/Checker (check [_ test history opts] (let [errs (->> history (r/filter op/ok?) (r/filter (comp #{:read-at :inc} :f)) (into []) (sort-by (comp first :value)) (non-monotonic-pairs (comp second :value)))] {:valid? (empty? errs) :errors errs})))) (defn merged-windows "Takes a collection of points, and computes [lower, upper] windows of s elements before and after each point, then merges overlapping windows together. s determines the size of the window, in... points, I think?" 
[s points] (when (seq points) (let [points (sort points) ; Build up a vector of windows by keeping track of the current lower ; and upper bounds, expanding upper whenever necessary. [windows lower upper] (reduce (fn [[windows lower upper] p] (let [lower' (- p s) upper' (+ p s)] (if (<= upper lower') ; Start a new window [(conj windows [lower upper]) lower' upper'] ; Expand this window [windows lower upper']))) [[] (- (first points) s) (+ (first points) s)] points)] (conj windows [lower upper])))) (defn timestamp-value-plot! "Renders a plot of the value of a register over different timestamps. Takes a test and checker opts to determine the subdirectory to write to." [test opts filename history] (let [t0 (-> test :start-time .getMillis Instant/ofEpochMilli) series (->> history (r/filter (comp number? :process)) (r/filter #(= :ok (:type %))) (group-by :process) (util/map-vals (partial mapv (fn [op] [; Convert fauna timestamp to seconds ; since start of test (let [t (-> (:value op) first (stripped-time->instant)) dt (-> t0 (.until t ChronoUnit/NANOS) (util/nanos->secs))] dt) ; Observed value (second (:value op))])))) colors (perf/qs->colors (keys series)) path (.getCanonicalPath (store/path! test (:subdirectory opts) (str "sequential " filename ".png")))] (try (let [plot {:preamble (concat (perf/preamble path) [[:set :title (str (:name test) " sequential by process")] [:set :ylabel "register value"] [:set :xlabel "faunadb timestamp"]]) :series (map (fn [process points] {:title (str process) :with :linespoints :data points :pointtype 2 :linetype (colors process)}) series)}] (when (perf/has-data? plot) (-> plot (perf/without-empty-series) (perf/with-range) (perf/with-nemeses history (:nemeses (:plot test))) (perf/plot!)))) {:valid? true} (catch java.io.IOException _ (throw (IllegalStateException. "Error rendering plot; verify gnuplot is installed and reachable")))))) (defn timestamp-value-plotter "Plots interesting bits of the value as seen by each process history." 
[] (reify checker/Checker (check [this test history opts] ; Identify interesting regions (let [; Set aside nemesis operations so we can plot them later nemesis-history (r/filter (comp #{:nemesis} :process) history) ; Extract temporal reads and sort by timestamp history (->> history (r/filter op/ok?) (r/filter (comp #{:read-at} :f)) (into []) (sort-by (comp first :value)) vec) extractor (comp second :value) spots (nth (reduce (fn [[i last spots] op] ; Figure out if this is a spot (let [p (:process op) v (some-> (last p) extractor) v' (extractor op)] [(inc i) (assoc last p op) (if (or (nil? v) (<= 0 (compare v v'))) ; Monotonic spots ; Non-monotonic (conj spots i))])) [0 {} []] history) 2)] (->> spots (merged-windows 32) (map-indexed (fn [i [lower upper]] (->> (subvec history (max lower 0) (min upper (dec (count history)))) (timestamp-value-plot! test opts i)))) dorun)) {:valid? true}))) (defn not-found-checker "We do explicit existence checks before all reads, and should never observe a not-found result. Let's make sure of that." [] (reify checker/Checker (check [_ test history opts] (let [errs (->> history (r/filter op/fail?) (r/filter (comp #{:not-found} :error)) (into []))] {:valid? (empty? errs) :invoke-count (->> history (r/filter op/invoke?) (into []) count) :error-count (count errs) :first (first errs) :last (peek errs)})))) (defn inc-gen [_ _] {:type :invoke, :f :inc, :value nil}) (defn read-gen [_ _] {:type :invoke, :f :read, :value nil}) (defn read-at-gen [_ _] {:type :invoke, :f :read-at :value [nil nil]}) (defn workload [opts] (let [n (count (:nodes opts))] {:client (Client. nil) :generator (gen/mix [inc-gen read-gen read-at-gen]) :final-generator (gen/once {:type :invoke, :f :events}) :checker (checker/compose {:monotonic (checker) :not-found (not-found-checker) :timestamp-value-plot (timestamp-value-plotter) :timestamp-value (timestamp-value-checker)})}))
{ "pile_set_name": "Github" }
/* ==================================================================== Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ==================================================================== */ package org.apache.poi.hslf.record; import java.io.IOException; import java.io.OutputStream; import java.util.Arrays; import java.util.Map; import java.util.function.Supplier; import org.apache.poi.util.GenericRecordUtil; import org.apache.poi.util.IOUtils; import org.apache.poi.util.LittleEndian; import org.apache.poi.util.StringUtil; /** * A CString (type 4026). Holds a unicode string, and the first two bytes * of the record header normally encode the count. Typically attached to * some complex sequence of records, eg Commetns. * * @author Nick Burch */ public final class CString extends RecordAtom { //arbitrarily selected; may need to increase private static final int MAX_RECORD_LENGTH = 1_000_000; private byte[] _header; /** The bytes that make up the text */ private byte[] _text; /** Grabs the text. Never <code>null</code> */ public String getText() { return StringUtil.getFromUnicodeLE(_text); } /** Updates the text in the Atom. 
*/ public void setText(String text) { // Convert to little endian unicode _text = new byte[text.length()*2]; StringUtil.putUnicodeLE(text,_text,0); // Update the size (header bytes 5-8) LittleEndian.putInt(_header,4,_text.length); } /** * Grabs the count, from the first two bytes of the header. * The meaning of the count is specific to the type of the parent record */ public int getOptions() { return LittleEndian.getShort(_header); } /** * Sets the count * The meaning of the count is specific to the type of the parent record */ public void setOptions(int count) { LittleEndian.putShort(_header, 0, (short)count); } /* *************** record code follows ********************** */ /** * For the CStrubg Atom */ protected CString(byte[] source, int start, int len) { // Sanity Checking if(len < 8) { len = 8; } // Get the header _header = Arrays.copyOfRange(source, start, start+8); // Grab the text _text = IOUtils.safelyClone(source,start+8, len-8, MAX_RECORD_LENGTH); } /** * Create an empty CString */ public CString() { // 0 length header _header = new byte[] { 0, 0, 0xBA-256, 0x0f, 0, 0, 0, 0 }; // Empty text _text = new byte[0]; } /** * We are of type 4026 */ public long getRecordType() { return RecordTypes.CString.typeID; } /** * Write the contents of the record back, so it can be written * to disk */ public void writeOut(OutputStream out) throws IOException { // Header - size or type unchanged out.write(_header); // Write out our text out.write(_text); } /** * Gets a string representation of this object, primarily for debugging. * @return a string representation of this object. */ public String toString() { return getText(); } @Override public Map<String, Supplier<?>> getGenericProperties() { return GenericRecordUtil.getGenericProperties("text", this::getText); } }
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- # Copyright (c) 2015-2016 Ericsson AB # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import wrapt import functools import time import copy from calvin.utilities import calvinuuid from calvin.actor import actorport from calvin.utilities.calvinlogger import get_logger from calvin.utilities.utils import enum from calvin.runtime.north.calvin_token import Token, ExceptionToken # from calvin.runtime.north import calvincontrol from calvin.runtime.north.replicationmanager import ReplicationId import calvin.requests.calvinresponse as response from calvin.runtime.south.async import async from calvin.runtime.north.plugins.authorization_checks import check_authorization_plugin_list from calvin.utilities.calvin_callback import CalvinCB from calvin.csparser.port_property_syntax import get_port_property_capabilities, get_port_property_runtime from calvin.runtime.north.calvinsys import get_calvinsys from calvin.runtime.north.calvinlib import get_calvinlib _log = get_logger(__name__) # Tests in test_manage_decorator.py def manage(include=None, exclude=None): """ Decorator for Actor::init() providing automatic management of state variables. Usage: @manage() # Manage every instance variable known upon completion of __init__ @manage(include = []) # Manage nothing @manage(include = [foo, bar]) # Manage self.foo and self.bar only. 
Equivalent to @manage([foo, bar]) @manage(exclude = [foo, bar]) # Manage everything except self.foo and self.bar @manage(exclude = []) # Same as @manage() @manage(<list>) # Same as @manage(include = <list>) N.B. If include and exclude are both present, exclude will be disregarded. """ if include and type(include) is not list or exclude and type(exclude) is not list: raise Exception("@manage decorator: Must use list as argument") include_set = set(include) if include else set() exclude_set = set(exclude) if exclude else set() # Using wrapt since we need to preserve the signature of the wrapped signature. # See http://wrapt.readthedocs.org/en/latest/index.html # FIXME: Since we use wrapt here, we might as well use it in guard and condition too. @wrapt.decorator def wrapper(wrapped, instance, args, kwargs): # Exclude the instance variables added by superclasses exclude_set.update(instance.__dict__) x = wrapped(*args, **kwargs) if include is None: # include set not given, so construct the implicit include set include_set.update(instance.__dict__) include_set.remove('_managed') include_set.difference_update(exclude_set) instance._managed.update(include_set) return x return wrapper def condition(action_input=[], action_output=[]): """ Decorator condition specifies the required input data and output space. Both parameters are lists of port names Return value is a tuple (did_fire, output_available, exhaust_list) """ tokens_produced = len(action_output) tokens_consumed = len(action_input) def wrap(action_method): @functools.wraps(action_method) def condition_wrapper(self): # # Check if input ports have enough tokens. 
Note that all([]) evaluates to True # input_ok = all(self.inports[portname].tokens_available(1) for portname in action_input) # # Check if output port have enough free token slots # output_ok = all(self.outports[portname].tokens_available(1) for portname in action_output) if not input_ok or not output_ok: return (False, output_ok, ()) # # Build the arguments for the action from the input port(s) # exhausted_ports = set() exception = False args = [] for portname in action_input: port = self.inports[portname] token, exhaust = port.read() is_exception_token = isinstance(token, ExceptionToken) exception = exception or is_exception_token args.append(token if is_exception_token else token.value ) if exhaust: exhausted_ports.add(port) # # Check for exceptional conditions # if exception: # FIXME: Simplify exception handling production = self.exception_handler(action_method, args) or () else: # # Perform the action (N.B. the method may be wrapped in a decorator) # Action methods not returning a production (i.e. no output ports) returns None # => replace with empty_production constant # production = action_method(self, *args) or () valid_production = (tokens_produced == len(production)) if not valid_production: # # Error condition # action = "%s.%s" % (self._type, action_method.__name__) raise Exception("%s invalid production %s, expected %s" % (action, str(production), str(tuple(action_output)))) # # Write the results from the action to the output port(s) # for portname, retval in zip(action_output, production): port = self.outports[portname] port.write_token(retval if isinstance(retval, Token) else Token(retval)) return (True, True, exhausted_ports) return condition_wrapper return wrap def stateguard(action_guard): """ Decorator guard refines the criteria for picking an action to run by stating a function with THE SAME signature as the guarded action returning a boolean (True if action allowed). 
If the speciified function is unbound or a lambda expression, you must account for 'self', e.g. 'lambda self, a, b: a>0' """ def wrap(action_method): @functools.wraps(action_method) def guard_wrapper(self, *args): if not action_guard(self): return (False, True, ()) return action_method(self, *args) return guard_wrapper return wrap def verify_status(valid_status_list, raise_=False): """ Decorator to help with debugging of state transitions If a decorated is called when the actors status is not in valid_status_list it will log (or raise exception if raise_ is True) the attempt. """ @wrapt.decorator def wrapper(wrapped, instance, args, kwargs): # Exclude the instance variables added by superclasses if not instance.fsm.disable_state_checks and instance.fsm.state() not in valid_status_list: msg = "Invalid status %s for operation %s" % (instance.fsm, wrapped.__name__) if raise_: raise Exception(msg) else: _log.info(msg) x = wrapped(*args, **kwargs) return x return wrapper def _implements_state(obj): """Helper method to check if foreign object supports setting/getting state.""" return hasattr(obj, 'state') and callable(getattr(obj, 'state')) and \ hasattr(obj, 'set_state') and callable(getattr(obj, 'set_state')) class calvinsys(object): """ Calvinsys interface exposed to actors """ @staticmethod def open(actor, name, **kwargs): return get_calvinsys().open(name, actor, **kwargs) @staticmethod def can_write(ref): return get_calvinsys().can_write(ref) @staticmethod def write(ref, data): return get_calvinsys().write(ref, data) @staticmethod def can_read(ref): return get_calvinsys().can_read(ref) @staticmethod def read(ref): return get_calvinsys().read(ref) @staticmethod def close(ref): return get_calvinsys().close(ref) class calvinlib(object): """ CalvinLib interface exposed to actors """ @staticmethod def use(name, **kwargs): return get_calvinlib().use(name, **kwargs) class Actor(object): """ Base class for all actors Need a name supplied. 
Subclasses need to declare the parameter calvinsys if they want access to system interface on the node, this parameter will be supplied by the node and not by user """ # Class variable controls action priority order action_priority = tuple() # These are the security variables that will always be serialized, see serialize()/deserialize() below _security_state_keys = ('_subject_attributes') # These are the instance variables that will always be serialized, see serialize()/deserialize() below _private_state_keys = ('_id', '_name', '_has_started', '_deployment_requirements', '_signature', '_migration_info', "_port_property_capabilities", "_replication_id") # Internal state (status) class FSM(object): def __init__(self, states, initial, transitions, hooks=None, allow_invalid_transitions=True, disable_transition_checks=False, disable_state_checks=False): self.states = states self._state = initial self.transitions = transitions self.hooks = hooks or {} self.allow_invalid_transitions = allow_invalid_transitions self.disable_transition_checks = disable_transition_checks # disable_state_checks is used in the verify_status decorator self.disable_state_checks = disable_state_checks def state(self): return self._state def transition_to(self, new_state): if new_state in self.transitions[self._state] or self.disable_transition_checks: hook = self.hooks.get((self._state, new_state), None) if hook: hook() self._state = new_state else: msg = "Invalid transition %s -> %s" % (self, self.printable(new_state)) if self.allow_invalid_transitions: _log.warning("ALLOWING " + msg) self._state = new_state else: raise Exception(msg) def printable(self, state): return self.states.reverse_mapping[state] def __str__(self): return self.printable(self._state) STATUS = enum('LOADED', 'READY', 'PENDING', 'ENABLED', 'DENIED', 'MIGRATABLE') VALID_TRANSITIONS = { STATUS.LOADED : [STATUS.READY], STATUS.READY : [STATUS.PENDING, STATUS.ENABLED, STATUS.DENIED], STATUS.PENDING : [STATUS.READY, 
STATUS.PENDING, STATUS.ENABLED], STATUS.ENABLED : [STATUS.READY, STATUS.PENDING, STATUS.DENIED], STATUS.DENIED : [STATUS.ENABLED, STATUS.MIGRATABLE, STATUS.PENDING], STATUS.MIGRATABLE: [STATUS.READY, STATUS.DENIED] } test_args = () test_kwargs = {} @property def id(self): return self._id @property def name(self): return self._name @name.setter def name(self, value): self._name = value @property def migration_info(self): return self._migration_info # What are the arguments, really? def __init__(self, actor_type, name='', allow_invalid_transitions=True, disable_transition_checks=False, disable_state_checks=False, actor_id=None, security=None): """Should _not_ be overridden in subclasses.""" super(Actor, self).__init__() self._type = actor_type self._name = name # optional: human_readable_name self._id = actor_id or calvinuuid.uuid("ACTOR") _log.debug("New actor id: %s, supplied actor id %s" % (self._id, actor_id)) self._deployment_requirements = [] self._port_property_capabilities = None self._signature = None self._component_members = set([self._id]) # We are only part of component if this is extended self._managed = set() self._has_started = False # self.control = calvincontrol.get_calvincontrol() self._migration_info = None self._migrating_to = None # During migration while on the previous node set to the next node id self._migration_connected = True # False while setup the migrated actor, to prevent further migrations self._last_time_warning = 0.0 self.sec = security self._subject_attributes = self.sec.get_subject_attributes() if self.sec is not None else None self.authorization_checks = None self._replication_id = ReplicationId() self._exhaust_cb = None self._pressure_event = 0 # Time of last pressure event time (not in state only local) self.inports = {p: actorport.InPort(p, self, pp) for p, pp in self.inport_properties.items()} self.outports = {p: actorport.OutPort(p, self, pp) for p, pp in self.outport_properties.items()} hooks = { (Actor.STATUS.PENDING, 
Actor.STATUS.ENABLED): self._will_start, (Actor.STATUS.ENABLED, Actor.STATUS.PENDING): self.will_stop, } self.fsm = Actor.FSM(Actor.STATUS, Actor.STATUS.LOADED, Actor.VALID_TRANSITIONS, hooks, allow_invalid_transitions=allow_invalid_transitions, disable_transition_checks=disable_transition_checks, disable_state_checks=disable_state_checks) def set_authorization_checks(self, authorization_checks): self.authorization_checks = authorization_checks @verify_status([STATUS.LOADED]) def setup_complete(self): self.fsm.transition_to(Actor.STATUS.READY) def init(self): raise Exception("Implementing 'init()' is mandatory.") def _will_start(self): """Ensure will_start() is only called once""" if not self._has_started: self.will_start() self._has_started = True def will_start(self): """Override in actor subclass if actions need to be taken before starting.""" pass def will_stop(self): """Override in actor subclass if actions need to be taken before stopping.""" pass def will_migrate(self): """Override in actor subclass if actions need to be taken before migrating.""" pass def did_migrate(self): """Override in actor subclass if actions need to be taken after migrating.""" pass def _will_end(self): if hasattr(self, "will_end") and callable(self.will_end): self.will_end() get_calvinsys().close_all(self) def did_replicate(self, index): """Override in actor subclass if actions need to be taken after replication.""" pass def __str__(self): ip = "" for p in self.inports.values(): ip = ip + str(p) op = "" for p in self.outports.values(): op = op + str(p) s = "Actor: '%s' class '%s'\nstatus: %s\ninports: %s\noutports:%s" % ( self._name, self._type, self.fsm, ip, op) return s @verify_status([STATUS.READY, STATUS.PENDING, STATUS.ENABLED]) def did_connect(self, port): """Called when a port is connected, checks actor is fully connected.""" if self.fsm.state() == Actor.STATUS.ENABLED: # We already was enabled thats fine now with dynamic port connections return _log.debug("actor.did_connect 
BEGIN %s %s " % (self._name, self._id)) # If we happen to be in READY, go to PENDING if self.fsm.state() == Actor.STATUS.READY: self.fsm.transition_to(Actor.STATUS.PENDING) # Three non-patological options: # have inports, have outports, or have in- and outports if self.inports: for p in self.inports.values(): if not p.is_connected(): return if self.outports: for p in self.outports.values(): if not p.is_connected(): return # If we made it here, all ports are connected self.fsm.transition_to(Actor.STATUS.ENABLED) _log.debug("actor.did_connect ENABLED %s %s " % (self._name, self._id)) @verify_status([STATUS.ENABLED, STATUS.PENDING, STATUS.DENIED, STATUS.MIGRATABLE]) def did_disconnect(self, port): """Called when a port is disconnected, checks actor is fully disconnected.""" # If the actor is MIGRATABLE, return since it will be migrated soon. _log.debug("Actor %s did_disconnect %s" % (self._id, Actor.STATUS.reverse_mapping[self.fsm.state()])) if self.fsm.state() == Actor.STATUS.MIGRATABLE: return # If we happen to be in ENABLED/DENIED, go to PENDING if self.fsm.state() != Actor.STATUS.PENDING: self.fsm.transition_to(Actor.STATUS.PENDING) # Three non-patological options: # have inports, have outports, or have in- and outports if self.inports: for p in self.inports.values(): if p.is_connected(): return if self.outports: for p in self.outports.values(): if p.is_connected(): return # If we made it here, all ports are disconnected self.fsm.transition_to(Actor.STATUS.READY) def exhaust(self, callback): self._exhaust_cb = callback def get_pressure(self): _log.debug("get_pressure %s" % self._replication_id.measure_pressure()) if not self._replication_id.measure_pressure(): return None t = time.time() pressure = {} for port in self.inports.values(): for e in port.endpoints: PRESSURE_LENGTH = len(e.pressure) pressure[port.id + "," + e.peer_id] = {'last': e.pressure_last, 'count': e.pressure_count, 'pressure': [e.pressure[i % PRESSURE_LENGTH] for i in range( max(0, 
e.pressure_count - PRESSURE_LENGTH), e.pressure_count)]} pressure_event = False for p in pressure.values(): if len(p['pressure']) < 2: continue if ((p['pressure'][-1][1] - p['pressure'][-2][1]) < 10 and p['pressure'][-1][1] > self._pressure_event): # Less than 10 sec between queue full and not reported, maybe scale out self._pressure_event = max(p['pressure'][-1][1], self._pressure_event) pressure_event = True break if (p['pressure'][-1][1] < (t - 30) and p['last'] > p['pressure'][-1][0] + 3 and p['pressure'][-1][1] > self._pressure_event): # More than 30 sec since queue full, received at least 3 tokens and not reported, maybe scale in self._pressure_event = max(p['pressure'][-1][1], self._pressure_event) pressure_event = True break pressure['time'] = t _log.debug("get_pressure pressure_event:%s, pressure: %s" % (pressure_event, pressure)) return pressure if pressure_event else None # # FIXME: The following methods (_authorized, _warn_slow_actor, _handle_exhaustion) were # extracted from fire() to make the logic easier to follow # FIXME: Responsibility of scheduler, not actor class # def _authorized(self): authorized = self.check_authorization_decision() if not authorized: _log.info("Access denied for actor %s(%s)" % ( self._type, self._id)) # The authorization decision is not valid anymore. # Change actor status to DENIED. self.fsm.transition_to(Actor.STATUS.DENIED) # Try to migrate actor. 
self.sec.authorization_runtime_search(self._id, self._signature, callback=CalvinCB(self.set_migration_info)) return authorized def _warn_slow_actor(self, time_spent, start_time): time_since_warning = start_time - self._last_time_warning if time_since_warning < 120.0: return self._last_time_warning = start_time _log.warning("%s (%s) actor blocked for %f sec" % (self._name, self._type, time_spent)) def _handle_exhaustion(self, exhausted_ports, output_ok): _log.debug("actor_fire %s test exhaust %s, %s, %s" % (self._id, self._exhaust_cb is not None, exhausted_ports, output_ok)) for port in exhausted_ports: # Might result in actor changing to PENDING try: port.finished_exhaustion() except: _log.exception("FINSIHED EXHAUSTION FAILED") if (output_ok and self._exhaust_cb is not None and not any([p.any_outstanding_exhaustion_tokens() for p in self.inports.values()])): _log.debug("actor %s exhausted" % self._id) # We are in exhaustion, got all exhaustion tokens from peer ports # but stopped firing while outport token slots available, i.e. exhausted inports or deadlock # FIXME handle exhaustion deadlock # Initiate disconnect of outports and destroy the actor async.DelayedCall(0, self._exhaust_cb, status=response.CalvinResponse(True)) self._exhaust_cb = None @verify_status([STATUS.ENABLED]) def fire(self): """ Fire an actor. 
Returns tuple (did_fire, output_ok, exhausted) """ # # Go over the action priority list once # for action_method in self.__class__.action_priority: did_fire, output_ok, exhausted = action_method(self) # Action firing should fire the first action that can fire if did_fire: break return did_fire, output_ok, exhausted def enabled(self): # We want to run even if not fully connected during exhaustion r = self.fsm.state() == Actor.STATUS.ENABLED or self._exhaust_cb is not None if not r: _log.debug("Actor %s %s not enabled" % (self._name, self._id)) return r def denied(self): return self.fsm.state() == Actor.STATUS.DENIED def migratable(self): return self.fsm.state() == Actor.STATUS.MIGRATABLE @verify_status([STATUS.DENIED]) def enable_or_migrate(self): """Enable actor if access is permitted. Try to migrate if access still denied.""" if self.check_authorization_decision(): self.fsm.transition_to(Actor.STATUS.ENABLED) else: # Try to migrate actor. self.sec.authorization_runtime_search(self._id, self._signature, callback=CalvinCB(self.set_migration_info)) # DEPRECATED: Only here for backwards compatibility @verify_status([STATUS.ENABLED]) def enable(self): self.fsm.transition_to(Actor.STATUS.ENABLED) @verify_status([STATUS.READY, STATUS.PENDING, STATUS.LOADED]) # DEPRECATED: Only here for backwards compatibility def disable(self): self.fsm.transition_to(Actor.STATUS.PENDING) # TODO verify status should only allow reading connections when and after being fully connected (enabled) @verify_status([STATUS.ENABLED, STATUS.READY, STATUS.PENDING, STATUS.MIGRATABLE]) def connections(self, node_id): c = {'actor_id': self._id, 'actor_name': self._name} inports = {} for port in self.inports.values(): peers = [ (node_id, p[1]) if p[0] == 'local' else p for p in port.get_peers()] inports[port.id] = peers c['inports'] = inports outports = {} for port in self.outports.values(): peers = [ (node_id, p[1]) if p[0] == 'local' else p for p in port.get_peers()] outports[port.id] = peers 
c['outports'] = outports return c def state(self): """Serialize custom state, implement in subclass if necessary""" return {} def set_state(self, state): """Deserialize and set custom state, implement in subclass if necessary""" pass def _private_state(self): """Serialize state common to all actors""" state = {} state['inports'] = { port: self.inports[port]._state() for port in self.inports} state['outports'] = { port: self.outports[port]._state() for port in self.outports} state['_component_members'] = list(self._component_members) # Place requires in state, in the event we become a ShadowActor state['_requires'] = self.requires if hasattr(self, 'requires') else [] # FIXME: The objects in _private_state_keys are well known, they are private after all, # and we shouldn't need this generic handler. for key in self._private_state_keys: obj = self.__dict__[key] if _implements_state(obj): state[key] = obj.state() else: state[key] = obj state["_calvinsys"] = get_calvinsys().serialize(actor=self) return state def _set_private_state(self, state): """Deserialize and apply state common to all actors""" if "_calvinsys" in state: get_calvinsys().deserialize(actor=self, csobjects=state["_calvinsys"]) for port in state['inports']: # Uses setdefault to support shadow actor self.inports.setdefault(port, actorport.InPort(port, self))._set_state(state['inports'][port]) for port in state['outports']: # Uses setdefault to support shadow actor self.outports.setdefault(port, actorport.OutPort(port, self))._set_state(state['outports'][port]) self._component_members= set(state['_component_members']) # FIXME: The objects in _private_state_keys are well known, they are private after all, # and we shouldn't need this generic handler. 
for key in self._private_state_keys: if key not in self.__dict__: self.__dict__[key] = state.get(key, None) else: obj = self.__dict__[key] if _implements_state(obj): obj.set_state(state.get(key)) else: self.__dict__[key] = state.get(key, None) def _replication_state(self): return None def _set_replication_state(self, state): """Deserialize and apply state related to a replicating actor """ pass def _security_state(self): """ Serialize security state. Security state can only contain objects that can be JSON-serialized. """ return {'_subject_attributes':self._subject_attributes} def _set_security_state(self, state): """ Deserialize and apply security state. Security state can only contain objects that can be JSON-serialized. """ pass def _managed_state(self): """ Serialize managed state. Managed state can only contain objects that can be JSON-serialized. """ state = {key: self.__dict__[key] for key in self._managed} return state def _set_managed_state(self, state): """ Deserialize and apply managed state. Managed state can only contain objects that can be JSON-serialized. 
""" self._managed.update(set(state.keys())) for key, val in state.iteritems(): self.__dict__[key] = val def serialize(self): """Returns the serialized state of an actor.""" state = {} state['private'] = self._private_state() rstate = self._replication_state() if rstate is not None: state['replication'] = rstate state['managed'] = self._managed_state() state['security']= self._security_state() state['custom'] = self.state() return state def deserialize(self, state): """Restore an actor's state from the serialized state.""" self._set_private_state(state['private']) self._set_replication_state(state.get('replication', None)) self._set_security_state(state['security']) self._set_managed_state(state['managed']) self.set_state(state['custom']) def exception_handler(self, action, args): """Defult handler when encountering ExceptionTokens""" _log.error("ExceptionToken encountered\n name: %s\n type: %s\n action: %s\n args: %s\n" % (self._name, self._type, action.__name__, args)) raise Exception("ExceptionToken NOT HANDLED") def events(self): return [] def component_add(self, actor_ids): if not isinstance(actor_ids, (set, list, tuple)): actor_ids = [actor_ids] self._component_members.update(actor_ids) def component_remove(self, actor_ids): if not isinstance(actor_ids, (set, list, tuple)): actor_ids = [actor_ids] self._component_members -= set(actor_ids) def part_of_component(self): return len(self._component_members - set([self._id]))>0 def component_members(self): return self._component_members def requirements_add(self, deploy_reqs, extend=False): if extend: self._deployment_requirements.extend(deploy_reqs) else: self._deployment_requirements = deploy_reqs def requirements_get(self): if self._port_property_capabilities is None: self._port_property_capabilities = self._derive_port_property_capabilities() capability_port = [{ 'op': 'port_property_match', 'kwargs': {'port_property': self._port_property_capabilities}, 'type': '+' }] if hasattr(self, 'requires') and 
self.requires: capability_require = [{ 'op': 'actor_reqs_match', 'kwargs': {'requires': self.requires}, 'type': '+' }] else: capability_require = [] return (self._deployment_requirements + capability_require + capability_port + self._replication_id._placement_req) def _derive_port_property_capabilities(self): port_property_capabilities = set([]) for port in self.inports.values(): port_property_capabilities.update(get_port_property_capabilities(port.properties)) for port in self.outports.values(): port_property_capabilities.update(get_port_property_capabilities(port.properties)) _log.debug("derive_port_property_capabilities:" + str(port_property_capabilities)) return get_port_property_runtime(port_property_capabilities) def signature_set(self, signature): if self._signature is None: self._signature = signature def check_authorization_decision(self): """Check if authorization decision is still valid""" if self.authorization_checks: if any(isinstance(elem, list) for elem in self.authorization_checks): # If list of lists, True must be found in each list. for plugin_list in self.authorization_checks: if not check_authorization_plugin_list(plugin_list): return False return True else: return check_authorization_plugin_list(self.authorization_checks) return True @verify_status([STATUS.DENIED]) def set_migration_info(self, reply): if reply and reply.status == 200 and reply.data["node_id"]: self._migration_info = reply.data self.fsm.transition_to(Actor.STATUS.MIGRATABLE) _log.info("Migrate actor %s to node %s" % (self._name, self._migration_info["node_id"])) # Inform the scheduler that the actor is ready to migrate. get_calvinsys().scheduler_maintenance_wakeup() else: _log.info("No possible migration destination found for actor %s" % self._name) # Try to enable/migrate actor again after a delay. 
get_calvinsys().scheduler_maintenance_wakeup(delay=True) @verify_status([STATUS.MIGRATABLE, STATUS.READY]) def remove_migration_info(self, status): if status.status != 200: self._migration_info = None # FIXME: destroy() in actormanager.py was called before trying to migrate. # Need to make the actor runnable again before transition to DENIED. #self.fsm.transition_to(Actor.STATUS.DENIED) def is_shadow(self): return False class ShadowActor(Actor): """A shadow actor try to behave as another actor but don't have any implementation""" def __init__(self, actor_type, name='', allow_invalid_transitions=True, disable_transition_checks=False, disable_state_checks=False, actor_id=None, security=None): self.inport_properties = {} self.outport_properties = {} self.calvinsys_state = {} self.requires = None self._replication_state_data = None super(ShadowActor, self).__init__(actor_type, name, allow_invalid_transitions=allow_invalid_transitions, disable_transition_checks=disable_transition_checks, disable_state_checks=disable_state_checks, actor_id=actor_id, security=security) @manage(['_shadow_args']) def init(self, **args): self._shadow_args = args def is_shadow(self): return True def create_shadow_port(self, port_name, port_dir, port_id=None): # TODO check if we should create port against meta info if port_dir == "in": self.inport_properties[port_name] = {} port = actorport.InPort(port_name, self) self.inports[port_name] = port else: self.outport_properties[port_name] = {} port = actorport.OutPort(port_name, self) self.outports[port_name] = port return port def enabled(self): return False def did_connect(self, port): # Do nothing return def did_disconnect(self, port): # Do nothing return def requirements_get(self): # Get standard actor requirements first reqs = super(ShadowActor, self).requirements_get() if self._signature and hasattr(self, '_shadow_args') and self.requires is None: # Fresh ShadowActor, needs to find placement based on signature # Since actor requires is not 
known locally reqs += [{'op': 'shadow_actor_reqs_match', 'kwargs': {'signature': self._signature, 'shadow_params': self._shadow_args.keys()}, 'type': '+'}] return reqs def _set_private_state(self, state): """Pop _calvinsys state, set requires and call super class""" self.calvinsys_state = state.pop("_calvinsys") # Done only in ShadowActor since requires is normally part of the real Actor sub-class self.requires = state['_requires'] super(ShadowActor, self)._set_private_state(state) def _private_state(self): """Call super class and add stored calvinsys state""" state = super(ShadowActor, self)._private_state() state["_calvinsys"] = self.calvinsys_state return state def _set_replication_state(self, state): """ Save the replication state, besides ports since they are already handled on the shadow instance """ super(ShadowActor, self)._set_replication_state(state) # Need copy since remove the ports, which is needed for connect self._replication_state_data = copy.copy(state) if state is None: return def _replication_state(self): return self._replication_state_data
{ "pile_set_name": "Github" }
======= echoscu ======= .. code-block:: text $ python -m pynetdicom echoscu [options] addr port Description =========== The ``echoscu`` application implements a Service Class User (SCU) for the :dcm:`Verification<part04/chapter_A.html>` service class. It establishes an association with a peer Application Entity (AE), sends a :dcm:`C-ECHO<part07/sect_9.3.5.html#sect_9.3.5.1>` request and waits for a response. The application can be used to verify basic DICOM connectivity. The source code for the application can be found `here <https://github.com/pydicom/pynetdicom/tree/master/pynetdicom/apps/echoscu>`_ Usage ===== The following example shows what happens when it's succesfully run on an SCP at IP ``127.0.0.1`` and listen port ``11112`` that supports the Verification service: .. code-block:: text $ python -m pynetdicom echoscu 127.0.0.1 11112 When attempting to send a C-ECHO request to an SCP that doesn't support the Verification service: .. code-block:: text $ python -m pynetdicom echoscu 127.0.0.1 11112 E: No accepted presentation contexts When the association request is rejected by the SCP (in this case because the called AE title wasn't recognised): .. code-block:: text $ python -m pynetdicom echoscu 127.0.0.1 11112 E: Association Rejected E: Result: Rejected Permanent, Source: Service User E: Reason: Called AE title not recognised When attempting to associate with a non-DICOM peer: .. code-block:: text $ python -m pynetdicom echoscu 127.0.0.1 11112 E: Association request failed: unable to connect to remote E: TCP Initialisation Error: Connection refused More information is available with the ``-d`` flag: .. code-block:: text $ python -m pynetdicom echoscu 127.0.0.1 11112 -d D: echoscu.py v0.7.0 D: I: Requesting Association D: Request Parameters: D: ======================= OUTGOING A-ASSOCIATE-RQ PDU ======================== ... 
D: ========================== END A-ASSOCIATE-AC PDU ========================== I: Association Accepted I: Sending Echo Request: MsgID 1 D: pydicom.read_dataset() TransferSyntax="Little Endian Implicit" I: Received Echo Response (Status: Success) I: Releasing Association Parameters ========== ``addr`` TCP/IP address or hostname of DICOM peer ``port`` TCP/IP port number of peer Options ======= General Options --------------- ``-q --quiet`` quiet mode, prints no warnings or errors ``-v --verbose`` verbose mode, prints processing details ``-d --debug`` debug mode, prints debugging information ``-ll --log-level [l]evel (str)`` One of [``'critical'``, ``'error'``, ``'warning'``, ``'info'``, ``'debug'``], prints logging messages with corresponding level or lower Network Options --------------- ``-aet --calling-aet [a]etitle (str)`` set the local AE title (default: ``ECHOSCU``) ``-aec --called-aet [a]etitle (str)`` set the called AE title for the peer AE (default: ``ANY-SCP``) ``-ta --acse-timeout [s]econds (float)`` timeout for ACSE messages (default: ``30``) ``-td --dimse-timeout [s]econds (float)`` timeout for DIMSE messages (default: ``30``) ``-tn --network-timeout [s]econds (float)`` timeout for the network (default: ``30``) ``-pdu --max-pdu [n]umber of bytes (int)`` set maximum receive PDU bytes to n bytes (default: ``16382``) Transfer Syntax Options ----------------------- ``-xe --request-little`` request explicit VR little endian TS only ``-xb --request-big`` request explicit VR big endian TS only ``-xi --request-implicit`` request implicit VR little endian TS only Miscellaneous Options --------------------- ``--repeat [n]umber (int)`` repeat echo request ``n`` times ``--abort`` abort association instead of releasing it DICOM Conformance ================= The ``echoscu`` application supports the Verification service as an SCU. The following SOP classes are supported: Verification Service -------------------- SOP Classes ........... 
+------------------+------------------------+ | UID | SOP Class | +==================+========================+ |1.2.840.10008.1.1 | Verification SOP Class | +------------------+------------------------+ Transfer Syntaxes ................. +------------------------+----------------------------------------------------+ | UID | Transfer Syntax | +========================+====================================================+ | 1.2.840.10008.1.2 | Implicit VR Little Endian | +------------------------+----------------------------------------------------+ | 1.2.840.10008.1.2.1 | Explicit VR Little Endian | +------------------------+----------------------------------------------------+ | 1.2.840.10008.1.2.1.99 | Deflated Explicit VR Little Endian | +------------------------+----------------------------------------------------+ | 1.2.840.10008.1.2.2 | Explicit VR Big Endian | +------------------------+----------------------------------------------------+
{ "pile_set_name": "Github" }
{ "name": "mdxmini", "full_name": "mdxmini", "oldname": null, "aliases": [ ], "versioned_formulae": [ ], "desc": "Plays music in X68000 MDX chiptune format", "license": "GPL-2.0", "homepage": "https://clogging.web.fc2.com/psp/", "versions": { "stable": "1.0.0", "head": null, "bottle": true }, "urls": { "stable": { "url": "https://github.com/mistydemeo/mdxmini/archive/v1.0.0.tar.gz", "tag": null, "revision": null } }, "revision": 0, "version_scheme": 0, "bottle": { "stable": { "rebuild": 1, "cellar": ":any", "prefix": "/home/linuxbrew/.linuxbrew", "root_url": "https://linuxbrew.bintray.com/bottles", "files": { "catalina": { "url": "https://linuxbrew.bintray.com/bottles/mdxmini-1.0.0.catalina.bottle.1.tar.gz", "sha256": "04b985db291b09f8f1c4a57e522700f9c67eadcd1074faae78ab0e2ff268d9da" }, "mojave": { "url": "https://linuxbrew.bintray.com/bottles/mdxmini-1.0.0.mojave.bottle.1.tar.gz", "sha256": "e93281dc0c64642e33763f0dc2a4cfa0a6da0dd4739222b0411e54913435ee27" }, "high_sierra": { "url": "https://linuxbrew.bintray.com/bottles/mdxmini-1.0.0.high_sierra.bottle.1.tar.gz", "sha256": "5bf36e82084146ab4604b4746bcf6634cfe4268f2044712e4d13519b21ab5165" }, "sierra": { "url": "https://linuxbrew.bintray.com/bottles/mdxmini-1.0.0.sierra.bottle.1.tar.gz", "sha256": "8e0daf3d508dad59074c567b8c8e60bd88c8026b7dfe1305e4e9c50ec5d8fbbd" }, "el_capitan": { "url": "https://linuxbrew.bintray.com/bottles/mdxmini-1.0.0.el_capitan.bottle.1.tar.gz", "sha256": "d20b94107c25833096401be6336544f283e6956758d4238e207e6a4e34fa5fdf" }, "yosemite": { "url": "https://linuxbrew.bintray.com/bottles/mdxmini-1.0.0.yosemite.bottle.1.tar.gz", "sha256": "55cf6b84d9a0e649e25db7626db954a47bf1061afd20a959972470b6e5cc9fa2" }, "mavericks": { "url": "https://linuxbrew.bintray.com/bottles/mdxmini-1.0.0.mavericks.bottle.1.tar.gz", "sha256": "d08a617e3a8791b9e5dc93426f3d471408550a4a0bab85e33a726ccdcdcb683c" } } } }, "keg_only": false, "bottle_disabled": false, "options": [ ], "build_dependencies": [ ], "dependencies": [ 
"sdl" ], "recommended_dependencies": [ ], "optional_dependencies": [ ], "uses_from_macos": [ ], "requirements": [ ], "conflicts_with": [ ], "caveats": null, "installed": [ ], "linked_keg": null, "pinned": false, "outdated": false, "deprecated": false, "disabled": false }
{ "pile_set_name": "Github" }
del mttest.exe cl /O2 -DWIN32 /MD -I..\..\out mttest.c /Femttest ..\..\out\ssleay32.lib ..\..\out\libeay32.lib
{ "pile_set_name": "Github" }
import generate from '../../src/generator/generate'; import { generateQueries, generateMutations, generateSubscriptions } from '../../src/generator/generateAllOperations'; import { buildClientSchema } from 'graphql'; import { GQLDocsGenOptions } from '../../src/generator/types'; jest.mock('../../src/generator/generateAllOperations'); jest.mock('graphql'); describe('generate', () => { const getQueryType = jest.fn(); const getMutationType = jest.fn(); const getSubscriptionType = jest.fn(); const mockSchema = { getQueryType, getMutationType, getSubscriptionType, }; const maxDepth = 4; const generateOption: GQLDocsGenOptions = { useExternalFragmentForS3Object: true }; beforeEach(() => { jest.resetAllMocks(); getQueryType.mockReturnValue('QUERY_TYPE'); getMutationType.mockReturnValue('MUTATION_TYPE'); getSubscriptionType.mockReturnValue('SUBSCRIPTION_TYPE'); buildClientSchema.mockReturnValue(mockSchema); generateQueries.mockReturnValue('MOCK_GENERATED_QUERY'); generateMutations.mockReturnValue('MOCK_GENERATED_MUTATION'); generateSubscriptions.mockReturnValue('MOCK_GENERATED_SUBSCRIPTION'); }); it('should generate operations using the helper methods', () => { generate(mockSchema, maxDepth, generateOption); expect(generateQueries).toHaveBeenCalledWith(mockSchema.getQueryType(), mockSchema, maxDepth, generateOption); expect(generateMutations).toHaveBeenCalledWith(mockSchema.getMutationType(), mockSchema, maxDepth, generateOption); expect(generateSubscriptions).toHaveBeenCalledWith(mockSchema.getSubscriptionType(), mockSchema, maxDepth, generateOption); }); it('should call the individual operation generator and return the value from them', () => { expect(generate(mockSchema, maxDepth, generateOption)).toEqual({ queries: 'MOCK_GENERATED_QUERY', subscriptions: 'MOCK_GENERATED_SUBSCRIPTION', mutations: 'MOCK_GENERATED_MUTATION', }); }); });
{ "pile_set_name": "Github" }
(**************************************************************************) (* *) (* Copyright 2019-2020 OCamlPro *) (* *) (* All rights reserved. This file is distributed under the terms of the *) (* GNU Lesser General Public License version 2.1, with the special *) (* exception on linking described in the file LICENSE. *) (* *) (**************************************************************************) (* Given a list of system packages, retrieve their installation status from the system and returns a pair of [sys_package] set: * first one is available set: package that exist on the default repositories, but not installed) * second one, not found set: packages not found on the defined repositories *) val packages_status: OpamSysPkg.Set.t -> OpamSysPkg.Set.t * OpamSysPkg.Set.t (* Return the commands to run to install given system packages *) val install_packages_commands: OpamSysPkg.Set.t -> (string * string list) list (* Install given system packages, by calling local system package manager *) val install: OpamSysPkg.Set.t -> unit val update: unit -> unit
{ "pile_set_name": "Github" }
function SeType { param( [Parameter(Mandatory = $true, Position = 0)] [string]$Keys, [Parameter(Mandatory = $true, ValueFromPipeline = $true)] [OpenQA.Selenium.IWebElement]$Element, [switch]$ClearFirst, $SleepSeconds = 0 , [switch]$Submit, [Alias('PT')] [switch]$PassThru ) begin { foreach ($Key in $Script:SeKeys.Name) { $Keys = $Keys -replace "{{$Key}}", [OpenQA.Selenium.Keys]::$Key } } process { if ($ClearFirst) { $Element.Clear() } $Element.SendKeys($Keys) if ($Submit) { $Element.Submit() } if ($SleepSeconds) { Start-Sleep -Seconds $SleepSeconds } if ($PassThru) { $Element } } }
{ "pile_set_name": "Github" }
<!-- The MIT License Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi, Seiji Sogabe Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
--> <!-- Config page --> <?jelly escape-by-default='true'?> <j:jelly xmlns:j="jelly:core" xmlns:st="jelly:stapler" xmlns:d="jelly:define" xmlns:l="/lib/layout" xmlns:t="/lib/hudson" xmlns:f="/lib/form"> <f:entry title="${%Description}" help="/help/system-config/master-slave/description.html"> <f:textbox field="nodeDescription" /> </f:entry> <f:entry title="${%# of executors}" field="numExecutors"> <f:textbox /> </f:entry> <f:entry title="${%Remote FS root}" field="remoteFS"> <f:textbox /> </f:entry> <f:entry title="${%Labels}" field="labelString"> <f:textbox /> </f:entry> <f:slave-mode name="mode" node="${it}" /> <!-- TODO: should be packaged as a tag --> <f:dropdownList name="slave.launcher" title="${%Launch method}" help="${descriptor.getHelpFile('launcher')}"> <j:forEach var="d" items="${h.getComputerLauncherDescriptors()}"> <f:dropdownListBlock value="${d.clazz.name}" name="${d.displayName}" selected="${it.launcher.descriptor==d}" title="${d.displayName}"> <j:set var="descriptor" value="${d}"/> <j:set var="instance" value="${it.launcher.descriptor==d ? it.launcher : null}"/> <f:invisibleEntry> <input type="hidden" name="stapler-class" value="${d.clazz.name}" /> </f:invisibleEntry> <st:include from="${d}" page="${d.configPage}" optional="true"/> </f:dropdownListBlock> </j:forEach> </f:dropdownList> <!-- pointless to show this if there's only one option, which is the default --> <j:if test="${h.getRetentionStrategyDescriptors().size() gt 1}"> <f:dropdownList name="slave.retentionStrategy" title="${%Availability}" help="/help/system-config/master-slave/availability.html"> <j:forEach var="d" items="${h.getRetentionStrategyDescriptors()}"> <j:if test="${d != null}"> <f:dropdownListBlock value="${d.clazz.name}" name="${d.displayName}" selected="${it.retentionStrategy.descriptor==d}" title="${d.displayName}"> <j:set var="descriptor" value="${d}"/> <j:set var="instance" value="${it.retentionStrategy.descriptor==d ? 
it.retentionStrategy : null}"/> <tr><td> <input type="hidden" name="stapler-class" value="${d.clazz.name}" /> </td></tr> <st:include from="${d}" page="${d.configPage}" optional="true"/> </f:dropdownListBlock> </j:if> </j:forEach> </f:dropdownList> </j:if> <f:descriptorList title="${%Node Properties}" descriptors="${h.getNodePropertyDescriptors(descriptor.clazz)}" field="nodeProperties" /> </j:jelly>
{ "pile_set_name": "Github" }
inputs += sym01-txns.ledger exports += { type = journal format = xml outFiles = ["out.sym01.journal.xml"] } reports = []
{ "pile_set_name": "Github" }
# kodkod-star Kodkod with arithmetic overflow prevention and higher-order solving extensions
{ "pile_set_name": "Github" }
{ "extends": "../../tsconfig", "include": ["src/**/*", "test/**/*"] }
{ "pile_set_name": "Github" }
// Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See License.txt in the repository root. /* * This file was automatically generated by com.microsoft.tfs.core.ws.generator.Generator * from the /complexType.vm template. */ package ms.tfs.versioncontrol.clientservices._03; import com.microsoft.tfs.core.ws.runtime.*; import com.microsoft.tfs.core.ws.runtime.serialization.*; import com.microsoft.tfs.core.ws.runtime.types.*; import com.microsoft.tfs.core.ws.runtime.util.*; import com.microsoft.tfs.core.ws.runtime.xml.*; import ms.tfs.versioncontrol.clientservices._03._LocalVersionUpdate; import ms.tfs.versioncontrol.clientservices._03._RepositorySoap_UpdateLocalVersion; import java.lang.String; import java.util.ArrayList; import java.util.List; import javax.xml.stream.XMLStreamConstants; import javax.xml.stream.XMLStreamException; import javax.xml.stream.XMLStreamReader; import javax.xml.stream.XMLStreamWriter; /** * Automatically generated complex type class. */ public class _RepositorySoap_UpdateLocalVersion implements ElementSerializable { // No attributes // Elements protected String workspaceName; protected String ownerName; protected _LocalVersionUpdate[] updates; public _RepositorySoap_UpdateLocalVersion() { super(); } public _RepositorySoap_UpdateLocalVersion( final String workspaceName, final String ownerName, final _LocalVersionUpdate[] updates) { // TODO : Call super() instead of setting all fields directly? 
setWorkspaceName(workspaceName); setOwnerName(ownerName); setUpdates(updates); } public String getWorkspaceName() { return this.workspaceName; } public void setWorkspaceName(String value) { this.workspaceName = value; } public String getOwnerName() { return this.ownerName; } public void setOwnerName(String value) { this.ownerName = value; } public _LocalVersionUpdate[] getUpdates() { return this.updates; } public void setUpdates(_LocalVersionUpdate[] value) { this.updates = value; } public void writeAsElement( final XMLStreamWriter writer, final String name) throws XMLStreamException { writer.writeStartElement(name); // Elements XMLStreamWriterHelper.writeElement( writer, "workspaceName", this.workspaceName); XMLStreamWriterHelper.writeElement( writer, "ownerName", this.ownerName); if (this.updates != null) { /* * The element type is an array. */ writer.writeStartElement("updates"); for (int iterator0 = 0; iterator0 < this.updates.length; iterator0++) { this.updates[iterator0].writeAsElement( writer, "LocalVersionUpdate"); } writer.writeEndElement(); } writer.writeEndElement(); } }
{ "pile_set_name": "Github" }
/* * Copyright 2020 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.exoplayer2.ext.media2; import android.annotation.SuppressLint; import android.support.v4.media.session.MediaSessionCompat; import androidx.media2.session.MediaSession; /** Utility methods to use {@link MediaSession} with other ExoPlayer modules. */ public final class MediaSessionUtil { /** Gets the {@link MediaSessionCompat.Token} from the {@link MediaSession}. */ // TODO(b/152764014): Deprecate this API when MediaSession#getSessionCompatToken() is released. public static MediaSessionCompat.Token getSessionCompatToken(MediaSession mediaSession) { @SuppressLint("RestrictedApi") @SuppressWarnings("RestrictTo") MediaSessionCompat sessionCompat = mediaSession.getSessionCompat(); return sessionCompat.getSessionToken(); } private MediaSessionUtil() { // Prevent from instantiation. } }
{ "pile_set_name": "Github" }
/* * Swagger Petstore * * This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ * * OpenAPI spec version: 1.0.0 * Contact: [email protected] * Generated by: https://github.com/swagger-api/swagger-codegen.git */ using NUnit.Framework; using System; using System.Linq; using System.IO; using System.Collections.Generic; using IO.Swagger.Api; using IO.Swagger.Model; using IO.Swagger.Client; using System.Reflection; using Newtonsoft.Json; namespace IO.Swagger.Test { /// <summary> /// Class for testing OuterComposite /// </summary> /// <remarks> /// This file is automatically generated by Swagger Codegen. /// Please update the test case below to test the model. /// </remarks> [TestFixture] public class OuterCompositeTests { // TODO uncomment below to declare an instance variable for OuterComposite //private OuterComposite instance; /// <summary> /// Setup before each test /// </summary> [SetUp] public void Init() { // TODO uncomment below to create an instance of OuterComposite //instance = new OuterComposite(); } /// <summary> /// Clean up after each test /// </summary> [TearDown] public void Cleanup() { } /// <summary> /// Test an instance of OuterComposite /// </summary> [Test] public void OuterCompositeInstanceTest() { // TODO uncomment below to test "IsInstanceOfType" OuterComposite //Assert.IsInstanceOfType<OuterComposite> (instance, "variable 'instance' is a OuterComposite"); } /// <summary> /// Test the property 'MyNumber' /// </summary> [Test] public void MyNumberTest() { // TODO unit test for the property 'MyNumber' } /// <summary> /// Test the property 'MyString' /// </summary> [Test] public void MyStringTest() { // TODO unit test for the property 'MyString' } /// <summary> /// Test the property 'MyBoolean' /// </summary> [Test] public void MyBooleanTest() { // TODO unit test for the property 'MyBoolean' } } }
{ "pile_set_name": "Github" }
var isSpace = require('./isSpace');

/**
 * Used by `_.trim` and `_.trimRight` to get the index of the last
 * non-whitespace character of `string`.
 *
 * Returns `-1` when the string is empty or consists only of whitespace.
 *
 * @private
 * @param {string} string The string to inspect.
 * @returns {number} Returns the index of the last non-whitespace character.
 */
function trimmedRightIndex(string) {
  var index = string.length - 1;
  // Walk leftwards past any trailing whitespace characters.
  while (index >= 0 && isSpace(string.charCodeAt(index))) {
    index--;
  }
  return index;
}

module.exports = trimmedRightIndex;
{ "pile_set_name": "Github" }
/*============================================================================= Copyright (c) 2001-2011 Joel de Guzman Copyright (c) 2001-2011 Hartmut Kaiser http://spirit.sourceforge.net/ Distributed under the Boost Software License, Version 1.0. (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) =============================================================================*/ #ifndef BOOST_SPIRIT_INCLUDE_KARMA_PLUS #define BOOST_SPIRIT_INCLUDE_KARMA_PLUS #if defined(_MSC_VER) #pragma once #endif #include <boost/spirit/home/karma/operator/plus.hpp> #endif
{ "pile_set_name": "Github" }
// Licensed to the Apache Software Foundation (ASF) under one // or more contributor license agreements. See the NOTICE file // distributed with this work for additional information // regarding copyright ownership. The ASF licenses this file // to you under the Apache License, Version 2.0 (the // "License"); you may not use this file except in compliance // with the License. You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, // software distributed under the License is distributed on an // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. #pragma once #include <cstddef> #include <cstdint> #include <memory> #include <string> #include <vector> #include "kudu/util/status.h" namespace kudu { class Env; class RandomAccessFile; class SequentialFile; class WritableFile; struct WritableFileOptions; namespace env_util { Status OpenFileForWrite(Env *env, const std::string &path, std::shared_ptr<WritableFile> *file); Status OpenFileForWrite(const WritableFileOptions& opts, Env *env, const std::string &path, std::shared_ptr<WritableFile> *file); Status OpenFileForRandom(Env *env, const std::string &path, std::shared_ptr<RandomAccessFile> *file); Status OpenFileForSequential(Env *env, const std::string &path, std::shared_ptr<SequentialFile> *file); // Returns Status::IOError with POSIX code ENOSPC if there is not sufficient // disk space to write 'requested_bytes' bytes to the file system represented by 'path'. // Otherwise returns OK. // If 'reserved_bytes' equals -1, it is interpreted as a 1% reservation. No // other values less than 0 are supported at this time. // If 'available_bytes' is not null, it will contain the amount of free disk space (in bytes) // in 'path' when the function finishes. 
This will happen even if the function returns IOError // with ENOSPC, but not on any other error. Status VerifySufficientDiskSpace(Env *env, const std::string& path, int64_t requested_bytes, int64_t reserved_bytes, int64_t* available_bytes = nullptr); // Creates the directory given by 'path', unless it already exists. // // If 'created' is not NULL, sets it to true if the directory was // created, false otherwise. Status CreateDirIfMissing(Env* env, const std::string& path, bool* created = NULL); // Recursively create directories, if they do not exist, along the given path. // Returns OK if successful or if the given path already existed. // Upon failure, it is possible that some part of the directory structure may // have been successfully created. Emulates the behavior of `mkdir -p`. Status CreateDirsRecursively(Env* env, const std::string& path); // Copy the contents of file source_path to file dest_path. // This is not atomic, and if there is an error while reading or writing, // a partial copy may be left in 'dest_path'. Does not fsync the parent // directory of dest_path -- if you need durability then do that yourself. Status CopyFile(Env* env, const std::string& source_path, const std::string& dest_path, WritableFileOptions opts); // Deletes files matching 'pattern' in excess of 'max_matches' files. // 'max_matches' must be greater than or equal to 0. // The oldest files are deleted first, as determined by last modified time. // In the case that multiple files have the same last modified time, it is not // defined which file will be deleted first. Status DeleteExcessFilesByPattern(Env* env, const std::string& pattern, int max_matches); // Traverses 'path' recursively and deletes all files matching the special Kudu // tmp file infix. Does not follow symlinks. // // Deletion errors generate warnings but do not halt the traversal. Status DeleteTmpFilesRecursively(Env* env, const std::string& path); // Checks if 'path' is an empty directory. 
// // Returns an error if it's not a directory. Otherwise, sets 'is_empty' // accordingly. Status IsDirectoryEmpty(Env* env, const std::string& path, bool* is_empty); // Synchronize all of the parent directories belonging to 'dirs' and 'files' // to disk. Status SyncAllParentDirs(Env* env, const std::vector<std::string>& dirs, const std::vector<std::string>& files); // Return a list of files within the given 'path'. Status ListFilesInDir(Env* env, const std::string& path, std::vector<std::string>* entries); } // namespace env_util } // namespace kudu
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;
using Newtonsoft.Json;

namespace Alipay.AopSdk.Core.Domain
{
	/// <summary>
	/// KoubeiAdvertCommissionChannelCreateModel Data Structure.
	/// Request model for creating advertising commission channels (Koubei).
	/// </summary>
	[Serializable]
	public class KoubeiAdvertCommissionChannelCreateModel : AopObject
	{
		/// <summary>
		/// List of channels to be created (newly added channels).
		/// </summary>
		[JsonProperty("channels")]
		public List<KbAdvertAddChannelRequest> Channels { get; set; }
	}
}
{ "pile_set_name": "Github" }
/*
  Copyright (c) 2007 Stefan Kurtz <[email protected]>
  Copyright (c) 2009 Sascha Steinbiss <[email protected]>
  Copyright (c) 2007-2009 Center for Bioinformatics, University of Hamburg

  Permission to use, copy, modify, and distribute this software for any
  purpose with or without fee is hereby granted, provided that the above
  copyright notice and this permission notice appear in all copies.

  THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
  WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
  MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
  ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
  WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
  ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
  OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*/

/* GtSequenceBuffer implementation for "plain" (raw, header-less) input:
   bytes are copied verbatim from the input files into the shared output
   buffer, one file after another. */

#include "core/error_api.h"
#include "core/sequence_buffer_plain.h"
#include "core/sequence_buffer_rep.h"
#include "core/sequence_buffer_inline.h"
#include "core/unused_api.h"

struct GtSequenceBufferPlain {
  const GtSequenceBuffer parent_instance; /* embedded parent part used by the
                                             gt_sequence_buffer_cast() scheme */
  bool nextfile,        /* true -> the next advance() call must open the next
                           input file */
       firstseqinfile;  /* set when a file is opened; not read in this file --
                           presumably consumed via the shared rep, TODO confirm */
};

#define gt_sequence_buffer_plain_cast(SB)\
        gt_sequence_buffer_cast(gt_sequence_buffer_plain_class(), SB)

/* Refill pvt->outbuf with up to OUTBUFSIZE raw bytes read from the input
   files, opening files in sequence and tracking per-file lengths in
   pvt->filelengthtab (when present).
   Returns 0 on success, -1 if a description pointer is set (plain files
   carry no headers), -2 if no characters could be read at all. */
static int gt_sequence_buffer_plain_advance(GtSequenceBuffer *sb, GtError *err)
{
  int currentchar;
  GtUword currentoutpos = 0, currentfileread = 0;
  GtSequenceBufferMembers *pvt;
  GtSequenceBufferPlain *sbp;

  sbp = gt_sequence_buffer_plain_cast(sb);
  pvt = sb->pvt;
  gt_error_check(err);
  /* plain format has no sequence descriptions/headers to deliver */
  if (pvt->descptr != NULL)
  {
    gt_error_set(err, "no headers in plain sequence file");
    return -1;
  }
  while (true)
  {
    if (currentoutpos >= (GtUword) OUTBUFSIZE)
    {
      /* output buffer full: account for the bytes read so far and stop;
         reading resumes from the same stream on the next advance() */
      if (pvt->filelengthtab != NULL)
      {
        pvt->filelengthtab[pvt->filenum].length += (uint64_t) currentfileread;
        pvt->filelengthtab[pvt->filenum].effectivelength
          += (uint64_t) currentfileread;
      }
      break;
    }
    if (sbp->nextfile)
    {
      /* open the next input file and reset its per-file counters */
      if (pvt->filelengthtab != NULL)
      {
        pvt->filelengthtab[pvt->filenum].length = 0;
        pvt->filelengthtab[pvt->filenum].effectivelength = 0;
      }
      sbp->nextfile = false;
      sbp->firstseqinfile = true;
      currentfileread = 0;
      pvt->inputstream = gt_file_xopen(gt_str_array_get(pvt->filenametab,
                                                        (GtUword) pvt->filenum),
                                       "rb");
      pvt->currentinpos = 0;
      pvt->currentfillpos = 0;
    } else
    {
      currentchar = inlinebuf_getchar(sb, pvt->inputstream);
      if (currentchar == EOF)
      {
        /* end of the current file: close it and record its final length */
        gt_file_delete(pvt->inputstream);
        pvt->inputstream = NULL;
        if (pvt->filelengthtab != NULL)
        {
          pvt->filelengthtab[pvt->filenum].length += (uint64_t) currentfileread;
          pvt->filelengthtab[pvt->filenum].effectivelength
            += (uint64_t) currentfileread;
        }
        if ((GtUword) pvt->filenum == gt_str_array_size(pvt->filenametab)-1)
        {
          /* that was the last file -- the whole input is consumed */
          pvt->complete = true;
          break;
        }
        pvt->filenum++;
        sbp->nextfile = true;
      } else
      {
        /* ordinary byte: copy it verbatim into the output buffer */
        currentfileread++;
        pvt->outbuf[currentoutpos++] = (unsigned char) currentchar;
      }
    }
  }
  if (currentoutpos == 0)
  {
    gt_error_set(err, "no characters in plain file(s) %s ...",
                 gt_str_array_get(pvt->filenametab,0));
    return -2;
  }
  pvt->nextfree = currentoutpos;
  return 0;
}

/* Return the index of the input file currently being read. */
static GtUword gt_sequence_buffer_plain_get_file_index(GtSequenceBuffer *sb)
{
  gt_assert(sb);
  return (GtUword) sb->pvt->filenum;
}

/* No class-specific state to release. */
void gt_sequence_buffer_plain_free(GT_UNUSED GtSequenceBuffer *sb)
{
  /* not needed */
}

/* Class descriptor (vtable) for the plain sequence buffer. */
const GtSequenceBufferClass* gt_sequence_buffer_plain_class(void)
{
  static const GtSequenceBufferClass sbc = { sizeof (GtSequenceBufferPlain),
                                             gt_sequence_buffer_plain_advance,
                                             gt_sequence_buffer_plain_get_file_index,
                                             gt_sequence_buffer_plain_free };
  return &sbc;
}

/* Create a plain sequence buffer reading from the given file names.
   The first advance() call opens the first file (nextfile is preset). */
GtSequenceBuffer* gt_sequence_buffer_plain_new(const GtStrArray *sequences)
{
  GtSequenceBuffer *sb;
  GtSequenceBufferPlain *sbf;
  sb = gt_sequence_buffer_create(gt_sequence_buffer_plain_class());
  sbf = gt_sequence_buffer_plain_cast(sb);
  sb->pvt->filenametab = sequences;
  sb->pvt->filenum = 0;
  sbf->firstseqinfile = true;
  sbf->nextfile = true;
  sb->pvt->nextread = sb->pvt->nextfree = 0;
  sb->pvt->complete = false;
  sb->pvt->lastspeciallength = 0;
  return sb;
}
{ "pile_set_name": "Github" }
Content-Security-Policy: default-src 'self'; style-src 'self' css.example.com; img-src *.example.com; script-src 'unsafe-eval' 'self' js.example.com 'nonce-Nc3n83cnSAd3wc3Sasdfn939hc3'
{ "pile_set_name": "Github" }
<?php
/**
 * Zend Framework
 *
 * LICENSE
 *
 * This source file is subject to the new BSD license that is bundled
 * with this package in the file LICENSE.txt.
 * It is also available through the world-wide-web at this URL:
 * http://framework.zend.com/license/new-bsd
 * If you did not receive a copy of the license and are unable to
 * obtain it through the world-wide-web, please send an email
 * to [email protected] so we can send you a copy immediately.
 *
 * @category   Zend
 * @package    Zend_Console_Getopt
 * @copyright  Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 * @version    $Id: Exception.php 20096 2010-01-06 02:05:09Z bkarwin $
 */

/**
 * @see Zend_Console_Getopt_Exception
 */
require_once 'Zend/Exception.php';

/**
 * Exception thrown by Zend_Console_Getopt; carries an optional usage
 * message alongside the error description.
 *
 * @category   Zend
 * @package    Zend_Console_Getopt
 * @copyright  Copyright (c) 2005-2010 Zend Technologies USA Inc. (http://www.zend.com)
 * @license    http://framework.zend.com/license/new-bsd     New BSD License
 */
class Zend_Console_Getopt_Exception extends Zend_Exception
{
    /**
     * Usage text to show the user alongside the error.
     *
     * @var string
     */
    protected $usage = '';

    /**
     * Create the exception, remembering the usage text.
     *
     * @param string $message Error description.
     * @param string $usage   Optional usage/help text.
     * @return void
     */
    public function __construct($message, $usage = '')
    {
        parent::__construct($message);
        $this->usage = $usage;
    }

    /**
     * Retrieve the usage text supplied at construction time.
     *
     * @return string
     */
    public function getUsageMessage()
    {
        return $this->usage;
    }
}
{ "pile_set_name": "Github" }
//! [0] QBuffer device; device.setData(myQString.toUtf8()); device.open(QIODevice::ReadOnly); QXmlQuery query; query.setQuery("doc($inputDocument)/query[theDocument]"); query.bindVariable("inputDocument", &device); //! [0]
{ "pile_set_name": "Github" }
*%(basename)s:5: SyntaxError: The requested module '../../mjsunit/modules-skip-star-exports-conflict.mjs' contains conflicting star exports for name 'a' export * from "../../mjsunit/modules-skip-star-exports-conflict.mjs"; ^ SyntaxError: The requested module '../../mjsunit/modules-skip-star-exports-conflict.mjs' contains conflicting star exports for name 'a'
{ "pile_set_name": "Github" }
/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package v1beta2 import ( v1beta2 "k8s.io/api/apps/v1beta2" v1 "k8s.io/apimachinery/pkg/apis/meta/v1" types "k8s.io/apimachinery/pkg/types" watch "k8s.io/apimachinery/pkg/watch" scheme "k8s.io/client-go/kubernetes/scheme" rest "k8s.io/client-go/rest" ) // DeploymentsGetter has a method to return a DeploymentInterface. // A group's client should implement this interface. type DeploymentsGetter interface { Deployments(namespace string) DeploymentInterface } // DeploymentInterface has methods to work with Deployment resources. 
type DeploymentInterface interface { Create(*v1beta2.Deployment) (*v1beta2.Deployment, error) Update(*v1beta2.Deployment) (*v1beta2.Deployment, error) UpdateStatus(*v1beta2.Deployment) (*v1beta2.Deployment, error) Delete(name string, options *v1.DeleteOptions) error DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error Get(name string, options v1.GetOptions) (*v1beta2.Deployment, error) List(opts v1.ListOptions) (*v1beta2.DeploymentList, error) Watch(opts v1.ListOptions) (watch.Interface, error) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1beta2.Deployment, err error) DeploymentExpansion } // deployments implements DeploymentInterface type deployments struct { client rest.Interface ns string } // newDeployments returns a Deployments func newDeployments(c *AppsV1beta2Client, namespace string) *deployments { return &deployments{ client: c.RESTClient(), ns: namespace, } } // Get takes name of the deployment, and returns the corresponding deployment object, and an error if there is any. func (c *deployments) Get(name string, options v1.GetOptions) (result *v1beta2.Deployment, err error) { result = &v1beta2.Deployment{} err = c.client.Get(). Namespace(c.ns). Resource("deployments"). Name(name). VersionedParams(&options, scheme.ParameterCodec). Do(). Into(result) return } // List takes label and field selectors, and returns the list of Deployments that match those selectors. func (c *deployments) List(opts v1.ListOptions) (result *v1beta2.DeploymentList, err error) { result = &v1beta2.DeploymentList{} err = c.client.Get(). Namespace(c.ns). Resource("deployments"). VersionedParams(&opts, scheme.ParameterCodec). Do(). Into(result) return } // Watch returns a watch.Interface that watches the requested deployments. func (c *deployments) Watch(opts v1.ListOptions) (watch.Interface, error) { opts.Watch = true return c.client.Get(). Namespace(c.ns). Resource("deployments"). 
VersionedParams(&opts, scheme.ParameterCodec). Watch() } // Create takes the representation of a deployment and creates it. Returns the server's representation of the deployment, and an error, if there is any. func (c *deployments) Create(deployment *v1beta2.Deployment) (result *v1beta2.Deployment, err error) { result = &v1beta2.Deployment{} err = c.client.Post(). Namespace(c.ns). Resource("deployments"). Body(deployment). Do(). Into(result) return } // Update takes the representation of a deployment and updates it. Returns the server's representation of the deployment, and an error, if there is any. func (c *deployments) Update(deployment *v1beta2.Deployment) (result *v1beta2.Deployment, err error) { result = &v1beta2.Deployment{} err = c.client.Put(). Namespace(c.ns). Resource("deployments"). Name(deployment.Name). Body(deployment). Do(). Into(result) return } // UpdateStatus was generated because the type contains a Status member. // Add a +genclient:noStatus comment above the type to avoid generating UpdateStatus(). func (c *deployments) UpdateStatus(deployment *v1beta2.Deployment) (result *v1beta2.Deployment, err error) { result = &v1beta2.Deployment{} err = c.client.Put(). Namespace(c.ns). Resource("deployments"). Name(deployment.Name). SubResource("status"). Body(deployment). Do(). Into(result) return } // Delete takes name of the deployment and deletes it. Returns an error if one occurs. func (c *deployments) Delete(name string, options *v1.DeleteOptions) error { return c.client.Delete(). Namespace(c.ns). Resource("deployments"). Name(name). Body(options). Do(). Error() } // DeleteCollection deletes a collection of objects. func (c *deployments) DeleteCollection(options *v1.DeleteOptions, listOptions v1.ListOptions) error { return c.client.Delete(). Namespace(c.ns). Resource("deployments"). VersionedParams(&listOptions, scheme.ParameterCodec). Body(options). Do(). Error() } // Patch applies the patch and returns the patched deployment. 
func (c *deployments) Patch(name string, pt types.PatchType, data []byte, subresources ...string) (result *v1beta2.Deployment, err error) { result = &v1beta2.Deployment{} err = c.client.Patch(pt). Namespace(c.ns). Resource("deployments"). SubResource(subresources...). Name(name). Body(data). Do(). Into(result) return }
{ "pile_set_name": "Github" }
// Copyright (c) Microsoft Corporation. // Licensed under the MIT License. using UnityEngine; namespace Microsoft.MixedReality.Toolkit.Utilities.Solvers { /// <summary> /// ConstantViewSize solver scales to maintain a constant size relative to the view (currently tied to the Camera) /// </summary> [AddComponentMenu("Scripts/MRTK/SDK/ConstantViewSize")] public class ConstantViewSize : Solver { #region ConstantViewSize Parameters [Range(0f, 1f)] [SerializeField] [Tooltip("The object take up this percent vertically in our view (not technically a percent use 0.5 for 50%)")] private float targetViewPercentV = 0.5f; /// <summary> /// The object take up this percent vertically in our view (not technically a percent use 0.5 for 50%) /// </summary> public float TargetViewPercentV { get { return targetViewPercentV; } set { targetViewPercentV = value; } } [SerializeField] [Tooltip("If the object is closer than MinDistance, the distance used is clamped here")] private float minDistance = 0.5f; /// <summary> /// If the object is closer than MinDistance, the distance used is clamped here /// </summary> public float MinDistance { get { return minDistance; } set { minDistance = value; } } [SerializeField] [Tooltip("If the object is farther than MaxDistance, the distance used is clamped here")] private float maxDistance = 3.5f; /// <summary> /// If the object is farther than MaxDistance, the distance used is clamped here /// </summary> public float MaxDistance { get { return maxDistance; } set { maxDistance = value; } } [SerializeField] [Tooltip("Minimum scale value possible (world space scale)")] private float minScale = 0.01f; /// <summary> /// Minimum scale value possible (world space scale) /// </summary> public float MinScale { get { return minScale; } set { minScale = value; } } [SerializeField] [Tooltip("Maximum scale value possible (world space scale)")] private float maxScale = 100f; /// <summary> /// Maximum scale value possible (world space scale) /// </summary> public 
float MaxScale { get { return maxScale; } set { maxScale = value; } } [SerializeField] [Tooltip("Used for dead zone for scaling")] private float scaleBuffer = 0.01f; /// <summary> /// Used for dead zone for scaling /// </summary> public float ScaleBuffer { get { return scaleBuffer; } set { scaleBuffer = value; } } [SerializeField] [Tooltip("Overrides auto size calculation with provided manual size. If 0, solver calculates size")] private float manualObjectSize = 0; /// <summary> /// Overrides auto size calculation with provided manual size. If 0, solver calculates size /// </summary> public float ManualObjectSize { get { return manualObjectSize; } set { manualObjectSize = value; RecalculateBounds(); } } public ScaleState ScaleState { get; private set; } = ScaleState.Static; /// <summary> /// 0 to 1 between MinScale and MaxScale. If current is less than max, then scaling is being applied. /// This value is subject to inaccuracies due to smoothing/interpolation/momentum. /// </summary> public float CurrentScalePercent { get; private set; } = 1f; /// <summary> /// 0 to 1 between MinDistance and MaxDistance. If current is less than max, object is potentially on a surface [or some other condition like interpolating] (since it may still be on surface, but scale percent may be clamped at max). /// This value is subject to inaccuracies due to smoothing/interpolation/momentum. /// </summary> public float CurrentDistancePercent { get; private set; } = 1f; /// <summary> /// Returns the scale to be applied based on the FOV. This scale will be multiplied by distance as part of /// the final scale calculation, so this is the ratio of vertical fov to distance. 
/// </summary> public float FovScale { get { float cameraFovRadians = (CameraCache.Main.aspect * CameraCache.Main.fieldOfView) * Mathf.Deg2Rad; float sinFov = Mathf.Sin(cameraFovRadians * 0.5f); return 2f * targetViewPercentV * sinFov / objectSize; } } #endregion private float fovScalar = 1f; private float objectSize = 1f; protected override void Start() { base.Start(); RecalculateBounds(); } /// <inheritdoc /> public override void SolverUpdate() { float lastScalePct = CurrentScalePercent; if (SolverHandler.TransformTarget != null) { // Get current fov each time instead of trying to cache it. Can never count on init order these days fovScalar = FovScale; // Set the linked alt scale ahead of our work. This is an attempt to minimize jittering by having solvers work with an interpolated scale. SolverHandler.AltScale.SetGoal(transform.localScale); // Calculate scale based on distance from view. Do not interpolate so we can appear at a constant size if possible. Borrowed from greybox. Vector3 targetPosition = SolverHandler.TransformTarget.position; float distance = Mathf.Clamp(Vector3.Distance(transform.position, targetPosition), minDistance, maxDistance); float scale = Mathf.Clamp(fovScalar * distance, minScale, maxScale); GoalScale = Vector3.one * scale; // Save some state information for external use CurrentDistancePercent = Mathf.InverseLerp(minDistance, maxDistance, distance); CurrentScalePercent = Mathf.InverseLerp(minScale, maxScale, scale); } float scaleDifference = (CurrentScalePercent - lastScalePct) / SolverHandler.DeltaTime; if (scaleDifference > scaleBuffer) { ScaleState = ScaleState.Growing; } else if (scaleDifference < -scaleBuffer) { ScaleState = ScaleState.Shrinking; } else { ScaleState = ScaleState.Static; } } /// <summary> /// Attempts to calculate the size of the bounds which contains all child renderers for attached GameObject. 
This information is used in the core solver calculations /// </summary> public void RecalculateBounds() { float baseSize; // If user set object size override apply, otherwise compute baseSize if (manualObjectSize > 0) { baseSize = manualObjectSize; } else { Vector3 cachedScale = transform.root.localScale; transform.root.localScale = Vector3.one; var combinedBounds = new Bounds(transform.position, Vector3.zero); var renderers = GetComponentsInChildren<Renderer>(); for (var i = 0; i < renderers.Length; i++) { combinedBounds.Encapsulate(renderers[i].bounds); } baseSize = combinedBounds.extents.magnitude; transform.root.localScale = cachedScale; } if (baseSize > 0) { objectSize = baseSize; } else { Debug.LogWarning("ConstantViewSize: Object base size calculate was 0, defaulting to 1"); objectSize = 1f; } } } }
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: GPL-2.0 /* * Sharp LS037V7DW01 LCD Panel Driver * * Copyright (C) 2019 Texas Instruments Incorporated * * Based on the omapdrm-specific panel-sharp-ls037v7dw01 driver * * Copyright (C) 2013 Texas Instruments Incorporated * Author: Tomi Valkeinen <[email protected]> */ #include <linux/delay.h> #include <linux/gpio/consumer.h> #include <linux/module.h> #include <linux/of.h> #include <linux/platform_device.h> #include <linux/regulator/consumer.h> #include <drm/drm_connector.h> #include <drm/drm_modes.h> #include <drm/drm_panel.h> struct ls037v7dw01_panel { struct drm_panel panel; struct platform_device *pdev; struct regulator *vdd; struct gpio_desc *resb_gpio; /* low = reset active min 20 us */ struct gpio_desc *ini_gpio; /* high = power on */ struct gpio_desc *mo_gpio; /* low = 480x640, high = 240x320 */ struct gpio_desc *lr_gpio; /* high = conventional horizontal scanning */ struct gpio_desc *ud_gpio; /* high = conventional vertical scanning */ }; #define to_ls037v7dw01_device(p) \ container_of(p, struct ls037v7dw01_panel, panel) static int ls037v7dw01_disable(struct drm_panel *panel) { struct ls037v7dw01_panel *lcd = to_ls037v7dw01_device(panel); gpiod_set_value_cansleep(lcd->ini_gpio, 0); gpiod_set_value_cansleep(lcd->resb_gpio, 0); /* Wait at least 5 vsyncs after disabling the LCD. */ msleep(100); return 0; } static int ls037v7dw01_unprepare(struct drm_panel *panel) { struct ls037v7dw01_panel *lcd = to_ls037v7dw01_device(panel); regulator_disable(lcd->vdd); return 0; } static int ls037v7dw01_prepare(struct drm_panel *panel) { struct ls037v7dw01_panel *lcd = to_ls037v7dw01_device(panel); int ret; ret = regulator_enable(lcd->vdd); if (ret < 0) dev_err(&lcd->pdev->dev, "%s: failed to enable regulator\n", __func__); return ret; } static int ls037v7dw01_enable(struct drm_panel *panel) { struct ls037v7dw01_panel *lcd = to_ls037v7dw01_device(panel); /* Wait couple of vsyncs before enabling the LCD. 
*/ msleep(50); gpiod_set_value_cansleep(lcd->resb_gpio, 1); gpiod_set_value_cansleep(lcd->ini_gpio, 1); return 0; } static const struct drm_display_mode ls037v7dw01_mode = { .clock = 19200, .hdisplay = 480, .hsync_start = 480 + 1, .hsync_end = 480 + 1 + 2, .htotal = 480 + 1 + 2 + 28, .vdisplay = 640, .vsync_start = 640 + 1, .vsync_end = 640 + 1 + 1, .vtotal = 640 + 1 + 1 + 1, .type = DRM_MODE_TYPE_DRIVER | DRM_MODE_TYPE_PREFERRED, .flags = DRM_MODE_FLAG_NHSYNC | DRM_MODE_FLAG_NVSYNC, .width_mm = 56, .height_mm = 75, }; static int ls037v7dw01_get_modes(struct drm_panel *panel, struct drm_connector *connector) { struct drm_display_mode *mode; mode = drm_mode_duplicate(connector->dev, &ls037v7dw01_mode); if (!mode) return -ENOMEM; drm_mode_set_name(mode); drm_mode_probed_add(connector, mode); connector->display_info.width_mm = ls037v7dw01_mode.width_mm; connector->display_info.height_mm = ls037v7dw01_mode.height_mm; /* * FIXME: According to the datasheet pixel data is sampled on the * rising edge of the clock, but the code running on the SDP3430 * indicates sampling on the negative edge. This should be tested on a * real device. 
*/ connector->display_info.bus_flags = DRM_BUS_FLAG_DE_HIGH | DRM_BUS_FLAG_SYNC_SAMPLE_POSEDGE | DRM_BUS_FLAG_PIXDATA_SAMPLE_NEGEDGE; return 1; } static const struct drm_panel_funcs ls037v7dw01_funcs = { .disable = ls037v7dw01_disable, .unprepare = ls037v7dw01_unprepare, .prepare = ls037v7dw01_prepare, .enable = ls037v7dw01_enable, .get_modes = ls037v7dw01_get_modes, }; static int ls037v7dw01_probe(struct platform_device *pdev) { struct ls037v7dw01_panel *lcd; lcd = devm_kzalloc(&pdev->dev, sizeof(*lcd), GFP_KERNEL); if (!lcd) return -ENOMEM; platform_set_drvdata(pdev, lcd); lcd->pdev = pdev; lcd->vdd = devm_regulator_get(&pdev->dev, "envdd"); if (IS_ERR(lcd->vdd)) { dev_err(&pdev->dev, "failed to get regulator\n"); return PTR_ERR(lcd->vdd); } lcd->ini_gpio = devm_gpiod_get(&pdev->dev, "enable", GPIOD_OUT_LOW); if (IS_ERR(lcd->ini_gpio)) { dev_err(&pdev->dev, "failed to get enable gpio\n"); return PTR_ERR(lcd->ini_gpio); } lcd->resb_gpio = devm_gpiod_get(&pdev->dev, "reset", GPIOD_OUT_LOW); if (IS_ERR(lcd->resb_gpio)) { dev_err(&pdev->dev, "failed to get reset gpio\n"); return PTR_ERR(lcd->resb_gpio); } lcd->mo_gpio = devm_gpiod_get_index(&pdev->dev, "mode", 0, GPIOD_OUT_LOW); if (IS_ERR(lcd->mo_gpio)) { dev_err(&pdev->dev, "failed to get mode[0] gpio\n"); return PTR_ERR(lcd->mo_gpio); } lcd->lr_gpio = devm_gpiod_get_index(&pdev->dev, "mode", 1, GPIOD_OUT_LOW); if (IS_ERR(lcd->lr_gpio)) { dev_err(&pdev->dev, "failed to get mode[1] gpio\n"); return PTR_ERR(lcd->lr_gpio); } lcd->ud_gpio = devm_gpiod_get_index(&pdev->dev, "mode", 2, GPIOD_OUT_LOW); if (IS_ERR(lcd->ud_gpio)) { dev_err(&pdev->dev, "failed to get mode[2] gpio\n"); return PTR_ERR(lcd->ud_gpio); } drm_panel_init(&lcd->panel, &pdev->dev, &ls037v7dw01_funcs, DRM_MODE_CONNECTOR_DPI); return drm_panel_add(&lcd->panel); } static int ls037v7dw01_remove(struct platform_device *pdev) { struct ls037v7dw01_panel *lcd = platform_get_drvdata(pdev); drm_panel_remove(&lcd->panel); drm_panel_disable(&lcd->panel); 
drm_panel_unprepare(&lcd->panel); return 0; } static const struct of_device_id ls037v7dw01_of_match[] = { { .compatible = "sharp,ls037v7dw01", }, { /* sentinel */ }, }; MODULE_DEVICE_TABLE(of, ls037v7dw01_of_match); static struct platform_driver ls037v7dw01_driver = { .probe = ls037v7dw01_probe, .remove = ls037v7dw01_remove, .driver = { .name = "panel-sharp-ls037v7dw01", .of_match_table = ls037v7dw01_of_match, }, }; module_platform_driver(ls037v7dw01_driver); MODULE_AUTHOR("Tomi Valkeinen <[email protected]>"); MODULE_DESCRIPTION("Sharp LS037V7DW01 Panel Driver"); MODULE_LICENSE("GPL");
{ "pile_set_name": "Github" }
<section class="share">
  <h3>Share</h3>
  <a aria-label="Share on Twitter" href="https://twitter.com/intent/tweet?text=&quot;{{ page.twitter_text }}&quot;%20{{ site.url }}{{ page.url }}%20via%20&#64;{{ site.twitter_username }}&hashtags={% for tag in page.tags %}{{tag}},{% endfor %}" onclick="window.open(this.href, 'twitter-share', 'width=550,height=235');return false;" title="Share on Twitter">
    <svg class="icon icon-twitter"><use xlink:href="#icon-twitter"></use></svg>
  </a>
  <a aria-label="Share on Facebook" href="https://www.facebook.com/sharer/sharer.php?u={{ site.url }}{{ page.url }}" onclick="window.open(this.href, 'facebook-share','width=580,height=296');return false;" title="Share on Facebook">
    <svg class="icon icon-facebook"><use xlink:href="#icon-facebook"></use></svg>
  </a>
  <a aria-label="Share on Google Plus" href="https://plus.google.com/share?url={{ site.url }}{{ page.url }}" onclick="window.open(this.href, 'google-plus-share', 'width=490,height=530');return false;" title="Share on Google+">
    <svg class="icon icon-google-plus"><use xlink:href="#icon-google-plus"></use></svg>
  </a>
</section>
{ "pile_set_name": "Github" }
// DevExtreme React Grid demo: row selection over a virtualised table.
// NOTE: this file is a demo template -- the <%&...%> markers are substituted
// per theme by the demo generator before the code is compiled.
import React, { useState } from 'react';<%&additionalImports%>
import {
  SelectionState,
  IntegratedSelection,
} from '@devexpress/dx-react-grid';
import {
  Grid,
  VirtualTable,
  TableHeaderRow,
  TableSelection,
} from '@devexpress/dx-react-grid-<%&themeName%>';
<%&cssImports%>

import { generateRows } from '../../../demo-data/generator';

export default () => {
  // Static column metadata; useState without a setter keeps the reference stable.
  const [columns] = useState([
    { name: 'name', title: 'Name' },
    { name: 'gender', title: 'Gender' },
    { name: 'city', title: 'City' },
    { name: 'car', title: 'Car' },
  ]);
  // 1000 generated demo rows -- enough to exercise the virtual table.
  const [rows] = useState(generateRows({ length: 1000 }));
  // Selected row keys; SelectionState reports changes via setSelection.
  const [selection, setSelection] = useState([]);

  return (
    <div>
      <span>
        Total rows selected:
        {' '}
        {selection.length}
      </span>
      <<%&wrapperTag%><%&wrapperAttributes%>>
        <Grid
          rows={rows}
          columns={columns}
        >
          <SelectionState
            selection={selection}
            onSelectionChange={setSelection}
          />
          <IntegratedSelection />
          <VirtualTable />
          <TableHeaderRow />
          <TableSelection showSelectAll />
        </Grid>
      </<%&wrapperTag%>>
    </div>
  );
};
{ "pile_set_name": "Github" }
// Minimal example of a record-shaped node that declares a named port
// ("foo") and a self-edge whose tail attaches to that port (label_foo:foo).
digraph mygraph {
  label_foo [shape="record",label="<foo>foo"];
  label_foo:foo -> label_foo
}
{ "pile_set_name": "Github" }
import 'package:flutter/material.dart';
import 'package:flutter_dojo/common/main_title_widget.dart';

/// Demo page listing basic and customised [IconButton] examples.
class IconButtonWidget extends StatelessWidget {
  @override
  Widget build(BuildContext context) {
    return ListView(
      children: <Widget>[
        MainTitleWidget('IconButton基本使用'),
        ButtonBar(
          alignment: MainAxisAlignment.center,
          children: <Widget>[
            IconButton(
              icon: Icon(Icons.desktop_mac),
              onPressed: () {},
              tooltip: 'show tooltip',
            ),
            // A null onPressed renders the button in its disabled state.
            IconButton(
              icon: Icon(Icons.desktop_mac),
              onPressed: null,
            ),
          ],
        ),
        MainTitleWidget('Custom IconButton'),
        IconButton(
          // How the icon is positioned within the IconButton
          // (AlignmentGeometry). Only has a visible effect when the parent
          // widget is larger than the child; many alignment options exist.
          alignment: AlignmentDirectional.center,
          // Icon color.
          color: Colors.green,
          // Color used for the icon while the button is disabled; defaults
          // to the current theme's ThemeData.disabledColor.
          disabledColor: Colors.cyan,
          // Background color shown while the button is highlighted (held).
          highlightColor: Colors.yellow,
          // The icon displayed inside the button.
          icon: Icon(Icons.print),
          // Icon size.
          iconSize: 50,
          // Ink-splash color shown when the button is tapped; easiest to
          // see when no highlight color is set.
          splashColor: Colors.blue,
          padding: EdgeInsets.only(bottom: 5.0, top: 5.0, left: 30.0, right: 30.0),
          // Text describing the action that occurs when the button is pressed.
          tooltip: 'show tooltip',
          // Tap handler for the IconButton.
          onPressed: () {},
        ),
      ],
    );
  }
}
{ "pile_set_name": "Github" }
# Custom TFX Component Example - Presto

# Introduction

This package shows how to compose a custom component in
[TensorFlow Extended (TFX)](https://tensorflow.org/tfx). In the following
example, a custom ExampleGen component is used to read in data from
[Presto](https://prestodb.github.io) using a
[Python API](https://github.com/prestodb/presto-python-client/).

## Disclaimer

This package only serves as a demonstration of how to compose a custom
component and should not be relied on for production use.

## Prerequisites

* Linux or MacOS
* Python 2.7, 3.5, or 3.6
* Git

### Required packages

* [Apache Beam](https://beam.apache.org/) is used for pipeline orchestration.
* [PrestoPythonClient](https://pypi.org/project/presto-python-client/)
* [TensorFlow](https://tensorflow.org) is used for model training, evaluation
  and inference.
* [TFX](https://pypi.org/project/tfx/)

# Try It Out

While it is not mandatory, this example is recommended to be tried in a
virtual environment.

```bash
cd
python -m virtualenv -p python3.6 tfx_env
source tfx_env/bin/activate
```

## Step 0: Setup Environment

First install the required packages.

```bash
pip install apache-beam
pip install tensorflow
pip install tfx

cd tfx/tfx/examples/custom_components/presto_example_gen
pip install -e .
```

ExampleGen's custom configuration protobuf requires a protobuf compiler that
is at least version
[3.6.0](http://google.github.io/proto-lens/installing-protoc.html).

## Step 1: Setup Presto on Google Cloud Platform [optional]

Skip this section if a Presto engine is already running. While this example
assumes the following setup, step 3 also demonstrates how to use Presto
ExampleGen for all configurations.

First, install the Presto service on a Cloud Dataproc cluster. Follow this
[tutorial](https://cloud.google.com/dataproc/docs/tutorials/presto-dataproc)
up to and not including the "Prepare data" section.
Instead of using GCP's dataset, [upload](https://cloud.google.com/storage/docs/uploading-objects) the TFX Chicago Taxi Trips [dataset](https://github.com/tensorflow/tfx/tree/master/tfx/examples/chicago_taxi_pipeline/data/simple) to the Cloud Storage bucket at `gs://${BUCKET_NAME}/chicago_taxi_trips/csv/`. Then, create a Hive external table that are backed by the CSV files. ``` gcloud dataproc jobs submit hive \ --cluster presto-cluster \ --execute " CREATE EXTERNAL TABLE chicago_taxi_trips_csv( pickup_community_area INT, fare FLOAT, trip_start_month INT, trip_start_hour INT, trip_start_day INT, trip_start_timestamp INT, pickup_latitude FLOAT, pickup_longitude FLOAT, dropoff_latitude FLOAT, dropoff_longitude FLOAT, trip_miles FLOAT, pickup_census_tract INT, dropoff_census_tract FLOAT, payment_type STRING, company STRING, trip_seconds FLOAT, dropoff_community_area FLOAT, tips FLOAT) ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' STORED AS TEXTFILE location 'gs://${BUCKET_NAME}/chicago_taxi_trips/csv/' tblproperties ('skip.header.line.count'='1');" ``` Create a Hive external table `chicago_taxi_trips_parquet` that stores the same data but in Parquet format for better query performance. ``` gcloud dataproc jobs submit hive \ --cluster presto-cluster \ --execute " CREATE EXTERNAL TABLE chicago_taxi_trips_parquet( pickup_community_area INT, fare FLOAT, trip_start_month INT, trip_start_hour INT, trip_start_day INT, trip_start_timestamp INT, pickup_latitude FLOAT, pickup_longitude FLOAT, dropoff_latitude FLOAT, dropoff_longitude FLOAT, trip_miles FLOAT, pickup_census_tract INT, dropoff_census_tract FLOAT, payment_type STRING, company STRING, trip_seconds FLOAT, dropoff_community_area FLOAT, tips FLOAT) STORED AS PARQUET location 'gs://${BUCKET_NAME}/chicago_taxi_trips/parquet/';" ``` Load the data from the Hive CSV table into the Hive Parquet table. 
``` gcloud dataproc jobs submit hive \ --cluster presto-cluster \ --execute " INSERT OVERWRITE TABLE chicago_taxi_trips_parquet SELECT * FROM chicago_taxi_trips_csv;" ``` Verify that the data loaded correctly. This command should read 15000. ``` gcloud dataproc jobs submit hive \ --cluster presto-cluster \ --execute "SELECT COUNT(*) FROM chicago_taxi_trips_parquet;" ``` To verify that the Presto service is available and accessible, follow the steps outlined under "Run queries" in the GCP [tutorial](https://cloud.google.com/dataproc/docs/tutorials/presto-dataproc#run_queries). ## Step 3: Use Presto ExampleGen The Presto ExampleGen can be plugged into the pipeline like any other ExampleGen. The provided example Presto [pipeline](https://github.com/tensorflow/tfx/blob/master/tfx/examples/custom_components/presto_example_gen/example/taxi_pipeline_presto.py) follows closely to the example [pipeline](https://github.com/tensorflow/tfx/blob/master/tfx/examples/chicago_taxi_pipeline/taxi_pipeline_beam.py) that reads from the CSV file. Instead of CsvExampleGen, the following code snippet is used. The main difference is that a connection configuration is required. ```python from tfx.examples.custom_components.presto_example_gen.proto import presto_config_pb2 from tfx.examples.custom_components.presto_example_gen.presto_component.component import PrestoExampleGen presto_config = presto_config_pb2.PrestoConnConfig(host='localhost', port=8080) example_gen = PrestoExampleGen(presto_config, query='SELECT * FROM chicago_taxi_trips_parquet') ``` The [connection configuration](https://github.com/tensorflow/tfx/blob/master/tfx/examples/custom_components/presto_example_gen/proto/presto_config.proto) is a protobuf that is based off the Presto Python API. Usage may vary depending on how the Presto service is set up.
{ "pile_set_name": "Github" }
@model Xms.Web.Customize.Models.CreateBusinessFlowModel <div class="panel panel-default"> <div class="panel-heading"> <h3 class="panel-title"> <a data-toggle="collapse" href="#collapseTwo"> <strong>@app.PrivilegeTree?.LastOrDefault().DisplayName</strong> </a> </h3> </div> <div id="collapseTwo" class="panel-collapse collapse in"> <div class="panel-body"> <div class="cntr"> <form action="/@app.OrganizationUniqueName/customize/@app.ControllerName/@app.ActionName" method="post" id="editform" class="form-horizontal" role="form"> @Html.AntiForgeryToken() @Html.ValidationSummary() <input type="hidden" name="StepData" id="StepData" value="" /> <div class="container-fluid"> <div class="form-group col-sm-12"> @Html.LabelFor(x => x.Name, "名称", new { @class = "col-sm-2 control-label" }) <div class="col-sm-10"> @Html.TextBoxFor(x => x.Name, new { @class = "form-control required" }) </div> </div> <div class="form-group col-sm-12"> <label class="col-sm-2 control-label">实体</label> <div class="col-sm-10"> @Html.HiddenFor(x => x.EntityId) <select class="form-control" id="EntitySel" onchange="$('#EntityId').val($(this).val())"></select> </div> </div> <div class="form-group col-sm-12"> @Html.LabelFor(x => x.Description, "描述", new { @class = "col-sm-2 control-label" }) <div class="col-sm-10"> @Html.TextAreaFor(x => x.Description, new { @class = "form-control" }) </div> </div> </div> <div id="bussinessWrap" class="bussinessWrap"> <ul class="pcc-tab ul-clr" id="entityMenu" data-bind=" foreach: { data: entityList.list, as: 'item' }"> <li> <i class="layui-icon layui-icon-right icon-sm-right-arrow">&#xe602;</i> </li> <li class="menu-item" data-bind="css:{active:item.isShow()}"> <span data-bind="text:item.entityname(),attr:{'data-id':item.entityid()},click:entityHandle"></span> </li> </ul> <div class="dropdown add-business-btn " style="display: inline;"> <span class="dropdown-toggle" data-toggle="dropdown"> <span class="addminus-config"> <i class="glyphicon glyphicon-th-list 
icon-addminus-config hv addEntityRelationBtn" id="addEntityRelationBtn"></i> </span> <span class="addEntityRelationBtn">选项</span> </span> <div class="dropdown-menu add-business-menu"> <ul class="menu business-list" id="entityMenu" data-bind="foreach: entityMenulist.list"> <li> <a href="#" data-bind="attr:{'data-id':key,'data-name':name},click:addStep"><span data-bind="text:value,attr:{'data-id':key}"></span></a> </li> </ul> <div class="placeRemover" data-bind="visible:entityList.isDelete,click:function(){entityList.remove();entityMenulist.remove();}"> <i class="glyphicon glyphicon-trash"></i> 删除最后一个实体</div> </div> </div> <div class="stage-items" data-bind=" foreach: { data: entityList.list, as: 'litem' }"> <table class="pcc-tbl wMax" data-bind="visible:litem.isShow"> @*<thead> <tr> <th width="200" colspan="2"> <span>阶段</span> <i class="icon-plus hv add-stage-btn active" id="add-stage-btn" data-bind="click:function(){litem.addItems()}"></i> </th> <th class="lbor" width="140"> <i class="icon-line"></i> <span>阶段类别</span> </th> <th class="lbor resizeW-first" colspan="3"> <table cellpadding="0" cellspacing="0" class="stepItems"> <tbody> <tr> <td class="lbor" width=" 33.33%"> <span>步骤</span> <i class="icon-plus add-step-btn hv active" id="add-step-btn" data-bind="click:function(){litem.addLastStep('','');}"></i> </td> <td class="lbor resizeW-second" width="33.33%"><span>字段</span></td> <td class="lbor resizeW-third" width="33.33%"><span>必填</span></td> </tr> </tbody> </table> </th> </tr> </thead>*@ <tbody> <tr id="stepItems" class="stepItems" data-bind="foreach:{ data: litem.items, as: 'kitem' }"> <td class="step-item" data-bind="event:{'click':stageHandler},css:{active:kitem.isShow()}"> <table> <tr> <td width="13"> <i class="icon-arrow-first"></i> <i class="icon-arrow-white"></i> </td> <td> <div class="arrow-center"> <table> <tr> <td width="20"> <a class="btn btn-link delstageBtn" data-bind="click:litem.delStage"><i class="glyphicon glyphicon-trash "></i></a> </td> <td> 
<span data-bind="text:kitem.name,event:{'mouseenter':nameMouseEnter}"></span> <input type="text" class="c-hide" name="step-name" data-bind="value:kitem.name,event:{'mouseout':nameMouseOut}" placeholder="阶段名称" /> </td> </tr> </table> </div> </td> <td width="13"> <i class="icon-arrow-forward"></i> </td> </tr> <tr class="step-fields"> <td colspan="3"> <table> <tr> <td colspan="3" class="pos-rt"> <div class="row stepItems fieldItems" data-bind="foreach:{ data: kitem.steps, as: 'step' }"> <div class="col-sm-4 pb-2"> <div class="col-sm-4 setW-first control-label"> <table> <tr> <td> <span data-bind="text:step.displayname(),event:{'mouseenter':nameMouseEnter}"></span> <input type="text" class="c-hide field-label-input" name="step-name" data-bind="value:step.displayname(),event:{'mouseout':nameMouseOut}" /> </td> <td width="10"> <span class="pull-right " title="是否必填" data-bind="attr:{'class':step.isrequired?'text-danger':'field-required'},click:function(obj,e){ e = e || window.event;var target = e.target || e.srcElement;$(target).next().click();if($(target).hasClass('field-required')){$(target).removeClass('field-required').addClass('text-danger');}else{$(target).removeClass('text-danger').addClass('field-required');}}">*</span> <input class="hidden" type="checkbox" data-bind="checked:step.isrequired" /> </td> </tr> </table> </div> <div class="col-sm-8"> <table> <tr> <td> <span class="form-control" data-bind="text:step.attrname(),event:{'mouseenter':attrMouseEnter}"></span> <select class="attr-selector c-hide form-control" name="step-attribute" data-bind=" options:step.options(), optionsText: 'optionText' , optionsValue:'optionValue', event:{'change':attrChange,'mouseout':attrMouseOut} "> <option value=""></option> </select> </td> <td width="10"><a class="delstepBtn" data-bind="click:kitem.delStep"><i class="glyphicon glyphicon-trash "></i></a></td> </tr> </table> </div> </div> <div class="col-sm-4 btn btn-default field-add" id="add-step-btn" 
data-bind="click:function(){litem.addStep('','');}"> <i class=" glyphicon glyphicon-plus"></i> &nbsp;添加步骤 </div> </div> </td> </tr> </table> </td> </tr> </table> </td> <td class="step-item-add"> <div class="arrow-add"> <a class="btn btn-link move-left" data-bind="click:litem.moveUp"> <i class="glyphicon glyphicon-arrow-left"></i> </a> <a class="btn btn-link move-right" data-bind="click:litem.moveDown"> <i class="glyphicon glyphicon-arrow-right"></i> </a> <a class="btn btn-link item-add" id="add-stage-btn" data-bind="click:function(){litem.addItems()}"> <i class=" glyphicon glyphicon-plus"></i> &nbsp;添加阶段 </a> </div> </td> </tr> @*<tr class="step-item" data-bind="click:stageHandler"> <td width="38"></td> <td width="162"> <span data-bind="text:kitem.name,event:{'mouseenter':nameMouseEnter}"></span> <input type="text" class="c-hide" name="step-name" data-bind="value:kitem.name,event:{'mouseout':nameMouseOut}" /> </td> <td><span data-bind="text:kitem.stagetype"></span></td> <td colspan="3" class="pos-rt"> <table cellpadding="0" cellspacing="0" class="stepItems"> <tbody data-bind="foreach:{ data: kitem.steps, as: 'step' }"> <tr> <td class="setW-first" width="33.33%"> <span data-bind="text:step.displayname(),event:{'mouseenter':nameMouseEnter}"></span> <input type="text" class="c-hide" name="step-name" data-bind="value:step.displayname(),event:{'mouseout':nameMouseOut}" /> </td> <td class="setW-second" width="33.33%"> <span data-bind="text:step.attrname(),event:{'mouseenter':attrMouseEnter}"></span> <select class="attr-selector c-hide" name="step-attribute" data-bind=" options:step.options(), optionsText: 'optionText' , optionsValue:'optionValue', event:{'change':attrChange,'mouseout':attrMouseOut} "> <option value=""></option> </select> </td> <td class="setW-third" width="33.33%"> <input type="checkbox" data-bind="checked:step.isrequired" /> <div class="delstepBtn" data-bind="click:kitem.delStep">x</div> </td> </tr> </tbody> </table> <div class="delstepBtn" 
data-bind="click:litem.delStage">x</div> </td> </tr>*@ </tbody> @*<tfoot> <tr> <td colspan="4" width="200"> <ul class="ul-clr clr"> <li> <i class="icon-move-up move-up-btn" data-bind="click:litem.moveUp"></i> </li> <li> <i class="icon-move-down move-down-btn" data-bind="click:litem.moveDown"></i> </li> <li class="move-status">移动</li> </ul> </td> </tr> </tfoot>*@ </table> </div> </div> <footer class="footer"></footer> <nav class="navbar navbar-default navbar-fixed-bottom" role="navigation" id="body-footer"> <div class="container"> <div class="navbar-form navbar-right" id="body-footer-content"> <button type="submit" class="btn btn-primary"><span class="glyphicon glyphicon-saved"></span> 保存</button> <button type="reset" class="btn btn-default"><span class="glyphicon glyphicon-refresh"></span> 重置</button> </div> </div> </nav> </form> </div> </div> </div> </div> @section Header { <link href="/content/css/[email protected]" rel="stylesheet" /> } @section Scripts { <script src="/content/js/[email protected]"></script> <script> var PAGE_TYPE = 'CREATE'; </script> <script src="/content/js/[email protected]"></script> }
{ "pile_set_name": "Github" }
<!DOCTYPE html> <html> <head> <meta charset="utf-8" /> <title>Doodle.js - API: doodle.geom.createMatrix</title> <link href="./style/reset.css" rel="stylesheet" type="text/css"> <link href="./style/main.css" rel="stylesheet" type="text/css"> <link href="http://fonts.googleapis.com/css?family=Droid+Sans+Mono&subset=latin" rel="stylesheet" type="text/css"> <script type="text/javascript" src="http://ajax.googleapis.com/ajax/libs/jquery/1.4.2/jquery.min.js"></script> <script type="text/javascript" src="./style/main.js"></script> </head> <body> <nav> <ul> <li>ColorFilter</li> <li>Display</li> <li>ElementNode</li> <li>Emitter</li> <li>Event</li> <li>FontStyle</li> <li>FontVariant</li> <li>FontWeight</li> <li>GradientType</li> <li>Graphics</li> <li>Image</li> <li>Keyboard</li> <li>KeyboardEvent</li> <li>Layer</li> <li>LineCap</li> <li>LineJoin</li> <li>Matrix</li> <li>MouseEvent</li> <li>Node</li> <li>Pattern</li> <li>Point</li> <li>Rectangle</li> <li>Sprite</li> <li>Text</li> <li>TextAlign</li> <li>TextBaseline</li> <li>TextEvent</li> <li>TouchEvent</li> <li>UIEvent</li> <li>utils</li> <li>utils.debug</li></ul> </nav> <div class="main"> <h1>doodle.geom.createMatrix</h1> <section> <h2>Description:</h2> <dl> <dt><dfn>Matrix</dfn> (a:number, b:number, c:number, d:number, tx:number, ty:number) :Matrix</dt> <dd> <p></p> <details> <h3>Inherits</h3> <ol> <li>Object</li> </ol> <h3>Parameters</h3> <ol> <li>a:number &#8211; <span class='param_annotation'>[optional]</span> </li> <li>b:number &#8211; <span class='param_annotation'>[optional]</span> </li> <li>c:number &#8211; <span class='param_annotation'>[optional]</span> </li> <li>d:number &#8211; <span class='param_annotation'>[optional]</span> </li> <li>tx:number &#8211; <span class='param_annotation'>[optional]</span> </li> <li>ty:number &#8211; <span class='param_annotation'>[optional]</span> </li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> <li>SyntaxError &#8211; 
</li> </ul> </details> </dd> </dl> </section> <section> <h2>Properties</h2> <dl> <dt><dfn>rotation</dfn> :number</dt> <dd> <p>Return the angle of rotation in radians.</p> <details> <h3>Returns</h3> <ul> <li>number &#8211; radians</li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>tx</dfn> :number</dt> <dd> <p>The distance by which to translate each point along the x axis.</p> <details> <h3>Returns</h3> <ul> <li>number &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>ty</dfn> :number</dt> <dd> <p>The distance by which to translate each point along the y axis.</p> <details> <h3>Returns</h3> <ul> <li>number &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>c</dfn> :number</dt> <dd> <p>The value that affects the positioning of pixels along the x axis when rotating or skewing an image.</p> <details> <h3>Returns</h3> <ul> <li>number &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>a</dfn> :number</dt> <dd> <p>The value that affects the positioning of pixels along the x axis when scaling or rotating an image.</p> <details> <h3>Returns</h3> <ul> <li>number &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>b</dfn> :number</dt> <dd> <p>The value that affects the positioning of pixels along the y axis when rotating or skewing an image.</p> <details> <h3>Returns</h3> <ul> <li>number &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>d</dfn> :number</dt> <dd> <p>The value that affects the positioning of pixels along the y axis when scaling or rotating an image.</p> <details> <h3>Returns</h3> <ul> <li>number &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> </section> <section> <h2>Methods</h2> <dl> <dt><dfn>add</dfn> (m:Matrix) :Matrix</dt> <dd> 
<p>Add a matrix with the current matrix.</p> <details> <h3>Parameters</h3> <ol> <li>m:Matrix &#8211; </li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>deltaRotate</dfn> (angle:number) :Matrix</dt> <dd> <p>Applies a rotation transformation to the Matrix object, ignore translation.</p> <details> <h3>Parameters</h3> <ol> <li>angle:number &#8211; The rotation angle in radians.</li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>rotate</dfn> (angle:number) :Matrix</dt> <dd> <p>Applies a rotation transformation to the Matrix object.</p> <details> <h3>Parameters</h3> <ol> <li>angle:number &#8211; The rotation angle in radians.</li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>deltaScale</dfn> (sx:number, sy:number) :Matrix</dt> <dd> <p>Applies a scaling transformation to the matrix, ignores translation.</p> <details> <h3>Parameters</h3> <ol> <li>sx:number &#8211; A multiplier used to scale the object along the x axis.</li> <li>sy:number &#8211; A multiplier used to scale the object along the y axis.</li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>scale</dfn> (sx:number, sy:number) :Matrix</dt> <dd> <p>Applies a scaling transformation to the matrix.</p> <details> <h3>Parameters</h3> <ol> <li>sx:number &#8211; A multiplier used to scale the object along the x axis.</li> <li>sy:number &#8211; A multiplier used to scale the object along the y axis.</li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>deltaTransformPoint</dfn> (pt:Point) :Point</dt> <dd> <p>Given a point in the pretransform coordinate space, 
returns the coordinates of that point after the transformation occurs. Unlike 'transformPoint', does not consider translation.</p> <details> <h3>Parameters</h3> <ol> <li>pt:Point &#8211; </li> </ol> <h3>Returns</h3> <ul> <li>Point &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>multiply</dfn> (m:Matrix) :Matrix</dt> <dd> <p>Multiplies a matrix with the current matrix, effectively combining the geometric effects of the two.</p> <details> <h3>Parameters</h3> <ol> <li>m:Matrix &#8211; The matrix to be concatenated to the source matrix.</li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>id</dfn> () :string</dt> <dd> <p></p> <details> <h3>Returns</h3> <ul> <li>string &#8211; </li> </ul> </details> </dd> <dt><dfn>matchInternalPointWithExternal</dfn> () :void</dt> <dd> <p></p> <details> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>rotateAroundExternalPoint</dfn> () :void</dt> <dd> <p></p> <details> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>rotateAroundInternalPoint</dfn> () :void</dt> <dd> <p></p> <details> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>skew</dfn> (skewX:number, skewY:number) :Matrix</dt> <dd> <p></p> <details> <h3>Parameters</h3> <ol> <li>skewX:number &#8211; </li> <li>skewY:number &#8211; </li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>invert</dfn> () :Matrix</dt> <dd> <p>Performs the opposite transformation of the original matrix.</p> <details> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> </details> </dd> <dt><dfn>toArray</dfn> () :Array</dt> <dd> <p>Returns an array value containing the properties of the Matrix object.</p> <details> <h3>Returns</h3> <ul> <li>Array &#8211; </li> </ul> </details> 
</dd> <dt><dfn>clone</dfn> () :Matrix</dt> <dd> <p>Returns a new Matrix object that is a clone of this matrix, with an exact copy of the contained object.</p> <details> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> </details> </dd> <dt><dfn>toString</dfn> () :string</dt> <dd> <p>Returns a text value listing the properties of the Matrix object.</p> <details> <h3>Returns</h3> <ul> <li>string &#8211; </li> </ul> </details> </dd> <dt><dfn>transformPoint</dfn> (pt:Point) :Point</dt> <dd> <p>Returns the result of applying the geometric transformation represented by the Matrix object to the specified point.</p> <details> <h3>Parameters</h3> <ol> <li>pt:Point &#8211; </li> </ol> <h3>Returns</h3> <ul> <li>Point &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>identity</dfn> () :Matrix</dt> <dd> <p>Sets each matrix property to a value that causes a null transformation.</p> <details> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> </details> </dd> <dt><dfn>compose</dfn> (a:number, b:number, c:number, d:number, tx:number, ty:number) :Matrix</dt> <dd> <p>Set values of this matrix with the specified parameters.</p> <details> <h3>Parameters</h3> <ol> <li>a:number &#8211; The value that affects the positioning of pixels along the x axis when scaling or rotating an image.</li> <li>b:number &#8211; The value that affects the positioning of pixels along the y axis when rotating or skewing an image.</li> <li>c:number &#8211; The value that affects the positioning of pixels along the x axis when rotating or skewing an image.</li> <li>d:number &#8211; The value that affects the positioning of pixels along the y axis when scaling or rotating an image.</li> <li>tx:number &#8211; The distance by which to translate each point along the x axis.</li> <li>ty:number &#8211; The distance by which to translate each point along the y axis.</li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> 
<li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>deltaSkew</dfn> (skewX:number, skewY:number) :Matrix</dt> <dd> <p>Skew matrix and ignore translation.</p> <details> <h3>Parameters</h3> <ol> <li>skewX:number &#8211; </li> <li>skewY:number &#8211; </li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>equals</dfn> (m:Matrix) :boolean</dt> <dd> <p>Test if matrix is equal to this one.</p> <details> <h3>Parameters</h3> <ol> <li>m:Matrix &#8211; </li> </ol> <h3>Returns</h3> <ul> <li>boolean &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>translate</dfn> (dx:number, dy:number) :Matrix</dt> <dd> <p>Translates the matrix along the x and y axes.</p> <details> <h3>Parameters</h3> <ol> <li>dx:number &#8211; The amount of movement along the x axis to the right, in pixels.</li> <li>dy:number &#8211; The amount of movement down along the y axis, in pixels.</li> </ol> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> <dt><dfn>interpolate</dfn> () :Matrix</dt> <dd> <p>Update matrix 'in-between' this and another matrix given a value of t bewteen 0 and 1.</p> <details> <h3>Returns</h3> <ul> <li>Matrix &#8211; </li> </ul> <h3>Throws</h3> <ul> <li>TypeError &#8211; </li> </ul> </details> </dd> </section> <section> <h2>Class Functions</h2> <dl> <dt><dfn>isMatrix</dfn> (m:Object) :boolean</dt> <dd> <p>Check if a given object contains a numeric matrix properties. Does not check if a matrix is actually a doodle.geom.matrix.</p> <details> <h3>Parameters</h3> <ol> <li>m:Object &#8211; </li> </ol> <h3>Returns</h3> <ul> <li>boolean &#8211; </li> </ul> </details> </dd> </section> </div> <footer> <a href="http://github.com/billyist/doodle-js/blob/master/BSD-LICENSE.txt">Copyright</a> <a href="http://www.lamberta.org">Billy Lamberta</a> </footer> </body> </html>
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: GPL-2.0 /* * Generate devlist.h from the Zorro ID file. * * (c) 2000 Geert Uytterhoeven <[email protected]> * * Based on the PCI version: * * (c) 1999--2000 Martin Mares <[email protected]> */ #include <stdio.h> #include <string.h> #define MAX_NAME_SIZE 63 static void pq(FILE *f, const char *c) { while (*c) { if (*c == '"') fprintf(f, "\\\""); else fputc(*c, f); c++; } } int main(void) { char line[1024], *c, *bra, manuf[8]; int manufs = 0; int mode = 0; int lino = 0; int manuf_len = 0; FILE *devf; devf = fopen("devlist.h", "w"); if (!devf) { fprintf(stderr, "Cannot create output file!\n"); return 1; } while (fgets(line, sizeof(line)-1, stdin)) { lino++; if ((c = strchr(line, '\n'))) *c = 0; if (!line[0] || line[0] == '#') continue; if (line[0] == '\t') { switch (mode) { case 1: if (strlen(line) > 5 && line[5] == ' ') { c = line + 5; while (*c == ' ') *c++ = 0; if (manuf_len + strlen(c) + 1 > MAX_NAME_SIZE) { /* Too long, try cutting off long description */ bra = strchr(c, '['); if (bra && bra > c && bra[-1] == ' ') bra[-1] = 0; if (manuf_len + strlen(c) + 1 > MAX_NAME_SIZE) { fprintf(stderr, "Line %d: Product name too long\n", lino); return 1; } } fprintf(devf, "\tPRODUCT(%s,%s,\"", manuf, line+1); pq(devf, c); fputs("\")\n", devf); } else goto err; break; default: goto err; } } else if (strlen(line) > 4 && line[4] == ' ') { c = line + 4; while (*c == ' ') *c++ = 0; if (manufs) fputs("ENDMANUF()\n\n", devf); manufs++; strcpy(manuf, line); manuf_len = strlen(c); if (manuf_len + 24 > MAX_NAME_SIZE) { fprintf(stderr, "Line %d: manufacturer name too long\n", lino); return 1; } fprintf(devf, "MANUF(%s,\"", manuf); pq(devf, c); fputs("\")\n", devf); mode = 1; } else { err: fprintf(stderr, "Line %d: Syntax error in mode %d: %s\n", lino, mode, line); return 1; } } fputs("ENDMANUF()\n\ \n\ #undef MANUF\n\ #undef PRODUCT\n\ #undef ENDMANUF\n", devf); fclose(devf); return 0; }
{ "pile_set_name": "Github" }
{ "Jasno": ["title", "clear"], "Možnosť veľmi slabých zrážok": ["title", "possible-very-light-precipitation"], "Slabé zrážky": ["title", "very-light-precipitation"], "Možnosť slabých zrážok": ["title", "possible-light-precipitation"], "Slabé zrážky": ["title", "light-precipitation"], "Zrážky": ["title", "medium-precipitation"], "Silné zrážky": ["title", "heavy-precipitation"], "Možnosť mrholenia": ["title", "possible-very-light-rain"], "Mrholenie": ["title", "very-light-rain"], "Možnosť slabého dažďa": ["title", "possible-light-rain"], "Slabý dážď": ["title", "light-rain"], "Dážď": ["title", "medium-rain"], "Vydatný dážď": ["title", "heavy-rain"], "Možnosť slabého dažďa so snehom": ["title", "possible-very-light-sleet"], "Slabý dážď so snehom": ["title", "very-light-sleet"], "Možnosť slabého dažďa so snehom": ["title", "possible-light-sleet"], "Slabý dážď so snehom": ["title", "light-sleet"], "Dážď so snehom": ["title", "medium-sleet"], "Vydatný dážď so snehom": ["title", "heavy-sleet"], "Možnosť slabého sneženia": ["title", "possible-very-light-snow"], "Slabé sneženie": ["title", "very-light-snow"], "Možnosť slabého sneženia": ["title", "possible-light-snow"], "Slabé sneženie": ["title", "light-snow"], "Sneženie": ["title", "medium-snow"], "Vydatné sneženie": ["title", "heavy-snow"], "Veterno": ["title", "medium-wind"], "Silný vietor": ["title", "heavy-wind"], "Hmlisto": ["title", "fog"], "Prevažne zamračené": ["title", "medium-clouds"], "Zamračené": ["title", "heavy-clouds"], "Nízka vlhkosť a slabý vietor": ["title", ["and", "low-humidity", "light-wind"]], "Mrholenie a silný vietor": ["title", ["and", "very-light-rain", "heavy-wind"]], "Vysoká vlhkosť a čiastočne zamračené": ["title", ["and", "high-humidity", "light-clouds"]], "Jasno hodinu.": ["sentence", ["for-hour", "clear"]], "Slabé sneženie o 35 min.": ["sentence", ["starting-in", "very-light-snow", ["minutes", 35]]], "Slabý dážď skončí o 15 min.": ["sentence", ["stopping-in", "light-rain", ["minutes", 
15]]], "Vydatný dážď so snehom o 20 min., skončí o 30 min. neskôr.": ["sentence", ["starting-then-stopping-later", "heavy-sleet", ["minutes", 20], ["minutes", 30]]], "Dážď skončí o 25 min. a začne znovu o 8 min. neskôr.": ["sentence", ["stopping-then-starting-later", "medium-rain", ["minutes", 25], ["minutes", 8]]], "Počas dňa prevažne zamračené.": ["sentence", ["for-day", "medium-clouds"]], "Od rána slabý dážď so snehom.": ["sentence", ["starting", "very-light-sleet", "morning"]], "Veterno až do dnešnej noci.": ["sentence", ["until", "medium-wind", "today-night"]], "Silné zrážky až do popoludnia.": ["sentence", ["until", "heavy-precipitation", "afternoon"]], "Popoludní slabý vietor.": ["sentence", ["during", "light-wind", "afternoon"]], "Dnes neskoro večer a zajtra ráno sneženie.": ["sentence", ["during", "medium-snow", ["and", "later-today-evening", "tomorrow-morning"]]], "Vydatný dážď až do dnešného dopoludnia, ktorý začne znovu dnes večer.": ["sentence", ["until-starting-again", "heavy-rain", "later-today-morning", "today-evening"]], "Od dnešnej neskorej noci zamračené a zajtra popoludní vydatné sneženie.": ["sentence", ["starting-continuing-until", "heavy-clouds", "evening", "night"]], "Od popoludnia čiastočne zamračené a pretrvá až do večera.": ["sentence", ["starting-continuing-until", "light-clouds", "afternoon", "evening"]], "Dnes podvečer slabý dážď so snehom a zajtra ráno hmlisto.": ["sentence", ["and", ["during", "light-sleet", "later-today-afternoon"], ["during", "fog", "tomorrow-morning"]]], "Od dnešného rána silný vietor, ktorý pretrvá až do dnešného popoludnia a zajtra ráno dážď so snehom.": ["sentence", ["and", ["starting-continuing-until", "heavy-wind", "today-morning", "today-afternoon"], ["during", "medium-sleet", "tomorrow-morning"]]], "Od dnešnej neskorej noci zamračené a zajtra popoludní vydatné sneženie.": ["sentence", ["and", ["starting", "heavy-clouds", "later-today-night"], ["during", "heavy-snow", "tomorrow-afternoon"]]], "Dnes v noci 
nízka vlhkosť a od zajtrajšieho večera slabé zrážky, ktoré pretrvajú až do zajtrajšej noci.": ["sentence", ["and", ["during", "low-humidity", "today-night"], ["starting-continuing-until", "light-precipitation", "tomorrow-evening", "tomorrow-night"]]], "V noci sneženie (5 in).": ["sentence", ["during", ["parenthetical", "medium-snow", ["inches", 5]], "night"]], "Dnes dopoludnia slabé sneženie (2 cm).": ["sentence", ["during", ["parenthetical", "light-snow", ["centimeters", 2]], "later-today-morning"]], "Počas dňa vydatné sneženie (8-12 in).": ["sentence", ["for-day", ["parenthetical", "heavy-snow", ["inches", ["range", 8, 12]]]]], "Popoludní sneženie (menej ako 1 cm).": ["sentence", ["during", ["parenthetical", "medium-snow", ["less-than", ["centimeters", 1]]], "afternoon"]], "Bez zrážok počas týždňa, zajtra s teplotným maximom 85°F.": ["sentence", ["with", ["for-week", "no-precipitation"], ["temperatures-peaking", ["fahrenheit", 85], "tomorrow"]]], "Zmiešané zrážky cez víkend, vo štvrtok s teplotami stúpajúcimi k 32°C.": ["sentence", ["with", ["over-weekend", "mixed-precipitation"], ["temperatures-rising", ["celsius", 32], "thursday"]]], "V pondelok mrholenie, v piatok s teplotným minimom 15°F.": ["sentence", ["with", ["during", "very-light-rain", "monday"], ["temperatures-valleying", ["fahrenheit", 15], "friday"]]], "V utorok a budúcu stredu slabé sneženie, v nedeľu s teplotami klesajúcimi k 0°C.": ["sentence", ["with", ["during", "light-snow", ["and", "tuesday", "next-wednesday"]], ["temperatures-falling", ["celsius", 0], "sunday"]]], "Od dnes do soboty zrážky, v pondelok s teplotným maximom 100°F.": ["sentence", ["with", ["during", "medium-precipitation", ["through", "today", "saturday"]], ["temperatures-peaking", ["fahrenheit", 100], "monday"]]], "Počas dňa zmiešané zrážky (1-3 in snehu).": ["sentence", ["for-day", ["parenthetical", "mixed-precipitation", ["inches", ["range", 1, 3]]]]], "Vydatné sneženie (1-3 in)": ["title", ["parenthetical", "heavy-snow", 
["inches", ["range", 1, 3]]]], "Vydatné sneženie (3-5 cm)": ["title", ["parenthetical", "heavy-snow", ["centimeters", ["range", 3, 5]]]], "Možnosť búrok": ["title", "possible-thunderstorm"], "Vydatný dážď a búrky": ["title", ["and", "heavy-rain", "thunderstorm"]], "Búrky až do budúceho pondelka.": ["sentence", ["until", "thunderstorm", "next-monday"]], "Predpoveď na ďalšiu hodinu je dočasne nedostupná, pretože všetky radarové stanice v okolí sú v režime offline.": ["sentence",["next-hour-forecast-status", "temporarily-unavailable", "station-offline"]], "Predpoveď na ďalšiu hodinu je čiastočne nedostupná, pretože vznikli medzery v pokrytí radarovými stanicami v okolí.": ["sentence",["next-hour-forecast-status", "partially-unavailable", "station-incomplete"]], "Predpoveď na ďalšiu hodinu je nedostupná, pretože všetky radarové stanice v okolí sú v režime offline.": ["sentence",["next-hour-forecast-status", "unavailable", "station-offline"]] }
{ "pile_set_name": "Github" }
/****************************************************************************
**
** Copyright (C) 2016 The Qt Company Ltd.
** Contact: https://www.qt.io/licensing/
**
** This file is part of the Qt Data Visualization module of the Qt Toolkit.
**
** $QT_BEGIN_LICENSE:GPL$
** Commercial License Usage
** Licensees holding valid commercial Qt licenses may use this file in
** accordance with the commercial license agreement provided with the
** Software or, alternatively, in accordance with the terms contained in
** a written agreement between you and The Qt Company. For licensing terms
** and conditions see https://www.qt.io/terms-conditions. For further
** information use the contact form at https://www.qt.io/contact-us.
**
** GNU General Public License Usage
** Alternatively, this file may be used under the terms of the GNU
** General Public License version 3 or (at your option) any later version
** approved by the KDE Free Qt Foundation. The licenses are as published by
** the Free Software Foundation and appearing in the file LICENSE.GPL3
** included in the packaging of this file. Please review the following
** information to ensure the GNU General Public License requirements will
** be met: https://www.gnu.org/licenses/gpl-3.0.html.
**
** $QT_END_LICENSE$
**
****************************************************************************/

import QtQuick 2.0
import QtDataVisualization 1.2

// Minimal Bars3D graph fed by an ItemModelBarDataProxy: three bars in a
// single row, with values 1..3 taken from an inline ListModel.
Bars3D {
    width: 300
    height: 300
    Bar3DSeries {
        // Maps ListModel elements onto bars via the role names below.
        ItemModelBarDataProxy {
            itemModel: ListModel {
                // One bar per element: "row"/"column" place the bar,
                // "value" (a string, converted by the proxy) is its height.
                ListElement{ row: "row 1"; column: "column 1"; value: "1"; }
                ListElement{ row: "row 1"; column: "column 2"; value: "2"; }
                ListElement{ row: "row 1"; column: "column 3"; value: "3"; }
            }
            rowRole: "row"       // model role naming the bar's row
            columnRole: "column" // model role naming the bar's column
            valueRole: "value"   // model role holding the bar's value
        }
    }
}
{ "pile_set_name": "Github" }
package v1alpha1

import (
	"github.com/manifoldco/heighliner/internal/k8sutils"

	corev1 "k8s.io/api/core/v1"
	apiextv1beta1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1beta1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)

// VersionedMicroservice represents the combined state of different components
// in time which form a single Microservice.
// The +k8s marker below drives deepcopy code generation; it must stay
// immediately above the type declaration.
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type VersionedMicroservice struct {
	metav1.TypeMeta   `json:",inline"`
	metav1.ObjectMeta `json:"metadata"`

	Spec VersionedMicroserviceSpec `json:"spec"`
}

// VersionedMicroserviceList is a list of VersionedMicroservices.
// +k8s:deepcopy-gen:interfaces=k8s.io/apimachinery/pkg/runtime.Object
type VersionedMicroserviceList struct {
	metav1.TypeMeta `json:",inline"`
	metav1.ListMeta `json:"metadata"`

	Items []VersionedMicroservice `json:"items"`
}

// VersionedMicroserviceSpec represents the specification for a
// VersionedMicroservice.
// Availability, Config and Security are optional policies (omitted from the
// serialized form when nil); Containers is required by the validation schema
// below.
type VersionedMicroserviceSpec struct {
	Availability *AvailabilityPolicySpec `json:"availability,omitempty"`
	Config       *ConfigPolicySpec       `json:"config,omitempty"`
	Security     *SecurityPolicySpec     `json:"security,omitempty"`

	Containers       []corev1.Container            `json:"containers"`
	ImagePullSecrets []corev1.LocalObjectReference `json:"imagePullSecrets"`
}

// VersionedMicroserviceValidationSchema represents the OpenAPIV3Scheme which
// defines the validation for the VersionedMicroserviceSpec.
// Note the asymmetry: AvailabilityPolicyValidationSchema is already a
// JSONSchemaProps, while the Config/Security schemas are wrapped and need
// their OpenAPIV3Schema dereferenced.
var VersionedMicroserviceValidationSchema = apiextv1beta1.JSONSchemaProps{
	Properties: map[string]apiextv1beta1.JSONSchemaProps{
		"availability": AvailabilityPolicyValidationSchema,
		"config":       *ConfigPolicyValidationSchema.OpenAPIV3Schema,
		"security":     *SecurityPolicyValidationSchema.OpenAPIV3Schema,
		"containers": {
			// At least one container is required for a deployable service.
			MinItems: k8sutils.PtrInt64(1),
		},
	},
	Required: []string{
		"containers",
	},
}
{ "pile_set_name": "Github" }
<?php

namespace React\Stream;

use Evenement\EventEmitterInterface;

/**
 * A readable stream that pushes its data to listeners via events.
 *
 * Emitted events (per the declarations below):
 *
 * @event data  a chunk of data is available
 * @event end   the stream ended normally
 * @event error an error occurred on the stream
 * @event close the underlying resource was closed
 */
interface ReadableStreamInterface extends EventEmitterInterface
{
    /**
     * Whether the stream is currently readable (i.e. not ended/closed).
     *
     * @return bool
     */
    public function isReadable();

    /**
     * Pause the stream — presumably suspends emission of `data` events
     * until resume() is called; confirm against concrete implementations.
     */
    public function pause();

    /**
     * Resume a previously paused stream.
     */
    public function resume();

    /**
     * Pipe all data from this stream into a writable stream.
     *
     * @param WritableStreamInterface $dest    destination stream
     * @param array                   $options implementation-defined options
     */
    public function pipe(WritableStreamInterface $dest, array $options = array());

    /**
     * Close the stream and free the underlying resource.
     */
    public function close();
}
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: GPL-2.0+
/*
 * Standard Hot Plug Controller Driver
 *
 * Event handling and slot enable/disable state machine for SHPC
 * (Standard Hot Plug Controller) PCI slots.
 *
 * Copyright (C) 1995,2001 Compaq Computer Corporation
 * Copyright (C) 2001 Greg Kroah-Hartman ([email protected])
 * Copyright (C) 2001 IBM Corp.
 * Copyright (C) 2003-2004 Intel Corporation
 *
 * All rights reserved.
 *
 * Send feedback to <[email protected]>, <[email protected]>
 *
 */

#include <linux/module.h>
#include <linux/kernel.h>
#include <linux/types.h>
#include <linux/slab.h>
#include <linux/pci.h>
#include "../pci.h"
#include "shpchp.h"

static void interrupt_event_handler(struct work_struct *work);
static int shpchp_enable_slot(struct slot *p_slot);
static int shpchp_disable_slot(struct slot *p_slot);

/*
 * Queue an event for deferred handling by interrupt_event_handler() on the
 * slot's workqueue.  Called from interrupt context, hence GFP_ATOMIC.
 */
static int queue_interrupt_event(struct slot *p_slot, u32 event_type)
{
	struct event_info *info;

	info = kmalloc(sizeof(*info), GFP_ATOMIC);
	if (!info)
		return -ENOMEM;

	info->event_type = event_type;
	info->p_slot = p_slot;
	INIT_WORK(&info->work, interrupt_event_handler);

	queue_work(p_slot->wq, &info->work);

	return 0;
}

/* Attention button interrupt: record presence and queue a button event. */
u8 shpchp_handle_attention_button(u8 hp_slot, struct controller *ctrl)
{
	struct slot *p_slot;
	u32 event_type;

	/* Attention Button Change */
	ctrl_dbg(ctrl, "Attention button interrupt received\n");

	p_slot = shpchp_find_slot(ctrl, hp_slot + ctrl->slot_device_offset);
	p_slot->hpc_ops->get_adapter_status(p_slot, &(p_slot->presence_save));

	/*
	 * Button pressed - See if need to TAKE ACTION!!!
	 */
	ctrl_info(ctrl, "Button pressed on Slot(%s)\n", slot_name(p_slot));
	event_type = INT_BUTTON_PRESS;

	queue_interrupt_event(p_slot, event_type);

	return 0;
}

/*
 * Latch switch interrupt: an open latch on a powered, occupied slot is a
 * surprise removal and is escalated to a power-fault event.
 */
u8 shpchp_handle_switch_change(u8 hp_slot, struct controller *ctrl)
{
	struct slot *p_slot;
	u8 getstatus;
	u32 event_type;

	/* Switch Change */
	ctrl_dbg(ctrl, "Switch interrupt received\n");

	p_slot = shpchp_find_slot(ctrl, hp_slot + ctrl->slot_device_offset);
	p_slot->hpc_ops->get_adapter_status(p_slot, &(p_slot->presence_save));
	p_slot->hpc_ops->get_latch_status(p_slot, &getstatus);
	ctrl_dbg(ctrl, "Card present %x Power status %x\n",
		 p_slot->presence_save, p_slot->pwr_save);

	if (getstatus) {
		/*
		 * Switch opened
		 */
		ctrl_info(ctrl, "Latch open on Slot(%s)\n", slot_name(p_slot));
		event_type = INT_SWITCH_OPEN;
		if (p_slot->pwr_save && p_slot->presence_save) {
			event_type = INT_POWER_FAULT;
			ctrl_err(ctrl, "Surprise Removal of card\n");
		}
	} else {
		/*
		 * Switch closed
		 */
		ctrl_info(ctrl, "Latch close on Slot(%s)\n", slot_name(p_slot));
		event_type = INT_SWITCH_CLOSE;
	}

	queue_interrupt_event(p_slot, event_type);

	return 1;
}

/* Presence-detect interrupt: queue an insertion or removal event. */
u8 shpchp_handle_presence_change(u8 hp_slot, struct controller *ctrl)
{
	struct slot *p_slot;
	u32 event_type;

	/* Presence Change */
	ctrl_dbg(ctrl, "Presence/Notify input change\n");

	p_slot = shpchp_find_slot(ctrl, hp_slot + ctrl->slot_device_offset);

	/*
	 * Save the presence state
	 */
	p_slot->hpc_ops->get_adapter_status(p_slot, &(p_slot->presence_save));
	if (p_slot->presence_save) {
		/*
		 * Card Present
		 */
		ctrl_info(ctrl, "Card present on Slot(%s)\n", slot_name(p_slot));
		event_type = INT_PRESENCE_ON;
	} else {
		/*
		 * Not Present
		 */
		ctrl_info(ctrl, "Card not present on Slot(%s)\n",
			  slot_name(p_slot));
		event_type = INT_PRESENCE_OFF;
	}

	queue_interrupt_event(p_slot, event_type);

	return 1;
}

/*
 * Power-fault interrupt: p_slot->status doubles as a power-fault flag
 * (0xFF = fault latched, 0x00 = clear); board_added() checks it later.
 */
u8 shpchp_handle_power_fault(u8 hp_slot, struct controller *ctrl)
{
	struct slot *p_slot;
	u32 event_type;

	/* Power fault */
	ctrl_dbg(ctrl, "Power fault interrupt received\n");

	p_slot = shpchp_find_slot(ctrl, hp_slot + ctrl->slot_device_offset);

	if (!(p_slot->hpc_ops->query_power_fault(p_slot))) {
		/*
		 * Power fault Cleared
		 */
		ctrl_info(ctrl, "Power fault cleared on Slot(%s)\n",
			  slot_name(p_slot));
		p_slot->status = 0x00;
		event_type = INT_POWER_FAULT_CLEAR;
	} else {
		/*
		 * Power fault
		 */
		ctrl_info(ctrl, "Power fault on Slot(%s)\n", slot_name(p_slot));
		event_type = INT_POWER_FAULT;
		/* set power fault status for this board */
		p_slot->status = 0xFF;
		ctrl_info(ctrl, "Power fault bit %x set\n", hp_slot);
	}

	queue_interrupt_event(p_slot, event_type);

	return 1;
}

/* The following routines constitute the bulk of the hotplug controller logic */

/* Issue the HPC command to switch the bus to the given speed. */
static int change_bus_speed(struct controller *ctrl, struct slot *p_slot,
		enum pci_bus_speed speed)
{
	int rc = 0;

	ctrl_dbg(ctrl, "Change speed to %d\n", speed);
	rc = p_slot->hpc_ops->set_bus_speed_mode(p_slot, speed);
	if (rc) {
		ctrl_err(ctrl, "%s: Issue of set bus speed mode command failed\n",
			 __func__);
		return WRONG_BUS_FREQUENCY;
	}
	return rc;
}

/*
 * Reconcile adapter speed (asp) with current (bsp) and maximum (msp) bus
 * speed.  If other devices share the bus (flag set), the speed cannot be
 * changed, so an adapter slower than the bus is rejected.
 */
static int fix_bus_speed(struct controller *ctrl, struct slot *pslot,
		u8 flag, enum pci_bus_speed asp, enum pci_bus_speed bsp,
		enum pci_bus_speed msp)
{
	int rc = 0;

	/*
	 * If other slots on the same bus are occupied, we cannot
	 * change the bus speed.
	 */
	if (flag) {
		if (asp < bsp) {
			ctrl_err(ctrl, "Speed of bus %x and adapter %x mismatch\n",
				 bsp, asp);
			rc = WRONG_BUS_FREQUENCY;
		}
		return rc;
	}

	if (asp < msp) {
		if (bsp != asp)
			rc = change_bus_speed(ctrl, pslot, asp);
	} else {
		if (bsp != msp)
			rc = change_bus_speed(ctrl, pslot, msp);
	}
	return rc;
}

/**
 * board_added - Called after a board has been added to the system.
 * @p_slot: target &slot
 *
 * Turns power on for the board.
 * Configures board.
 */
static int board_added(struct slot *p_slot)
{
	u8 hp_slot;
	u8 slots_not_empty = 0;
	int rc = 0;
	enum pci_bus_speed asp, bsp, msp;
	struct controller *ctrl = p_slot->ctrl;
	struct pci_bus *parent = ctrl->pci_dev->subordinate;

	hp_slot = p_slot->device - ctrl->slot_device_offset;

	ctrl_dbg(ctrl, "%s: p_slot->device, slot_offset, hp_slot = %d, %d ,%d\n",
		 __func__, p_slot->device, ctrl->slot_device_offset, hp_slot);

	/* Power on slot without connecting to bus */
	rc = p_slot->hpc_ops->power_on_slot(p_slot);
	if (rc) {
		ctrl_err(ctrl, "Failed to power on slot\n");
		return -1;
	}

	/* Quirk for Intel 0x0332: force 33MHz and enable before speed probe. */
	if ((ctrl->pci_dev->vendor == 0x8086) && (ctrl->pci_dev->device == 0x0332)) {
		rc = p_slot->hpc_ops->set_bus_speed_mode(p_slot, PCI_SPEED_33MHz);
		if (rc) {
			ctrl_err(ctrl, "%s: Issue of set bus speed mode command failed\n",
				 __func__);
			return WRONG_BUS_FREQUENCY;
		}

		/* turn on board, blink green LED, turn off Amber LED */
		rc = p_slot->hpc_ops->slot_enable(p_slot);
		if (rc) {
			ctrl_err(ctrl, "Issue of Slot Enable command failed\n");
			return rc;
		}
	}

	rc = p_slot->hpc_ops->get_adapter_speed(p_slot, &asp);
	if (rc) {
		ctrl_err(ctrl, "Can't get adapter speed or bus mode mismatch\n");
		return WRONG_BUS_FREQUENCY;
	}

	bsp = ctrl->pci_dev->subordinate->cur_bus_speed;
	msp = ctrl->pci_dev->subordinate->max_bus_speed;

	/* Check if there are other slots or devices on the same bus */
	if (!list_empty(&ctrl->pci_dev->subordinate->devices))
		slots_not_empty = 1;

	ctrl_dbg(ctrl, "%s: slots_not_empty %d, adapter_speed %d, bus_speed %d, max_bus_speed %d\n",
		 __func__, slots_not_empty, asp, bsp, msp);

	rc = fix_bus_speed(ctrl, p_slot, slots_not_empty, asp, bsp, msp);
	if (rc)
		return rc;

	/* turn on board, blink green LED, turn off Amber LED */
	rc = p_slot->hpc_ops->slot_enable(p_slot);
	if (rc) {
		ctrl_err(ctrl, "Issue of Slot Enable command failed\n");
		return rc;
	}

	/* Wait for ~1 second */
	msleep(1000);

	ctrl_dbg(ctrl, "%s: slot status = %x\n", __func__, p_slot->status);
	/* Check for a power fault (latched by shpchp_handle_power_fault()) */
	if (p_slot->status == 0xFF) {
		/* power fault occurred, but it was benign */
		ctrl_dbg(ctrl, "%s: Power fault\n", __func__);
		rc = POWER_FAILURE;
		p_slot->status = 0;
		goto err_exit;
	}

	if (shpchp_configure_device(p_slot)) {
		ctrl_err(ctrl, "Cannot add device at %04x:%02x:%02x\n",
			 pci_domain_nr(parent), p_slot->bus, p_slot->device);
		goto err_exit;
	}

	p_slot->status = 0;
	p_slot->is_a_board = 0x01;
	p_slot->pwr_save = 1;

	p_slot->hpc_ops->green_led_on(p_slot);

	return 0;

err_exit:
	/* turn off slot, turn on Amber LED, turn off Green LED */
	rc = p_slot->hpc_ops->slot_disable(p_slot);
	if (rc) {
		ctrl_err(ctrl, "%s: Issue of Slot Disable command failed\n",
			 __func__);
		return rc;
	}

	return(rc);
}

/**
 * remove_board - Turns off slot and LEDs
 * @p_slot: target &slot
 */
static int remove_board(struct slot *p_slot)
{
	struct controller *ctrl = p_slot->ctrl;
	u8 hp_slot;
	int rc;

	if (shpchp_unconfigure_device(p_slot))
		return(1);

	hp_slot = p_slot->device - ctrl->slot_device_offset;
	/*
	 * NOTE(review): this re-find resolves to the same slot that was
	 * passed in (offset is subtracted then re-added) — appears redundant;
	 * confirm before simplifying.
	 */
	p_slot = shpchp_find_slot(ctrl, hp_slot + ctrl->slot_device_offset);

	ctrl_dbg(ctrl, "%s: hp_slot = %d\n", __func__, hp_slot);

	/* Change status to shutdown */
	if (p_slot->is_a_board)
		p_slot->status = 0x01;

	/* turn off slot, turn on Amber LED, turn off Green LED */
	rc = p_slot->hpc_ops->slot_disable(p_slot);
	if (rc) {
		ctrl_err(ctrl, "%s: Issue of Slot Disable command failed\n",
			 __func__);
		return rc;
	}

	rc = p_slot->hpc_ops->set_attention_status(p_slot, 0);
	if (rc) {
		ctrl_err(ctrl, "Issue of Set Attention command failed\n");
		return rc;
	}

	p_slot->pwr_save = 0;
	p_slot->is_a_board = 0;

	return 0;
}

struct pushbutton_work_info {
	struct slot *p_slot;
	struct work_struct work;
};

/**
 * shpchp_pushbutton_thread - handle pushbutton events
 * @work: &struct work_struct to be handled
 *
 * Scheduled procedure to handle blocking stuff for the pushbuttons.
 * Handles all pending events and exits.
 */
static void shpchp_pushbutton_thread(struct work_struct *work)
{
	struct pushbutton_work_info *info =
		container_of(work, struct pushbutton_work_info, work);
	struct slot *p_slot = info->p_slot;

	mutex_lock(&p_slot->lock);
	switch (p_slot->state) {
	case POWEROFF_STATE:
		/* Drop the lock around the (blocking) HPC operation. */
		mutex_unlock(&p_slot->lock);
		shpchp_disable_slot(p_slot);
		mutex_lock(&p_slot->lock);
		p_slot->state = STATIC_STATE;
		break;
	case POWERON_STATE:
		mutex_unlock(&p_slot->lock);
		if (shpchp_enable_slot(p_slot))
			p_slot->hpc_ops->green_led_off(p_slot);
		mutex_lock(&p_slot->lock);
		p_slot->state = STATIC_STATE;
		break;
	default:
		break;
	}
	mutex_unlock(&p_slot->lock);

	kfree(info);
}

/*
 * Delayed-work callback scheduled 5s after an attention-button press.
 * Commits the pending blink state to power-on/off and queues the
 * blocking work on the slot's workqueue.
 */
void shpchp_queue_pushbutton_work(struct work_struct *work)
{
	struct slot *p_slot = container_of(work, struct slot, work.work);
	struct pushbutton_work_info *info;

	info = kmalloc(sizeof(*info), GFP_KERNEL);
	if (!info) {
		ctrl_err(p_slot->ctrl, "%s: Cannot allocate memory\n",
			 __func__);
		return;
	}
	info->p_slot = p_slot;
	INIT_WORK(&info->work, shpchp_pushbutton_thread);

	mutex_lock(&p_slot->lock);
	switch (p_slot->state) {
	case BLINKINGOFF_STATE:
		p_slot->state = POWEROFF_STATE;
		break;
	case BLINKINGON_STATE:
		p_slot->state = POWERON_STATE;
		break;
	default:
		/* Press was cancelled or state changed: drop the work item. */
		kfree(info);
		goto out;
	}
	queue_work(p_slot->wq, &info->work);
out:
	mutex_unlock(&p_slot->lock);
}

/* Re-read slot status from the HPC and publish it to the hotplug core. */
static int update_slot_info (struct slot *slot)
{
	struct hotplug_slot_info *info;
	int result;

	info = kmalloc(sizeof(*info), GFP_KERNEL);
	if (!info)
		return -ENOMEM;

	slot->hpc_ops->get_power_status(slot, &(info->power_status));
	slot->hpc_ops->get_attention_status(slot, &(info->attention_status));
	slot->hpc_ops->get_latch_status(slot, &(info->latch_status));
	slot->hpc_ops->get_adapter_status(slot, &(info->adapter_status));

	result = pci_hp_change_slot_info(slot->hotplug_slot, info);

	kfree (info);
	return result;
}

/*
 * Attention-button state machine: first press arms a 5s blinking window
 * (hot-add or hot-remove depending on power state); a second press within
 * the window cancels it.
 *
 * Note: This function must be called with slot->lock held
 */
static void handle_button_press_event(struct slot *p_slot)
{
	u8 getstatus;
	struct controller *ctrl = p_slot->ctrl;

	switch (p_slot->state) {
	case STATIC_STATE:
		p_slot->hpc_ops->get_power_status(p_slot, &getstatus);
		if (getstatus) {
			p_slot->state = BLINKINGOFF_STATE;
			ctrl_info(ctrl, "PCI slot #%s - powering off due to button press\n",
				  slot_name(p_slot));
		} else {
			p_slot->state = BLINKINGON_STATE;
			ctrl_info(ctrl, "PCI slot #%s - powering on due to button press\n",
				  slot_name(p_slot));
		}
		/* blink green LED and turn off amber */
		p_slot->hpc_ops->green_led_blink(p_slot);
		p_slot->hpc_ops->set_attention_status(p_slot, 0);

		queue_delayed_work(p_slot->wq, &p_slot->work, 5*HZ);
		break;
	case BLINKINGOFF_STATE:
	case BLINKINGON_STATE:
		/*
		 * Cancel if we are still blinking; this means that we
		 * press the attention again before the 5 sec. limit
		 * expires to cancel hot-add or hot-remove
		 */
		ctrl_info(ctrl, "Button cancel on Slot(%s)\n", slot_name(p_slot));
		cancel_delayed_work(&p_slot->work);
		/* Restore the LED to reflect the unchanged power state. */
		if (p_slot->state == BLINKINGOFF_STATE)
			p_slot->hpc_ops->green_led_on(p_slot);
		else
			p_slot->hpc_ops->green_led_off(p_slot);
		p_slot->hpc_ops->set_attention_status(p_slot, 0);
		ctrl_info(ctrl, "PCI slot #%s - action canceled due to button press\n",
			  slot_name(p_slot));
		p_slot->state = STATIC_STATE;
		break;
	case POWEROFF_STATE:
	case POWERON_STATE:
		/*
		 * Ignore if the slot is on power-on or power-off state;
		 * this means that the previous attention button action
		 * to hot-add or hot-remove is undergoing
		 */
		ctrl_info(ctrl, "Button ignore on Slot(%s)\n", slot_name(p_slot));
		update_slot_info(p_slot);
		break;
	default:
		ctrl_warn(ctrl, "Not a valid state\n");
		break;
	}
}

/* Workqueue handler for events queued by queue_interrupt_event(). */
static void interrupt_event_handler(struct work_struct *work)
{
	struct event_info *info = container_of(work, struct event_info, work);
	struct slot *p_slot = info->p_slot;

	mutex_lock(&p_slot->lock);
	switch (info->event_type) {
	case INT_BUTTON_PRESS:
		handle_button_press_event(p_slot);
		break;
	case INT_POWER_FAULT:
		ctrl_dbg(p_slot->ctrl, "%s: Power fault\n", __func__);
		p_slot->hpc_ops->set_attention_status(p_slot, 1);
		p_slot->hpc_ops->green_led_off(p_slot);
		break;
	default:
		update_slot_info(p_slot);
		break;
	}
	mutex_unlock(&p_slot->lock);

	kfree(info);
}

/*
 * Power up and configure a slot after verifying it is occupied, latched
 * and currently off.  Serialized by the controller's crit_sect mutex.
 */
static int shpchp_enable_slot (struct slot *p_slot)
{
	u8 getstatus = 0;
	int rc, retval = -ENODEV;
	struct controller *ctrl = p_slot->ctrl;

	/* Check to see if (latch closed, card present, power off) */
	mutex_lock(&p_slot->ctrl->crit_sect);
	rc = p_slot->hpc_ops->get_adapter_status(p_slot, &getstatus);
	if (rc || !getstatus) {
		ctrl_info(ctrl, "No adapter on slot(%s)\n", slot_name(p_slot));
		goto out;
	}
	rc = p_slot->hpc_ops->get_latch_status(p_slot, &getstatus);
	if (rc || getstatus) {
		ctrl_info(ctrl, "Latch open on slot(%s)\n", slot_name(p_slot));
		goto out;
	}
	rc = p_slot->hpc_ops->get_power_status(p_slot, &getstatus);
	if (rc || getstatus) {
		ctrl_info(ctrl, "Already enabled on slot(%s)\n",
			  slot_name(p_slot));
		goto out;
	}

	p_slot->is_a_board = 1;

	/* We have to save the presence info for these slots */
	p_slot->hpc_ops->get_adapter_status(p_slot, &(p_slot->presence_save));
	p_slot->hpc_ops->get_power_status(p_slot, &(p_slot->pwr_save));
	ctrl_dbg(ctrl, "%s: p_slot->pwr_save %x\n", __func__, p_slot->pwr_save);
	p_slot->hpc_ops->get_latch_status(p_slot, &getstatus);

	/*
	 * NOTE(review): the '||' below matches any AMD device OR any
	 * vendor's device ID 0x7458; an '&&' (AMD POGO 7458 specifically)
	 * may have been intended — confirm against the POGO errata before
	 * changing.
	 */
	if (((p_slot->ctrl->pci_dev->vendor == PCI_VENDOR_ID_AMD) ||
	     (p_slot->ctrl->pci_dev->device == PCI_DEVICE_ID_AMD_POGO_7458))
	     && p_slot->ctrl->num_slots == 1) {
		/* handle amd pogo errata; this must be done before enable  */
		amd_pogo_errata_save_misc_reg(p_slot);
		retval = board_added(p_slot);
		/* handle amd pogo errata; this must be done after enable  */
		amd_pogo_errata_restore_misc_reg(p_slot);
	} else
		retval = board_added(p_slot);

	if (retval) {
		p_slot->hpc_ops->get_adapter_status(p_slot,
				&(p_slot->presence_save));
		p_slot->hpc_ops->get_latch_status(p_slot, &getstatus);
	}

	update_slot_info(p_slot);
out:
	mutex_unlock(&p_slot->ctrl->crit_sect);
	return retval;
}

/*
 * Power down and unconfigure a slot after verifying it is occupied,
 * latched and currently on.  Serialized by crit_sect.
 */
static int shpchp_disable_slot (struct slot *p_slot)
{
	u8 getstatus = 0;
	int rc, retval = -ENODEV;
	struct controller *ctrl = p_slot->ctrl;

	if (!p_slot->ctrl)
		return -ENODEV;

	/* Check to see if (latch closed, card present, power on) */
	mutex_lock(&p_slot->ctrl->crit_sect);

	rc = p_slot->hpc_ops->get_adapter_status(p_slot, &getstatus);
	if (rc || !getstatus) {
		ctrl_info(ctrl, "No adapter on slot(%s)\n", slot_name(p_slot));
		goto out;
	}
	rc = p_slot->hpc_ops->get_latch_status(p_slot, &getstatus);
	if (rc || getstatus) {
		ctrl_info(ctrl, "Latch open on slot(%s)\n", slot_name(p_slot));
		goto out;
	}
	rc = p_slot->hpc_ops->get_power_status(p_slot, &getstatus);
	if (rc || !getstatus) {
		ctrl_info(ctrl, "Already disabled on slot(%s)\n",
			  slot_name(p_slot));
		goto out;
	}

	retval = remove_board(p_slot);
	update_slot_info(p_slot);
out:
	mutex_unlock(&p_slot->ctrl->crit_sect);
	return retval;
}

/* sysfs entry point: enable the slot, honoring the button state machine. */
int shpchp_sysfs_enable_slot(struct slot *p_slot)
{
	int retval = -ENODEV;
	struct controller *ctrl = p_slot->ctrl;

	mutex_lock(&p_slot->lock);
	switch (p_slot->state) {
	case BLINKINGON_STATE:
		cancel_delayed_work(&p_slot->work);
		/* fall through */
	case STATIC_STATE:
		p_slot->state = POWERON_STATE;
		mutex_unlock(&p_slot->lock);
		retval = shpchp_enable_slot(p_slot);
		mutex_lock(&p_slot->lock);
		p_slot->state = STATIC_STATE;
		break;
	case POWERON_STATE:
		ctrl_info(ctrl, "Slot %s is already in powering on state\n",
			  slot_name(p_slot));
		break;
	case BLINKINGOFF_STATE:
	case POWEROFF_STATE:
		ctrl_info(ctrl, "Already enabled on slot %s\n",
			  slot_name(p_slot));
		break;
	default:
		ctrl_err(ctrl, "Not a valid state on slot %s\n",
			 slot_name(p_slot));
		break;
	}
	mutex_unlock(&p_slot->lock);

	return retval;
}

/* sysfs entry point: disable the slot, honoring the button state machine. */
int shpchp_sysfs_disable_slot(struct slot *p_slot)
{
	int retval = -ENODEV;
	struct controller *ctrl = p_slot->ctrl;

	mutex_lock(&p_slot->lock);
	switch (p_slot->state) {
	case BLINKINGOFF_STATE:
		cancel_delayed_work(&p_slot->work);
		/* fall through */
	case STATIC_STATE:
		p_slot->state = POWEROFF_STATE;
		mutex_unlock(&p_slot->lock);
		retval = shpchp_disable_slot(p_slot);
		mutex_lock(&p_slot->lock);
		p_slot->state = STATIC_STATE;
		break;
	case POWEROFF_STATE:
		ctrl_info(ctrl, "Slot %s is already in powering off state\n",
			  slot_name(p_slot));
		break;
	case BLINKINGON_STATE:
	case POWERON_STATE:
		ctrl_info(ctrl, "Already disabled on slot %s\n",
			  slot_name(p_slot));
		break;
	default:
		ctrl_err(ctrl, "Not a valid state on slot %s\n",
			 slot_name(p_slot));
		break;
	}
	mutex_unlock(&p_slot->lock);

	return retval;
}
{ "pile_set_name": "Github" }
(*===-- llvm_scalar_opts.ml - LLVM Ocaml Interface -------------*- OCaml -*-===* * * The LLVM Compiler Infrastructure * * This file is distributed under the University of Illinois Open Source * License. See LICENSE.TXT for details. * *===----------------------------------------------------------------------===*) external add_constant_propagation : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_constant_propagation" external add_sccp : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_sccp" external add_dead_store_elimination : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_dead_store_elimination" external add_aggressive_dce : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_aggressive_dce" external add_scalar_repl_aggregation : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_scalar_repl_aggregation" external add_scalar_repl_aggregation_ssa : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_scalar_repl_aggregation_ssa" external add_scalar_repl_aggregation_with_threshold : int -> [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_scalar_repl_aggregation_with_threshold" external add_ind_var_simplification : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_ind_var_simplification" external add_instruction_combination : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_instruction_combination" external add_licm : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_licm" external add_loop_unswitch : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_loop_unswitch" external add_loop_unroll : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_loop_unroll" external add_loop_rotation : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_loop_rotation" external add_memory_to_register_promotion : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_memory_to_register_promotion" external 
add_memory_to_register_demotion : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_memory_to_register_demotion" external add_reassociation : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_reassociation" external add_jump_threading : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_jump_threading" external add_cfg_simplification : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_cfg_simplification" external add_tail_call_elimination : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_tail_call_elimination" external add_gvn : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_gvn" external add_memcpy_opt : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_memcpy_opt" external add_loop_deletion : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_loop_deletion" external add_loop_idiom : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_loop_idiom" external add_lib_call_simplification : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_lib_call_simplification" external add_verifier : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_verifier" external add_correlated_value_propagation : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_correlated_value_propagation" external add_early_cse : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_early_cse" external add_lower_expect_intrinsic : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_lower_expect_intrinsic" external add_type_based_alias_analysis : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_type_based_alias_analysis" external add_basic_alias_analysis : [<Llvm.PassManager.any] Llvm.PassManager.t -> unit = "llvm_add_basic_alias_analysis"
{ "pile_set_name": "Github" }
<?php declare(strict_types=1); namespace bizley\migration\table; use bizley\migration\Schema; use function in_array; final class TinyIntegerColumn extends Column implements ColumnInterface { /** @var array<string> Schemas using length for this column */ private $lengthSchemas = [ Schema::MYSQL, Schema::OCI, ]; /** * Returns length of the column. * @param string|null $schema * @param string|null $engineVersion * @return int|string|null */ public function getLength(string $schema = null, string $engineVersion = null) { return in_array($schema, $this->lengthSchemas, true) ? $this->getSize() : null; } /** * Sets length of the column. * @param string|int|null $value * @param string|null $schema * @param string|null $engineVersion */ public function setLength($value, string $schema = null, string $engineVersion = null): void { if (in_array($schema, $this->lengthSchemas, true)) { $this->setSize($value); $this->setPrecision($value); } } /** * Returns default column definition. * @return string */ public function getDefinition(): string { return 'tinyInteger({renderLength})'; } }
{ "pile_set_name": "Github" }
// // Copyright (c) Microsoft. All rights reserved. // Licensed under the MIT license. See LICENSE.md file in the project root for full license information. // #pragma once #include <stdint.h> #include <vector> #include <boost/noncopyable.hpp> #include "Index.h" #include "CorpusDescriptor.h" #include "BufferedFileReader.h" #include "FileWrapper.h" namespace CNTK { class IndexedSequence { // !!! Please update the Index s_version below if this stucture is modified. size_t key; // Sequence key, uniquely identifies the sequence. uint32_t numberOfSamples; size_t offset; // offset in file. uint32_t size; // size in bytes friend class Index; friend class ChunkDescriptor; public: IndexedSequence& SetKey(size_t value) { key = value; return *this; } IndexedSequence& SetNumberOfSamples(uint32_t value) { numberOfSamples = value; return *this; } IndexedSequence& SetOffset(size_t value) { offset = value; return *this; } IndexedSequence& SetSize(size_t value) { size = static_cast<uint32_t>(value); if (size != value) RuntimeError("Sequence size overflows uint32_t type: %lu vs %u.", value, size); return *this; } }; class IndexBuilder : private boost::noncopyable { struct Prefix { Prefix() = default; Prefix(uint64_t magic, uint64_t version, uint64_t totalNumberOfSequences, uint64_t firstSequenceOffset = sizeof(Prefix)) : magic{ magic }, version{ version }, totalNumberOfSequences{ totalNumberOfSequences }, firstSequenceOffset{ firstSequenceOffset } {} uint64_t magic; uint64_t version; uint64_t totalNumberOfSequences; uint64_t firstSequenceOffset; // this offset is set to the size of prefix for the moment // but eventually, this can be used to append additional staff after prefix, without breaking // back compat. }; public: // Reads the input file, building and index of chunks and corresponding // sequences. 
Returns input data index (chunk and sequence metadata); std::shared_ptr<Index> Build(); IndexBuilder& SetPrimary(bool primary) { m_primary = primary; return *this; } IndexBuilder& SetChunkSize(size_t size) { m_chunkSize = size; return *this; } IndexBuilder& SetCorpus(CorpusDescriptorPtr corpus) { m_corpus = corpus; return *this; } IndexBuilder& SetBufferSize(size_t size) { m_bufferSize = size; return *this; } IndexBuilder& SetCachingEnabled(bool value) { m_isCacheEnabled = value; return *this; } virtual std::wstring GetCacheFilename() = 0; protected: IndexBuilder(const FileWrapper& input); ~IndexBuilder() = default; virtual void Populate(std::shared_ptr<Index>&) = 0; FileWrapper m_input; CorpusDescriptorPtr m_corpus; size_t m_bufferSize; bool m_primary; size_t m_chunkSize; bool m_isCacheEnabled; static const uint64_t s_version = 1; private: static std::shared_ptr<Index> TryLoadFromCache(const std::wstring& cacheFilename, size_t chunkSize); void WriteIndexCacheAsync(std::shared_ptr<Index>& index); std::shared_ptr<Index> m_index; static const uint64_t s_magic = 0x636e746b5f696478; // 'cntk_idx' }; // A helper class that does a pass over the input file building up // an index consisting of sequence and chunk descriptors (which among // others specify size and file offset of the respective structure). // As opposed to the data deserializer, indexer performs almost no parsing // and therefore is several magnitudes faster. 
class TextInputIndexBuilder : public IndexBuilder { public: TextInputIndexBuilder(const FileWrapper& input); TextInputIndexBuilder& SetSkipSequenceIds(bool skip) { m_skipSequenceIds = skip; return *this; } TextInputIndexBuilder& SetMainStream(const std::string& name) { m_mainStream = name; return *this; } TextInputIndexBuilder& SetStreamPrefix(char prefix) { m_streamPrefix = prefix; return *this; } virtual std::wstring GetCacheFilename() override; private: // Implementation of the Knuth-Morris-Pratt (linear string search without backup) // algorithm adopted from http://algs4.cs.princeton.edu/53substring/KMPplus.java.html struct KMP { KMP(const std::string& value); std::string pattern; std::vector<int> next; // failure function table }; virtual void Populate(std::shared_ptr<Index>& index) override; size_t m_fileSize; bool m_skipSequenceIds; // true, when input contains one sequence per line // or when sequence id column was ignored during indexing. char m_streamPrefix; // Stream that defines the size of the sequence. std::string m_mainStream; std::unique_ptr<KMP> m_nfa; std::unique_ptr<BufferedFileReader> m_reader; // Returns true if main stream name if found on the current line. bool FindMainStream(); // Invokes either TryGetNumericSequenceId or TryGetSymbolicSequenceId depending // on the specified corpus settings. bool TryGetSequenceId(size_t& id); // Tries to get numeric sequence id. // Throws an exception if a non-numerical is read until the pipe character or // EOF is reached without hitting the pipe character. // Returns false if no numerical characters are found preceding the pipe. // Otherwise, writes sequence id value to the provided reference, returns true. bool TryGetNumericSequenceId(size_t& id); // Same as above but for symbolic ids. // It reads a symbolic key and converts it to numeric id using provided keyToId function. 
bool TryGetSymbolicSequenceId(size_t& id, std::function<size_t(const std::string&)> keyToId); void PopulateImpl(std::shared_ptr<Index>& index); // Parses input line by line, treating each line as an individual sequence. // Ignores sequence id information, using the line number instead as the id. void PopulateFromLines(std::shared_ptr<Index>& index); }; }
{ "pile_set_name": "Github" }
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.beam.runners.dataflow.worker.status; import java.io.IOException; import java.io.PrintWriter; import java.util.ArrayList; import java.util.Arrays; import java.util.Comparator; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Objects; import javax.servlet.http.HttpServletRequest; import javax.servlet.http.HttpServletResponse; import org.apache.beam.runners.dataflow.worker.status.DebugCapture.Capturable; import org.apache.beam.vendor.guava.v26_0_jre.com.google.common.annotations.VisibleForTesting; import org.checkerframework.checker.nullness.qual.Nullable; /** Respond to /threadz with the stack traces of all running threads. */ class ThreadzServlet extends BaseStatusServlet implements Capturable { public ThreadzServlet() { super("threadz"); } /** * Class representing the execution state of a thread. * * <p>Can be used in hash maps. 
*/ @VisibleForTesting static class Stack { final StackTraceElement[] elements; final Thread.State state; Stack(StackTraceElement[] elements, Thread.State state) { this.elements = elements; this.state = state; } @Override public int hashCode() { return Objects.hash(Arrays.deepHashCode(elements), state); } @Override public boolean equals(@Nullable Object other) { if (other == this) { return true; } else if (!(other instanceof Stack)) { return false; } else { Stack that = (Stack) other; return state == that.state && Arrays.deepEquals(elements, that.elements); } } } @VisibleForTesting static Map<Stack, List<String>> deduplicateThreadStacks( Map<Thread, StackTraceElement[]> allStacks) { Map<Stack, List<String>> stacks = new HashMap<>(); for (Map.Entry<Thread, StackTraceElement[]> entry : allStacks.entrySet()) { Thread thread = entry.getKey(); if (thread != Thread.currentThread()) { Stack stack = new Stack(entry.getValue(), thread.getState()); List<String> threads = stacks.get(stack); if (threads == null) { threads = new ArrayList<>(); stacks.put(stack, threads); } threads.add(thread.toString()); } } return stacks; } @Override public void doGet(HttpServletRequest request, HttpServletResponse response) throws IOException { response.setContentType("text/plain;charset=utf-8"); response.setStatus(HttpServletResponse.SC_OK); captureData(response.getWriter()); } @Override public String pageName() { return "/threadz"; } @Override public void captureData(PrintWriter writer) { // First, build a map of stacks to the threads that have that stack. Map<Stack, List<String>> stacks = deduplicateThreadStacks(Thread.getAllStackTraces()); // Then, print out each stack along with the threads that share it. Stacks with more threads // are printed first. 
stacks.entrySet().stream() .sorted(Comparator.comparingInt(e -> -e.getValue().size())) .forEachOrdered( entry -> { Stack stack = entry.getKey(); List<String> threads = entry.getValue(); writer.println( "--- Threads (" + threads.size() + "): " + threads + " State: " + stack.state + " stack: ---"); for (StackTraceElement element : stack.elements) { writer.println(" " + element); } writer.println(); }); } }
{ "pile_set_name": "Github" }
use std::mem; use crate::internal_prelude::*; use crate::h5c::{H5C_cache_decr_mode, H5C_cache_flash_incr_mode, H5C_cache_incr_mode}; pub const H5AC__CURR_CACHE_CONFIG_VERSION: c_int = 1; pub const H5AC__MAX_TRACE_FILE_NAME_LEN: usize = 1024; pub const H5AC_METADATA_WRITE_STRATEGY__PROCESS_0_ONLY: c_int = 0; pub const H5AC_METADATA_WRITE_STRATEGY__DISTRIBUTED: c_int = 1; pub const H5AC__CACHE_IMAGE__ENTRY_AGEOUT__NONE: i32 = -1; pub const H5AC__CACHE_IMAGE__ENTRY_AGEOUT__MAX: i32 = 100; #[repr(C)] #[derive(Copy, Clone)] pub struct H5AC_cache_config_t { pub version: c_int, pub rpt_fcn_enabled: hbool_t, pub open_trace_file: hbool_t, pub close_trace_file: hbool_t, pub trace_file_name: [c_char; H5AC__MAX_TRACE_FILE_NAME_LEN + 1], pub evictions_enabled: hbool_t, pub set_initial_size: hbool_t, pub initial_size: size_t, pub min_clean_fraction: c_double, pub max_size: size_t, pub min_size: size_t, pub epoch_length: c_long, pub incr_mode: H5C_cache_incr_mode, pub lower_hr_threshold: c_double, pub increment: c_double, pub apply_max_increment: hbool_t, pub max_increment: size_t, pub flash_incr_mode: H5C_cache_flash_incr_mode, pub flash_multiple: c_double, pub flash_threshold: c_double, pub decr_mode: H5C_cache_decr_mode, pub upper_hr_threshold: c_double, pub decrement: c_double, pub apply_max_decrement: hbool_t, pub max_decrement: size_t, pub epochs_before_eviction: c_int, pub apply_empty_reserve: hbool_t, pub empty_reserve: c_double, #[cfg(not(hdf5_1_10_0))] pub dirty_bytes_threshold: c_int, #[cfg(hdf5_1_10_0)] pub dirty_bytes_threshold: size_t, pub metadata_write_strategy: c_int, } impl Default for H5AC_cache_config_t { fn default() -> Self { unsafe { mem::zeroed() } } } #[cfg(hdf5_1_10_1)] mod hdf5_1_10_1 { use super::*; #[repr(C)] #[derive(Debug, Copy, Clone)] pub struct H5AC_cache_image_config_t { pub version: c_int, pub generate_image: hbool_t, pub save_resize_status: hbool_t, pub entry_ageout: c_int, } impl Default for H5AC_cache_image_config_t { fn default() -> Self { 
unsafe { mem::zeroed() } } } } #[cfg(hdf5_1_10_1)] pub use self::hdf5_1_10_1::*;
{ "pile_set_name": "Github" }
stderr of test 'NOT_x_LIKE-triggers-too_many_nested_operators-error.Bug-3871` in directory 'sql/test/BugTracker-2015` itself: # 13:02:54 > # 13:02:54 > "mserver5" "--debug=10" "--set" "gdk_nr_threads=0" "--set" "mapi_open=true" "--set" "mapi_port=31655" "--set" "mapi_usock=/var/tmp/mtest-29864/.s.monetdb.31655" "--set" "monet_prompt=" "--forcemito" "--set" "mal_listing=2" "--dbpath=/Users/jennie/scratch/monet-install/Jul2015/debug/var/MonetDB/mTests_sql_test_BugTracker-2015" "--set" "mal_listing=0" "--set" "embedded_r=yes" # 13:02:54 > # builtin opt gdk_dbpath = /Users/jennie/scratch/monet-install/Jul2015/debug/var/monetdb5/dbfarm/demo # builtin opt gdk_debug = 0 # builtin opt gdk_vmtrim = no # builtin opt monet_prompt = > # builtin opt monet_daemon = no # builtin opt mapi_port = 50000 # builtin opt mapi_open = false # builtin opt mapi_autosense = false # builtin opt sql_optimizer = default_pipe # builtin opt sql_debug = 0 # cmdline opt gdk_nr_threads = 0 # cmdline opt mapi_open = true # cmdline opt mapi_port = 31655 # cmdline opt mapi_usock = /var/tmp/mtest-29864/.s.monetdb.31655 # cmdline opt monet_prompt = # cmdline opt mal_listing = 2 # cmdline opt gdk_dbpath = /Users/jennie/scratch/monet-install/Jul2015/debug/var/MonetDB/mTests_sql_test_BugTracker-2015 # cmdline opt mal_listing = 0 # cmdline opt embedded_r = yes # cmdline opt gdk_debug = 536870922 # 13:02:55 > # 13:02:55 > "mclient" "-lsql" "-ftest" "-Eutf-8" "-i" "-e" "--host=/var/tmp/mtest-29864" "--port=31655" # 13:02:55 > # 13:02:55 > # 13:02:55 > "Done." # 13:02:55 >
{ "pile_set_name": "Github" }
// Copyright (c) André N. Klingsheim. See License.txt in the project root for license information. namespace NWebsec.AspNetCore.Mvc.Csp { /// <summary> /// When applied to a controller or action method, enables the media-src directive for the CSP Report Only header (CSP 1.0). /// </summary> public class CspMediaSrcReportOnlyAttribute : CspMediaSrcAttribute { protected override bool ReportOnly => true; } }
{ "pile_set_name": "Github" }
// Created by cgo -godefs - DO NOT EDIT // cgo -godefs defs_darwin.go package route const ( sysAF_UNSPEC = 0x0 sysAF_INET = 0x2 sysAF_ROUTE = 0x11 sysAF_LINK = 0x12 sysAF_INET6 = 0x1e sysSOCK_RAW = 0x3 sysNET_RT_DUMP = 0x1 sysNET_RT_FLAGS = 0x2 sysNET_RT_IFLIST = 0x3 sysNET_RT_STAT = 0x4 sysNET_RT_TRASH = 0x5 sysNET_RT_IFLIST2 = 0x6 sysNET_RT_DUMP2 = 0x7 sysNET_RT_MAXID = 0xa ) const ( sysCTL_MAXNAME = 0xc sysCTL_UNSPEC = 0x0 sysCTL_KERN = 0x1 sysCTL_VM = 0x2 sysCTL_VFS = 0x3 sysCTL_NET = 0x4 sysCTL_DEBUG = 0x5 sysCTL_HW = 0x6 sysCTL_MACHDEP = 0x7 sysCTL_USER = 0x8 sysCTL_MAXID = 0x9 ) const ( sysRTM_VERSION = 0x5 sysRTM_ADD = 0x1 sysRTM_DELETE = 0x2 sysRTM_CHANGE = 0x3 sysRTM_GET = 0x4 sysRTM_LOSING = 0x5 sysRTM_REDIRECT = 0x6 sysRTM_MISS = 0x7 sysRTM_LOCK = 0x8 sysRTM_OLDADD = 0x9 sysRTM_OLDDEL = 0xa sysRTM_RESOLVE = 0xb sysRTM_NEWADDR = 0xc sysRTM_DELADDR = 0xd sysRTM_IFINFO = 0xe sysRTM_NEWMADDR = 0xf sysRTM_DELMADDR = 0x10 sysRTM_IFINFO2 = 0x12 sysRTM_NEWMADDR2 = 0x13 sysRTM_GET2 = 0x14 sysRTA_DST = 0x1 sysRTA_GATEWAY = 0x2 sysRTA_NETMASK = 0x4 sysRTA_GENMASK = 0x8 sysRTA_IFP = 0x10 sysRTA_IFA = 0x20 sysRTA_AUTHOR = 0x40 sysRTA_BRD = 0x80 sysRTAX_DST = 0x0 sysRTAX_GATEWAY = 0x1 sysRTAX_NETMASK = 0x2 sysRTAX_GENMASK = 0x3 sysRTAX_IFP = 0x4 sysRTAX_IFA = 0x5 sysRTAX_AUTHOR = 0x6 sysRTAX_BRD = 0x7 sysRTAX_MAX = 0x8 ) const ( sizeofIfMsghdrDarwin15 = 0x70 sizeofIfaMsghdrDarwin15 = 0x14 sizeofIfmaMsghdrDarwin15 = 0x10 sizeofIfMsghdr2Darwin15 = 0xa0 sizeofIfmaMsghdr2Darwin15 = 0x14 sizeofIfDataDarwin15 = 0x60 sizeofIfData64Darwin15 = 0x80 sizeofRtMsghdrDarwin15 = 0x5c sizeofRtMsghdr2Darwin15 = 0x5c sizeofRtMetricsDarwin15 = 0x38 sizeofSockaddrStorage = 0x80 sizeofSockaddrInet = 0x10 sizeofSockaddrInet6 = 0x1c )
{ "pile_set_name": "Github" }
app ==== How to deploy? ---- It's very simple to have a test of apps in this folder: * Clone the whole git repository. * Make a symbolic link of 'snsapi' and other essential subdirectoreis to the app working directory. i.e. (from the path of this document) ``` cd forwarder ln -s ../../snsapi ln -s ../../auxiliary ``` * Refer to the app's document for the commands. List of Apps ---- Here's a list of apps. Please refer to their own directory for detailed information. * hellosns * forwarder * mysofa
{ "pile_set_name": "Github" }
/* * jcapistd.c * * Copyright (C) 1994-1996, Thomas G. Lane. * This file is part of the Independent JPEG Group's software. * For conditions of distribution and use, see the accompanying README file. * * This file contains application interface code for the compression half * of the JPEG library. These are the "standard" API routines that are * used in the normal full-compression case. They are not used by a * transcoding-only application. Note that if an application links in * jpeg_start_compress, it will end up linking in the entire compressor. * We thus must separate this file from jcapimin.c to avoid linking the * whole compression library into a transcoder. */ #define JPEG_INTERNALS #include "jinclude.h" #include "jpeglib.h" /* * Compression initialization. * Before calling this, all parameters and a data destination must be set up. * * We require a write_all_tables parameter as a failsafe check when writing * multiple datastreams from the same compression object. Since prior runs * will have left all the tables marked sent_table=TRUE, a subsequent run * would emit an abbreviated stream (no tables) by default. This may be what * is wanted, but for safety's sake it should not be the default behavior: * programmers should have to make a deliberate choice to emit abbreviated * images. Therefore the documentation and examples should encourage people * to pass write_all_tables=TRUE; then it will take active thought to do the * wrong thing. 
*/ GLOBAL(void) jpeg_start_compress (j_compress_ptr cinfo, boolean write_all_tables) { if (cinfo->global_state != CSTATE_START) ERREXIT1(cinfo, JERR_BAD_STATE, cinfo->global_state); if (write_all_tables) jpeg_suppress_tables(cinfo, FALSE); /* mark all tables to be written */ /* (Re)initialize error mgr and destination modules */ (*cinfo->err->reset_error_mgr) ((j_common_ptr) cinfo); (*cinfo->dest->init_destination) (cinfo); /* Perform master selection of active modules */ jinit_compress_master(cinfo); /* Set up for the first pass */ (*cinfo->master->prepare_for_pass) (cinfo); /* Ready for application to drive first pass through jpeg_write_scanlines * or jpeg_write_raw_data. */ cinfo->next_scanline = 0; cinfo->global_state = (cinfo->raw_data_in ? CSTATE_RAW_OK : CSTATE_SCANNING); } /* * Write some scanlines of data to the JPEG compressor. * * The return value will be the number of lines actually written. * This should be less than the supplied num_lines only in case that * the data destination module has requested suspension of the compressor, * or if more than image_height scanlines are passed in. * * Note: we warn about excess calls to jpeg_write_scanlines() since * this likely signals an application programmer error. However, * excess scanlines passed in the last valid call are *silently* ignored, * so that the application need not adjust num_lines for end-of-image * when using a multiple-scanline buffer. 
*/ GLOBAL(JDIMENSION) jpeg_write_scanlines (j_compress_ptr cinfo, JSAMPARRAY scanlines, JDIMENSION num_lines) { JDIMENSION row_ctr, rows_left; if (cinfo->global_state != CSTATE_SCANNING) ERREXIT1(cinfo, JERR_BAD_STATE, cinfo->global_state); if (cinfo->next_scanline >= cinfo->image_height) WARNMS(cinfo, JWRN_TOO_MUCH_DATA); /* Call progress monitor hook if present */ if (cinfo->progress != NULL) { cinfo->progress->pass_counter = (long) cinfo->next_scanline; cinfo->progress->pass_limit = (long) cinfo->image_height; (*cinfo->progress->progress_monitor) ((j_common_ptr) cinfo); } /* Give master control module another chance if this is first call to * jpeg_write_scanlines. This lets output of the frame/scan headers be * delayed so that application can write COM, etc, markers between * jpeg_start_compress and jpeg_write_scanlines. */ if (cinfo->master->call_pass_startup) (*cinfo->master->pass_startup) (cinfo); /* Ignore any extra scanlines at bottom of image. */ rows_left = cinfo->image_height - cinfo->next_scanline; if (num_lines > rows_left) num_lines = rows_left; row_ctr = 0; (*cinfo->main->process_data) (cinfo, scanlines, &row_ctr, num_lines); cinfo->next_scanline += row_ctr; return row_ctr; } /* * Alternate entry point to write raw data. * Processes exactly one iMCU row per call, unless suspended. */ GLOBAL(JDIMENSION) jpeg_write_raw_data (j_compress_ptr cinfo, JSAMPIMAGE data, JDIMENSION num_lines) { JDIMENSION lines_per_iMCU_row; if (cinfo->global_state != CSTATE_RAW_OK) ERREXIT1(cinfo, JERR_BAD_STATE, cinfo->global_state); if (cinfo->next_scanline >= cinfo->image_height) { WARNMS(cinfo, JWRN_TOO_MUCH_DATA); return 0; } /* Call progress monitor hook if present */ if (cinfo->progress != NULL) { cinfo->progress->pass_counter = (long) cinfo->next_scanline; cinfo->progress->pass_limit = (long) cinfo->image_height; (*cinfo->progress->progress_monitor) ((j_common_ptr) cinfo); } /* Give master control module another chance if this is first call to * jpeg_write_raw_data. 
This lets output of the frame/scan headers be * delayed so that application can write COM, etc, markers between * jpeg_start_compress and jpeg_write_raw_data. */ if (cinfo->master->call_pass_startup) (*cinfo->master->pass_startup) (cinfo); /* Verify that at least one iMCU row has been passed. */ lines_per_iMCU_row = cinfo->max_v_samp_factor * DCTSIZE; if (num_lines < lines_per_iMCU_row) ERREXIT(cinfo, JERR_BUFFER_SIZE); /* Directly compress the row. */ if (! (*cinfo->coef->compress_data) (cinfo, data)) { /* If compressor did not consume the whole row, suspend processing. */ return 0; } /* OK, we processed one iMCU row. */ cinfo->next_scanline += lines_per_iMCU_row; return lines_per_iMCU_row; }
{ "pile_set_name": "Github" }
declare interface IMetadataSitePagesCommandSetStrings { Command1: string; Command2: string; } declare module 'MetadataSitePagesCommandSetStrings' { const strings: IMetadataSitePagesCommandSetStrings; export = strings; }
{ "pile_set_name": "Github" }
/***************************************************//** * @file ProtocolHelper.h * @date July 2009 * @author Ocean Optics, Inc. * * Feature instances may look up an implementation object * that matches a particular Protocol. All such implementations * should in some way derive from ProtocolHelper so that * Feature's look up mechanism can return them. It is * expected that each Feature will have a corresponding * interface at the Protocol layer; those interface classes * should derive from this class, and their implementations * will thus extend this as well. * * LICENSE: * * SeaBreeze Copyright (C) 2014, Ocean Optics Inc * * Permission is hereby granted, free of charge, to any person obtaining * a copy of this software and associated documentation files (the * "Software"), to deal in the Software without restriction, including * without limitation the rights to use, copy, modify, merge, publish, * distribute, sublicense, and/or sell copies of the Software, and to * permit persons to whom the Software is furnished to do so, subject * to the following conditions: * * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. * IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY * CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, * TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE * SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
*******************************************************/ #ifndef SEABREEZE_PROTOCOLHELPER_H #define SEABREEZE_PROTOCOLHELPER_H #include "common/protocols/Protocol.h" namespace seabreeze { class ProtocolHelper { public: ProtocolHelper(Protocol *proto); virtual ~ProtocolHelper(); Protocol &getProtocol(); protected: /* Protected for derived classes to use. */ ProtocolHelper(); Protocol *protocol; }; } #endif
{ "pile_set_name": "Github" }
( define ( script-fu-megamek-unit-shading-v3 image original-layer ) ( let* ( (old-bg (car (gimp-palette-get-background))) (layer-source-layer original-layer) (layer-working 0) (layer-to-process 0) (layergroup-unit-shading 0) (layer-white-base 0) (layer-shadow-1-sw-side 0) (layer-highlight-NE-side 0) (layer-enhanced-panel-detail 0) (layergroup-finished-unit 0) (layer-color-details 0) (layer-finished-unit-body 0) (layer-unit-shadow 0) (shadow-x 0) (shadow-y 0) (shadow-blur 0) (shadow-color '(255 255 255)) (shadow-opacity 100) (shadow-resize FALSE) (brightness 0) (contrast 0) ) (gimp-undo-push-group-start image) ;this was a bad idea. will cause the script to error if the image is already in rbg. Better to handle this with bimp. ;(gimp-image-convert-rgb image) ;;;;;;;;;; Create Shading Layer Group ;;;;;;;;;;;; (set! layer-working (car (gimp-layer-group-new image))) (gimp-image-insert-layer image layer-working 0 -1) (gimp-layer-set-name layer-working "Unit Shading") (set! layergroup-unit-shading layer-working) ;;;;;;;;;; Create Shading Layer Group ;;;;;;;;;;;; ;;;;;;;;;;;; Create Processing Layer ;;;;;;;;;;;;; (set! layer-working (car (gimp-layer-new-from-drawable layer-source-layer image))) (gimp-image-insert-layer image layer-working layergroup-unit-shading -1) (gimp-layer-set-name layer-working "To Process") (set! layer-to-process layer-working) (gimp-item-set-visible layer-source-layer FALSE) (gimp-layer-set-name layer-source-layer "Image Source") ;;;;;;;;;;;; Create Processing Layer ;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;; White Base ;;;;;;;;;;;;;;;;;;;; (set! shadow-x 0) (set! shadow-y 0) (set! shadow-blur 0) (set! shadow-color '(255 255 255)) (set! shadow-opacity 100) (script-fu-drop-shadow image layer-to-process shadow-x shadow-y shadow-blur shadow-color shadow-opacity shadow-resize) (set! layer-working (car (gimp-image-get-layer-by-name image "Drop Shadow"))) (set! 
layer-white-base layer-working) (gimp-layer-set-name layer-working "White Base") (gimp-image-set-active-layer image layer-to-process) ;;;;;;;;;;;;;;;;;; White Base ;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;; Shadow 1 - SW Side ;;;;;;;;;;;;;;;;;;;; (set! shadow-x -3) (set! shadow-y 3) (set! shadow-blur 4) (set! shadow-color '(0 0 0)) (set! shadow-opacity 55) (script-fu-drop-shadow image layer-to-process shadow-x shadow-y shadow-blur shadow-color shadow-opacity shadow-resize) (set! layer-working (car (gimp-image-get-layer-by-name image "Drop Shadow"))) (set! layer-shadow-1-sw-side layer-working) (gimp-layer-set-name layer-working "Shadow 1 - SW Side") (gimp-layer-set-mode layer-working DARKEN-ONLY-MODE) (gimp-image-set-active-layer image layer-to-process) ;;;;;;;;;;;;;;;;;; Shadow 1 - SW Side ;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;; Highlight - NE Side ;;;;;;;;;;;;;;;;;;;; (set! shadow-x 3) (set! shadow-y -3) (set! shadow-blur 4) (set! shadow-color '(255 255 255)) (set! shadow-opacity 100) (script-fu-drop-shadow image layer-to-process shadow-x shadow-y shadow-blur shadow-color shadow-opacity shadow-resize) (set! layer-working (car (gimp-image-get-layer-by-name image "Drop Shadow"))) (set! layer-highlight-NE-side layer-working) (gimp-layer-set-name layer-working "Highlight - NE Side") (gimp-layer-set-mode layer-working LIGHTEN-ONLY-MODE) (gimp-image-set-active-layer image layer-to-process) ;;;;;;;;;;;;;;;;; Highlight - NE Side ;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;; Enhanced Panel Detail ;;;;;;;;;;;;;;;;;;; (set! brightness 0) (set! contrast 85) ;(gimp-item-set-visible layer-to-process FALSE) (set! layer-working (car (gimp-layer-new-from-visible image image "Enhanced Panel Detail"))) (set! 
layer-enhanced-panel-detail layer-working) (gimp-image-insert-layer image layer-working 0 -1) ;(gimp-image-lower-layer image layer-working) (gimp-brightness-contrast layer-working brightness contrast) ;(gimp-item-set-visible layer-to-process TRUE) ;;;;;;;;;;;;;;;; Enhanced Panel Detail ;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;; White To Alpha ;;;;;;;;;;;;;;;;;;;;; (plug-in-colortoalpha 1 image layer-enhanced-panel-detail '(255 255 255)) (plug-in-colortoalpha 1 image layer-to-process '(255 255 255)) (gimp-image-set-active-layer image layer-to-process) ;;;;;;;;;;;;;;;;;;;;; White To Alpha ;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;; Shading ;;;;;;;;;;;;;;;;;;;;;;;;;; (set! shadow-x 0) (set! shadow-y 0) (set! shadow-blur 3) (set! shadow-color '(0 0 0)) (set! shadow-opacity 43) (script-fu-drop-shadow image layer-to-process shadow-x shadow-y shadow-blur shadow-color shadow-opacity shadow-resize) (set! layer-working (car (gimp-image-get-layer-by-name image "Drop Shadow"))) (gimp-layer-set-name layer-working "Shading") (gimp-layer-set-mode layer-working DARKEN-ONLY-MODE) (gimp-image-set-active-layer image layer-to-process) ;;;;;;;;;;;;;;;;;;;;;;; Shading ;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;; Detail Shading ;;;;;;;;;;;;;;;;;;;;;; (set! shadow-x 0) (set! shadow-y 0) (set! shadow-blur 1) (set! shadow-color '(0 0 0)) (set! shadow-opacity 79) (script-fu-drop-shadow image layer-to-process shadow-x shadow-y shadow-blur shadow-color shadow-opacity shadow-resize) (set! layer-working (car (gimp-image-get-layer-by-name image "Drop Shadow"))) (gimp-layer-set-name layer-working "Detail Shading") (gimp-layer-set-mode layer-working DARKEN-ONLY-MODE) (gimp-image-set-active-layer image layer-to-process) ;;;;;;;;;;;;;;;;;;;; Detail Shading ;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;; Make new layer from composite shading ;;;;;;;;;; (set! brightness 33) (set! contrast 20) (gimp-item-set-visible layer-to-process FALSE) (gimp-item-set-visible layer-enhanced-panel-detail FALSE) (set! 
layer-working (car (gimp-layer-new-from-visible image image "Composite Shading"))) (gimp-image-insert-layer image layer-working 0 -1) (gimp-image-lower-layer image layer-working) (gimp-brightness-contrast layer-working brightness contrast) ;(gimp-item-set-visible layer-to-process TRUE) (gimp-item-set-visible layer-enhanced-panel-detail TRUE) ;;;;;;;;; Make new layer from composite shading ;;;;;;;;;; ;;; Rebalance visibility of Enhanced Panel Detail Layer ;; ;(set! shadow-opacity 55.0) (set! shadow-opacity 35.0) (gimp-layer-set-opacity layer-enhanced-panel-detail shadow-opacity) ;;; Rebalance visibility of Enhanced Panel Detail Layer ;; ;;;;;;;;;;;;;; Dump The Old Processing Layer ;;;;;;;;;;;;; (gimp-image-remove-layer image layer-to-process) ;;;;;;;;;;;;;; Dump The Old Processing Layer ;;;;;;;;;;;;; ;;;;;;;;;;; Create Finished Layer Group ;;;;;;;;;;; (gimp-image-set-active-layer image layer-source-layer) (set! layer-working (car (gimp-layer-group-new image))) (gimp-image-insert-layer image layer-working 0 -1) (gimp-layer-set-name layer-working "Finished Unit") (gimp-image-raise-item-to-top image layer-working) (set! layergroup-finished-unit layer-working) ;;;;;;;;;;; Create Finished Layer Group ;;;;;;;;;;; ;;;;;;;;;;;;; Make Finished Unit Body Layer ;;;;;;;;;;;;;; (set! layer-working (car (gimp-layer-new-from-visible image image "Finished Unit Body"))) (gimp-image-insert-layer image layer-working 0 -1) (gimp-desaturate-full layer-working DESATURATE-LIGHTNESS) (set! layer-finished-unit-body layer-working) (gimp-item-set-visible layergroup-unit-shading FALSE) ;;;;;;;;;;;;; Make Finished Unit Body Layer ;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;; Make Unit Shadow Layer ;;;;;;;;;;;;;;;;;; (set! shadow-x 0) (set! shadow-y 0) (set! shadow-blur 10) (set! shadow-color '(0 0 2)) (set! shadow-opacity 82) (script-fu-drop-shadow image layer-finished-unit-body shadow-x shadow-y shadow-blur shadow-color shadow-opacity shadow-resize) (set! 
layer-working (car (gimp-image-get-layer-by-name image "Drop Shadow"))) (gimp-layer-set-name layer-working "Unit Shadow") (gimp-colorize layer-working 214.0 52.0 2.0) (set! layer-unit-shadow layer-working) ;;;;;;;;;;;;;;;; Make Unit Shadow Layer ;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;; Make Color Details Layer ;;;;;;;;;;;;;;;;; (gimp-image-set-active-layer image layer-finished-unit-body) ( set! layer-working ( car ( gimp-layer-new image (car (gimp-image-width image)) (car (gimp-image-height image)) RGBA-IMAGE "Color Details Go Here" 100.0 NORMAL-MODE ) ) ) (gimp-image-insert-layer image layer-working 0 -1) (gimp-layer-set-name layer-working "Color Details Go Here") (set! layer-color-details layer-working) ;;;;;;;;;;;;;;; Make Color Details Layer ;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;; Cleanup ;;;;;;;;;;;;;;;;;;;;;;;;;;; (gimp-palette-set-background old-bg) (gimp-image-set-active-layer image layer-color-details) (gimp-undo-push-group-end image) (gimp-displays-flush) ;;;;;;;;;;;;;;;;;;;;;; Cleanup ;;;;;;;;;;;;;;;;;;;;;;;;;;; ) ) ( script-fu-register "script-fu-megamek-unit-shading-v3" _"_Megamek Unit Shading V3..." _"Turns boring flat MegaMek units into shaded 3d units. By Colonel Sanders Lite" "Colonel Sanders Lite" "I Don't Care" "2016/8/21" "RGBA" SF-IMAGE "Image" 0 SF-DRAWABLE "Drawable" 0 ) (script-fu-menu-register "script-fu-megamek-unit-shading-v3" "<Image>/Filters/Light and Shadow/Shadow")
{ "pile_set_name": "Github" }
.\" Copyright (c) 1991, 1993 .\" The Regents of the University of California. All rights reserved. .\" .\" Redistribution and use in source and binary forms, with or without .\" modification, are permitted provided that the following conditions .\" are met: .\" 1. Redistributions of source code must retain the above copyright .\" notice, this list of conditions and the following disclaimer. .\" 2. Redistributions in binary form must reproduce the above copyright .\" notice, this list of conditions and the following disclaimer in the .\" documentation and/or other materials provided with the distribution. .\" 3. All advertising materials mentioning features or use of this software .\" must display the following acknowledgement: .\" This product includes software developed by the University of .\" California, Berkeley and its contributors. .\" 4. Neither the name of the University nor the names of its contributors .\" may be used to endorse or promote products derived from this software .\" without specific prior written permission. .\" .\" THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND .\" ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE .\" IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE .\" ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE .\" FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL .\" DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS .\" OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) .\" HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT .\" LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY .\" OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF .\" SUCH DAMAGE. 
.\" .\" @(#)unix.4 8.1 (Berkeley) 6/9/93 .\" .Dd June 9, 1993 .Dt UNIX 4 .Os .Sh NAME .Nm unix .Nd UNIX-domain protocol family .Sh SYNOPSIS .Fd #include <sys/types.h> .Fd #include <sys/un.h> .Sh DESCRIPTION The .Tn UNIX Ns -domain protocol family is a collection of protocols that provides local (on-machine) interprocess communication through the normal .Xr socket 2 mechanisms. The .Tn UNIX Ns -domain family supports the .Dv SOCK_STREAM and .Dv SOCK_DGRAM socket types and uses filesystem pathnames for addressing. .Sh ADDRESSING .Tn UNIX Ns -domain addresses are variable-length filesystem pathnames of at most 104 characters. The include file .Aq Pa sys/un.h defines this address: .Bd -literal -offset indent struct sockaddr_un { u_char sun_len; u_char sun_family; char sun_path[104]; }; .Ed .Pp Binding a name to a .Tn UNIX Ns -domain socket with .Xr bind 2 causes a socket file to be created in the filesystem. This file is .Em not removed when the socket is closed\(em\c .Xr unlink 2 must be used to remove the file. .Pp The .Tn UNIX Ns -domain protocol family does not support broadcast addressing or any form of .Dq wildcard matching on incoming messages. All addresses are absolute- or relative-pathnames of other .Tn UNIX Ns -domain sockets. Normal filesystem access-control mechanisms are also applied when referencing pathnames; e.g., the destination of a .Xr connect 2 or .Xr sendto 2 must be writable. .Sh PROTOCOLS The .Tn UNIX Ns -domain protocol family is comprised of simple transport protocols that support the .Dv SOCK_STREAM and .Dv SOCK_DGRAM abstractions. .Dv SOCK_STREAM sockets also support the communication of .Ux file descriptors through the use of the .Ar msg_control field in the .Ar msg argument to .Xr sendmsg 2 and .Xr recvmsg 2 . .Pp Any valid descriptor may be sent in a message. The file descriptor(s) to be passed are described using a .Ar struct cmsghdr that is defined in the include file .Aq Pa sys/socket.h . 
The type of the message is .Dv SCM_RIGHTS , and the data portion of the messages is an array of integers representing the file descriptors to be passed. The number of descriptors being passed is defined by the length field of the message; the length field is the sum of the size of the header plus the size of the array of file descriptors. .Pp The received descriptor is a .Em duplicate of the sender's descriptor, as if it were created with a call to .Xr dup 2 . Per-process descriptor flags, set with .Xr fcntl 2 , are .Em not passed to a receiver. Descriptors that are awaiting delivery, or that are purposely not received, are automatically closed by the system when the destination socket is closed. .Sh SEE ALSO .Xr socket 2 , .Xr intro 4 .Rs .%T "An Introductory 4.3 BSD Interprocess Communication Tutorial" .%B PS1 .%N 7 .Re .Rs .%T "An Advanced 4.3 BSD Interprocess Communication Tutorial" .%B PS1 .%N 8 .Re
{ "pile_set_name": "Github" }
{ "vendors.js": "/static/js/vendors.27f5d684.js", "app.js": "/static/js/app.b51d4aa76313865074da.js", "readme.js": "/static/js/readme.032bb26a.js", "index.html": "/index.html" }
{ "pile_set_name": "Github" }
/* Copyright (c) 2007-2008 CSIRO Copyright (c) 2007-2009 Xiph.Org Foundation Written by Jean-Marc Valin */ /** @file pitch.c @brief Pitch analysis */ /* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/

/* CELT pitch analysis.  The opus_val16/opus_val32 types and the arithmetic
   macros (MULT16_16, SHR32, MAC16_16, ...) come from the arch headers and
   resolve differently in the fixed-point (FIXED_POINT) and float builds;
   in the float build the shift arguments passed to SHR32/VSHR32 are
   presumably ignored by the macros — confirm against the arch headers. */

#ifdef HAVE_CONFIG_H
#include "config.h"
#endif

#include "pitch.h"
#include "os_support.h"
#include "modes.h"
#include "stack_alloc.h"
#include "mathops.h"
#include "celt_lpc.h"

/* Scans the correlation array and records, in best_pitch[0..1] (best first),
   the two lags i in [0,max_pitch) with the largest normalized score
   xcorr[i]^2 / Syy, where Syy is the energy of the length-len window of y
   starting at lag i.  Syy is maintained incrementally (add the entering
   sample, subtract the leaving one) and clamped to >= 1.
   Fixed-point build only: yshift scales the y energies and maxcorr is the
   peak correlation used to derive the headroom shift for xcorr. */
static void find_best_pitch(opus_val32 *xcorr, opus_val16 *y, int len,
                            int max_pitch, int *best_pitch
#ifdef FIXED_POINT
                            , int yshift, opus_val32 maxcorr
#endif
                            )
{
   int i, j;
   opus_val32 Syy=1;
   opus_val16 best_num[2];
   opus_val32 best_den[2];
#ifdef FIXED_POINT
   int xshift;

   /* Shift so the largest correlation fits in 16 bits after VSHR32. */
   xshift = celt_ilog2(maxcorr)-14;
#endif

   best_num[0] = -1;
   best_num[1] = -1;
   best_den[0] = 0;
   best_den[1] = 0;
   best_pitch[0] = 0;
   best_pitch[1] = 1;
   /* Energy of the first window of y. */
   for (j=0;j<len;j++)
      Syy = ADD32(Syy, SHR32(MULT16_16(y[j],y[j]), yshift));
   for (i=0;i<max_pitch;i++)
   {
      if (xcorr[i]>0)
      {
         opus_val16 num;
         opus_val32 xcorr16;
         xcorr16 = EXTRACT16(VSHR32(xcorr[i], xshift));
#ifndef FIXED_POINT
         /* Considering the range of xcorr16, this should avoid both underflows
            and overflows (inf) when squaring xcorr16 */
         xcorr16 *= 1e-12f;
#endif
         num = MULT16_16_Q15(xcorr16,xcorr16);
         /* Compare num/Syy against the stored best ratios via cross-
            multiplication, so no division is needed. */
         if (MULT16_32_Q15(num,best_den[1]) > MULT16_32_Q15(best_num[1],Syy))
         {
            if (MULT16_32_Q15(num,best_den[0]) > MULT16_32_Q15(best_num[0],Syy))
            {
               /* New overall best: demote the previous best to second place. */
               best_num[1] = best_num[0];
               best_den[1] = best_den[0];
               best_pitch[1] = best_pitch[0];
               best_num[0] = num;
               best_den[0] = Syy;
               best_pitch[0] = i;
            } else {
               best_num[1] = num;
               best_den[1] = Syy;
               best_pitch[1] = i;
            }
         }
      }
      /* Slide the energy window by one sample and keep it positive. */
      Syy += SHR32(MULT16_16(y[i+len],y[i+len]),yshift) - SHR32(MULT16_16(y[i],y[i]),yshift);
      Syy = MAX32(1, Syy);
   }
}

/* 5-tap direct-form FIR filter.  num[0..4] are the coefficients; mem[0..4]
   is the delay line, read on entry and written back on exit so the filter
   can be applied over successive chunks.  Because each y[i] is written only
   after x[i] has been captured into the local delay line, in-place use
   (y == x) is safe — pitch_downsample relies on that. */
static void celt_fir5(const opus_val16 *x,
         const opus_val16 *num,
         opus_val16 *y,
         int N,
         opus_val16 *mem)
{
   int i;
   opus_val16 num0, num1, num2, num3, num4;
   opus_val32 mem0, mem1, mem2, mem3, mem4;
   num0=num[0];
   num1=num[1];
   num2=num[2];
   num3=num[3];
   num4=num[4];
   mem0=mem[0];
   mem1=mem[1];
   mem2=mem[2];
   mem3=mem[3];
   mem4=mem[4];
   for (i=0;i<N;i++)
   {
      opus_val32 sum = SHL32(EXTEND32(x[i]), SIG_SHIFT);
      sum = MAC16_16(sum,num0,mem0);
      sum = MAC16_16(sum,num1,mem1);
      sum = MAC16_16(sum,num2,mem2);
      sum = MAC16_16(sum,num3,mem3);
      sum = MAC16_16(sum,num4,mem4);
      /* Shift the delay line and push the current input. */
      mem4 = mem3;
      mem3 = mem2;
      mem2 = mem1;
      mem1 = mem0;
      mem0 = x[i];
      y[i] = ROUND16(sum, SIG_SHIFT);
   }
   /* Persist the delay line for the next call. */
   mem[0]=mem0;
   mem[1]=mem1;
   mem[2]=mem2;
   mem[3]=mem3;
   mem[4]=mem4;
}

/* Produces into x_lp a 2x-downsampled (and, for C==2, channel-summed)
   version of x, then whitens it in place: a 4th-order LPC is computed from
   the autocorrelation (with a -40 dB noise floor and lag windowing), the
   coefficients are bandwidth-expanded (factor .9 per tap), a zero at .8 is
   added to form a 5-tap filter, and celt_fir5 applies it in place. */
void pitch_downsample(celt_sig * OPUS_RESTRICT x[], opus_val16 * OPUS_RESTRICT x_lp,
      int len, int C, int arch)
{
   int i;
   opus_val32 ac[5];
   opus_val16 tmp=Q15ONE;
   opus_val16 lpc[4], mem[5]={0,0,0,0,0};
   opus_val16 lpc2[5];
   opus_val16 c1 = QCONST16(.8f,15);
#ifdef FIXED_POINT
   /* Derive a headroom shift from the largest input magnitude; one extra
      bit when two channels are summed. */
   int shift;
   opus_val32 maxabs = celt_maxabs32(x[0], len);
   if (C==2)
   {
      opus_val32 maxabs_1 = celt_maxabs32(x[1], len);
      maxabs = MAX32(maxabs, maxabs_1);
   }
   if (maxabs<1)
      maxabs=1;
   shift = celt_ilog2(maxabs)-10;
   if (shift<0)
      shift=0;
   if (C==2)
      shift++;
#endif
   /* Downsample by 2 with a small [1 2 1]/4 smoothing kernel. */
   for (i=1;i<len>>1;i++)
      x_lp[i] = SHR32(HALF32(HALF32(x[0][(2*i-1)]+x[0][(2*i+1)])+x[0][2*i]), shift);
   x_lp[0] = SHR32(HALF32(HALF32(x[0][1])+x[0][0]), shift);
   if (C==2)
   {
      for (i=1;i<len>>1;i++)
         x_lp[i] += SHR32(HALF32(HALF32(x[1][(2*i-1)]+x[1][(2*i+1)])+x[1][2*i]), shift);
      x_lp[0] += SHR32(HALF32(HALF32(x[1][1])+x[1][0]), shift);
   }

   _celt_autocorr(x_lp, ac, NULL, 0, 4, len>>1, arch);

   /* Noise floor -40 dB */
#ifdef FIXED_POINT
   ac[0] += SHR32(ac[0],13);
#else
   ac[0] *= 1.0001f;
#endif
   /* Lag windowing */
   for (i=1;i<=4;i++)
   {
      /*ac[i] *= exp(-.5*(2*M_PI*.002*i)*(2*M_PI*.002*i));*/
#ifdef FIXED_POINT
      ac[i] -= MULT16_32_Q15(2*i*i, ac[i]);
#else
      ac[i] -= ac[i]*(.008f*i)*(.008f*i);
#endif
   }

   _celt_lpc(lpc, ac, 4);
   /* Bandwidth expansion: scale tap i by .9^(i+1). */
   for (i=0;i<4;i++)
   {
      tmp = MULT16_16_Q15(QCONST16(.9f,15), tmp);
      lpc[i] = MULT16_16_Q15(lpc[i], tmp);
   }
   /* Add a zero */
   lpc2[0] = lpc[0] + QCONST16(.8f,SIG_SHIFT);
   lpc2[1] = lpc[1] + MULT16_16_Q15(c1,lpc[0]);
   lpc2[2] = lpc[2] + MULT16_16_Q15(c1,lpc[1]);
   lpc2[3] = lpc[3] + MULT16_16_Q15(c1,lpc[2]);
   lpc2[4] = MULT16_16_Q15(c1,lpc[3]);
   /* In-place filtering: celt_fir5 is written to make y == x safe. */
   celt_fir5(x_lp, lpc2, x_lp, len>>1, mem);
}

#if 0
/* This is a simple version of the pitch correlation that should work
   well on DSPs like Blackfin and TI C5x/C6x */
/* Straightforward O(len*max_pitch) cross-correlation; disabled in favor of
   the unrolled version below.  The fixed-point build additionally returns
   the peak correlation for later scaling. */
#ifdef FIXED_POINT
opus_val32
#else
void
#endif
celt_pitch_xcorr(opus_val16 *x, opus_val16 *y, opus_val32 *xcorr, int len, int max_pitch)
{
   int i, j;
#ifdef FIXED_POINT
   opus_val32 maxcorr=1;
#endif
   for (i=0;i<max_pitch;i++)
   {
      opus_val32 sum = 0;
      for (j=0;j<len;j++)
         sum = MAC16_16(sum, x[j],y[i+j]);
      xcorr[i] = sum;
#ifdef FIXED_POINT
      maxcorr = MAX32(maxcorr, sum);
#endif
   }
#ifdef FIXED_POINT
   return maxcorr;
#endif
}

#else /* Unrolled version of the pitch correlation -- runs faster on x86 and ARM */

/* Computes xcorr[i] = sum_j _x[j]*_y[i+j] for i in [0,max_pitch), four lags
   at a time through xcorr_kernel, with a scalar tail loop when max_pitch is
   not a multiple of 4.  Fixed-point build returns the peak correlation
   (>= 1).  NOTE(review): in the fixed-point build the maxcorr declaration
   follows the celt_assert statements — a declaration after statements,
   which is C99-only; confirm the build requires C99 or later. */
#ifdef FIXED_POINT
opus_val32
#else
void
#endif
celt_pitch_xcorr_c(const opus_val16 *_x, const opus_val16 *_y,
      opus_val32 *xcorr, int len, int max_pitch)
{
   int i,j;
   /*The EDSP version requires that max_pitch is at least 1, and that _x is
      32-bit aligned.
      Since it's hard to put asserts in assembly, put them here.*/
   celt_assert(max_pitch>0);
   celt_assert((((unsigned char *)_x-(unsigned char *)NULL)&3)==0);
#ifdef FIXED_POINT
   opus_val32 maxcorr=1;
#endif
   for (i=0;i<max_pitch-3;i+=4)
   {
      opus_val32 sum[4]={0,0,0,0};
      xcorr_kernel(_x, _y+i, sum, len);
      xcorr[i]=sum[0];
      xcorr[i+1]=sum[1];
      xcorr[i+2]=sum[2];
      xcorr[i+3]=sum[3];
#ifdef FIXED_POINT
      sum[0] = MAX32(sum[0], sum[1]);
      sum[2] = MAX32(sum[2], sum[3]);
      sum[0] = MAX32(sum[0], sum[2]);
      maxcorr = MAX32(maxcorr, sum[0]);
#endif
   }
   /* In case max_pitch isn't a multiple of 4, do non-unrolled version. */
   for (;i<max_pitch;i++)
   {
      opus_val32 sum = 0;
      for (j=0;j<len;j++)
         sum = MAC16_16(sum, _x[j],_y[i+j]);
      xcorr[i] = sum;
#ifdef FIXED_POINT
      maxcorr = MAX32(maxcorr, sum);
#endif
   }
#ifdef FIXED_POINT
   return maxcorr;
#endif
}
#endif

/* Two-stage pitch search on the 2x-downsampled signal x_lp against history
   y (length len+max_pitch).  Stage 1: coarse correlation at a further 4x
   decimation, keeping the two best lags.  Stage 2: correlation at 2x
   decimation, evaluated only within +/-2 of the doubled coarse candidates.
   The winner is refined by pseudo-interpolation over its two neighbors and
   written to *pitch (still in the 2x-decimated domain). */
void pitch_search(const opus_val16 * OPUS_RESTRICT x_lp, opus_val16 * OPUS_RESTRICT y,
                  int len, int max_pitch, int *pitch, int arch)
{
   int i, j;
   int lag;
   int best_pitch[2]={0,0};
   VARDECL(opus_val16, x_lp4);
   VARDECL(opus_val16, y_lp4);
   VARDECL(opus_val32, xcorr);
#ifdef FIXED_POINT
   opus_val32 maxcorr;
   opus_val32 xmax, ymax;
   int shift=0;
#endif
   int offset;

   SAVE_STACK;

   celt_assert(len>0);
   celt_assert(max_pitch>0);
   lag = len+max_pitch;

   ALLOC(x_lp4, len>>2, opus_val16);
   ALLOC(y_lp4, lag>>2, opus_val16);
   ALLOC(xcorr, max_pitch>>1, opus_val32);

   /* Downsample by 2 again */
   for (j=0;j<len>>2;j++)
      x_lp4[j] = x_lp[2*j];
   for (j=0;j<lag>>2;j++)
      y_lp4[j] = y[2*j];

#ifdef FIXED_POINT
   /* Normalize both signals so the MACs below cannot overflow. */
   xmax = celt_maxabs16(x_lp4, len>>2);
   ymax = celt_maxabs16(y_lp4, lag>>2);
   shift = celt_ilog2(MAX32(1, MAX32(xmax, ymax)))-11;
   if (shift>0)
   {
      for (j=0;j<len>>2;j++)
         x_lp4[j] = SHR16(x_lp4[j], shift);
      for (j=0;j<lag>>2;j++)
         y_lp4[j] = SHR16(y_lp4[j], shift);
      /* Use double the shift for a MAC */
      shift *= 2;
   } else {
      shift = 0;
   }
#endif

   /* Coarse search with 4x decimation */
#ifdef FIXED_POINT
   maxcorr =
#endif
   celt_pitch_xcorr(x_lp4, y_lp4, xcorr, len>>2, max_pitch>>2, arch);

   find_best_pitch(xcorr, y_lp4, len>>2, max_pitch>>2, best_pitch
#ifdef FIXED_POINT
                   , 0, maxcorr
#endif
                   );

   /* Finer search with 2x decimation */
#ifdef FIXED_POINT
   maxcorr=1;
#endif
   for (i=0;i<max_pitch>>1;i++)
   {
      opus_val32 sum=0;
      xcorr[i] = 0;
      /* Only evaluate lags near (within 2 of) the doubled coarse winners. */
      if (abs(i-2*best_pitch[0])>2 && abs(i-2*best_pitch[1])>2)
         continue;
      for (j=0;j<len>>1;j++)
         sum += SHR32(MULT16_16(x_lp[j],y[i+j]), shift);
      xcorr[i] = MAX32(-1, sum);
#ifdef FIXED_POINT
      maxcorr = MAX32(maxcorr, sum);
#endif
   }
   find_best_pitch(xcorr, y, len>>1, max_pitch>>1, best_pitch
#ifdef FIXED_POINT
                   , shift+1, maxcorr
#endif
                   );

   /* Refine by pseudo-interpolation */
   if (best_pitch[0]>0 && best_pitch[0]<(max_pitch>>1)-1)
   {
      opus_val32 a, b, c;
      a = xcorr[best_pitch[0]-1];
      b = xcorr[best_pitch[0]];
      c = xcorr[best_pitch[0]+1];
      /* Nudge the lag by one toward whichever neighbor is markedly larger. */
      if ((c-a) > MULT16_32_Q15(QCONST16(.7f,15),b-a))
         offset = 1;
      else if ((a-c) > MULT16_32_Q15(QCONST16(.7f,15),b-c))
         offset = -1;
      else
         offset = 0;
   } else {
      offset = 0;
   }
   *pitch = 2*best_pitch[0]-offset;

   RESTORE_STACK;
}

/* For each divisor k, second_check[k] picks which multiple of the candidate
   sub-period T1 ~= T0/k is probed as a secondary correlation peak. */
static const int second_check[16] = {0, 0, 3, 2, 3, 2, 5, 2, 3, 2, 3, 2, 5, 2, 3, 2};

/* Guards against pitch doubling/halving: starting from the detected period
   *T0_, examines the sub-periods T0/k for k = 2..15 and accepts one when
   its normalized correlation g1 beats an adaptive threshold (relaxed when
   the sub-period is close to prev_period, tightened for very short
   periods).  Updates *T0_ (with interpolation offset, floored at the
   original minperiod) and returns the pitch gain.  All periods and N are
   halved on entry, i.e. the analysis runs at half resolution. */
opus_val16 remove_doubling(opus_val16 *x, int maxperiod, int minperiod,
      int N, int *T0_, int prev_period, opus_val16 prev_gain)
{
   int k, i, T, T0;
   opus_val16 g, g0;
   opus_val16 pg;
   opus_val32 xy,xx,yy,xy2;
   opus_val32 xcorr[3];
   opus_val32 best_xy, best_yy;
   int offset;
   int minperiod0;
   VARDECL(opus_val32, yy_lookup);
   SAVE_STACK;

   minperiod0 = minperiod;
   maxperiod /= 2;
   minperiod /= 2;
   *T0_ /= 2;
   prev_period /= 2;
   N /= 2;
   x += maxperiod;
   if (*T0_>=maxperiod)
      *T0_=maxperiod-1;

   T = T0 = *T0_;
   ALLOC(yy_lookup, maxperiod+1, opus_val32);
   dual_inner_prod(x, x, x-T0, N, &xx, &xy);
   /* Precompute the window energy for every possible lag incrementally. */
   yy_lookup[0] = xx;
   yy=xx;
   for (i=1;i<=maxperiod;i++)
   {
      yy = yy+MULT16_16(x[-i],x[-i])-MULT16_16(x[N-i],x[N-i]);
      yy_lookup[i] = MAX32(0, yy);
   }
   yy = yy_lookup[T0];
   best_xy = xy;
   best_yy = yy;
   /* g0 = normalized correlation xy/sqrt(xx*yy) at the initial period. */
#ifdef FIXED_POINT
      {
         opus_val32 x2y2;
         int sh, t;
         x2y2 = 1+HALF32(MULT32_32_Q31(xx,yy));
         sh = celt_ilog2(x2y2)>>1;
         t = VSHR32(x2y2, 2*(sh-7));
         g = g0 = VSHR32(MULT16_32_Q15(celt_rsqrt_norm(t), xy),sh+1);
      }
#else
      g = g0 = xy/celt_sqrt(1+xx*yy);
#endif
   /* Look for any pitch at T/k */
   for (k=2;k<=15;k++)
   {
      int T1, T1b;
      opus_val16 g1;
      opus_val16 cont=0;
      opus_val16 thresh;
      T1 = (2*T0+k)/(2*k);
      if (T1 < minperiod)
         break;
      /* Look for another strong correlation at T1b */
      if (k==2)
      {
         if (T1+T0>maxperiod)
            T1b = T0;
         else
            T1b = T0+T1;
      } else
      {
         T1b = (2*second_check[k]*T0+k)/(2*k);
      }
      dual_inner_prod(x, &x[-T1], &x[-T1b], N, &xy, &xy2);
      xy += xy2;
      yy = yy_lookup[T1] + yy_lookup[T1b];
#ifdef FIXED_POINT
      {
         opus_val32 x2y2;
         int sh, t;
         x2y2 = 1+MULT32_32_Q31(xx,yy);
         sh = celt_ilog2(x2y2)>>1;
         t = VSHR32(x2y2, 2*(sh-7));
         g1 = VSHR32(MULT16_32_Q15(celt_rsqrt_norm(t), xy),sh+1);
      }
#else
      g1 = xy/celt_sqrt(1+2.f*xx*1.f*yy);
#endif
      /* Continuity bonus when the sub-period matches the previous frame. */
      if (abs(T1-prev_period)<=1)
         cont = prev_gain;
      else if (abs(T1-prev_period)<=2 && 5*k*k < T0)
         cont = HALF32(prev_gain);
      else
         cont = 0;
      thresh = MAX16(QCONST16(.3f,15), MULT16_16_Q15(QCONST16(.7f,15),g0)-cont);
      /* Bias against very high pitch (very short period) to avoid false-positives
         due to short-term correlation */
      /* NOTE(review): since T1<2*minperiod implies T1<3*minperiod, the
         `else if` branch below can never be taken; the branch order looks
         inverted — confirm against upstream before changing, as the
         encoder's output is bit-exact-sensitive to this threshold. */
      if (T1<3*minperiod)
         thresh = MAX16(QCONST16(.4f,15), MULT16_16_Q15(QCONST16(.85f,15),g0)-cont);
      else if (T1<2*minperiod)
         thresh = MAX16(QCONST16(.5f,15), MULT16_16_Q15(QCONST16(.9f,15),g0)-cont);
      if (g1 > thresh)
      {
         best_xy = xy;
         best_yy = yy;
         T = T1;
         g = g1;
      }
   }
   best_xy = MAX32(0, best_xy);
   /* Pitch gain = best_xy/best_yy, clamped to at most unity. */
   if (best_yy <= best_xy)
      pg = Q15ONE;
   else
      pg = SHR32(frac_div32(best_xy,best_yy+1),16);

   /* Pseudo-interpolation around the chosen period. */
   for (k=0;k<3;k++)
   {
      int T1 = T+k-1;
      xy = 0;
      for (i=0;i<N;i++)
         xy = MAC16_16(xy, x[i], x[i-T1]);
      xcorr[k] = xy;
   }
   if ((xcorr[2]-xcorr[0]) > MULT16_32_Q15(QCONST16(.7f,15),xcorr[1]-xcorr[0]))
      offset = 1;
   else if ((xcorr[0]-xcorr[2]) > MULT16_32_Q15(QCONST16(.7f,15),xcorr[1]-xcorr[2]))
      offset = -1;
   else
      offset = 0;
   if (pg > g)
      pg = g;
   /* Convert back to full resolution and enforce the original floor. */
   *T0_ = 2*T+offset;

   if (*T0_<minperiod0)
      *T0_=minperiod0;
   RESTORE_STACK;
   return pg;
}
{ "pile_set_name": "Github" }
typescriptGlobal: - Array typescriptArrayStaticMethod: - from - isArray - of typescriptArrayMethod: - concat - copyWithin - entries - every - fill - filter - find - findIndex - forEach - indexOf - includes - join - keys - lastIndexOf - map - pop - push - reduce - reduceRight - reverse - shift - slice - some - sort - splice - toLocaleString - toSource - toString - unshift
{ "pile_set_name": "Github" }
#pragma warning disable CS1591

using System;

namespace MediaBrowser.Controller.Persistence
{
    /// <summary>
    /// Holds the criteria used when querying for media attachments.
    /// </summary>
    public class MediaAttachmentQuery
    {
        /// <summary>
        /// Gets or sets the item identifier.
        /// </summary>
        /// <value>The item identifier.</value>
        public Guid ItemId { get; set; }

        /// <summary>
        /// Gets or sets the index.
        /// </summary>
        /// <value>The index.</value>
        public int? Index { get; set; }
    }
}
{ "pile_set_name": "Github" }
<component name="libraryTable"> <library name="Gradle: androidx.collection:collection:1.1.0@jar"> <CLASSES> <root url="jar://$USER_HOME$/.gradle/caches/modules-2/files-2.1/androidx.collection/collection/1.1.0/1f27220b47669781457de0d600849a5de0e89909/collection-1.1.0.jar!/" /> </CLASSES> <JAVADOC /> <SOURCES> <root url="jar://$USER_HOME$/.gradle/caches/modules-2/files-2.1/androidx.collection/collection/1.1.0/bae67b0019fbb38498198fcc2d0282a340b71c5b/collection-1.1.0-sources.jar!/" /> </SOURCES> </library> </component>
{ "pile_set_name": "Github" }