bin_PROGRAMS = otool

if ISDARWIN
otool_LDADD = \
	$(top_builddir)/libstuff/libstuff.la \
	$(PTHREAD_FLAGS) $(CXXABI_LIB) $(DL_LIB) -lobjc
else
otool_LDADD = \
	$(top_builddir)/libstuff/libstuff.la \
	$(top_builddir)/libobjc2/libobjc.la \
	$(PTHREAD_FLAGS) $(CXXABI_LIB) $(DL_LIB) $(REALLOCF_LIB) $(GCC_LIB)
endif

if ISDARWIN
otool_CFLAGS = -I$(top_srcdir)/include -I$(top_srcdir)/include/foreign -I$(top_srcdir)/libstuff $(WARNINGS) $(LTO_DEF) -D_DARWIN_C_SOURCE -D__DARWIN_UNIX03 $(ENDIAN_FLAG) -DOTOOL -DEFI_SUPPORT
else
otool_CFLAGS = -I$(top_srcdir)/include -I$(top_srcdir)/include/foreign -I$(top_srcdir)/libstuff -I$(top_srcdir)/libobjc2 $(WARNINGS) $(LTO_DEF) -D_DARWIN_C_SOURCE -D__DARWIN_UNIX03 $(ENDIAN_FLAG) -DOTOOL -DEFI_SUPPORT
endif

otool_SOURCES = \
	main.c \
	$(top_srcdir)/libstuff/ofile.c \
	ofile_print.c \
	m68k_disasm.c \
	i860_disasm.c \
	m88k_disasm.c \
	i386_disasm.c \
	ppc_disasm.c \
	hppa_disasm.c \
	sparc_disasm.c \
	arm_disasm.c \
	print_objc.c \
	print_objc2_32bit.c \
	print_objc2_64bit.c \
	print_bitcode.c \
	coff_print.c \
	arm64_disasm.c \
	dyld_bind_info.c
{ "pile_set_name": "Github" }
/*
 * Copyright 2015-2020 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
 * the License. A copy of the License is located at
 *
 * http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
 * and limitations under the License.
 */
package com.amazonaws.services.ec2.model;

import java.io.Serializable;
import javax.annotation.Generated;

import com.amazonaws.AmazonWebServiceRequest;
import com.amazonaws.Request;
import com.amazonaws.services.ec2.model.transform.DeleteCustomerGatewayRequestMarshaller;

/**
 * <p>
 * Contains the parameters for DeleteCustomerGateway.
 * </p>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DeleteCustomerGatewayRequest extends AmazonWebServiceRequest implements Serializable, Cloneable,
        DryRunSupportedRequest<DeleteCustomerGatewayRequest> {

    /**
     * <p>
     * The ID of the customer gateway.
     * </p>
     */
    private String customerGatewayId;

    /**
     * Default constructor for DeleteCustomerGatewayRequest object. Callers should use the setter or fluent setter
     * (with...) methods to initialize the object after creating it.
     */
    public DeleteCustomerGatewayRequest() {
    }

    /**
     * Constructs a new DeleteCustomerGatewayRequest object. Callers should use the setter or fluent setter (with...)
     * methods to initialize any additional object members.
     *
     * @param customerGatewayId
     *        The ID of the customer gateway.
     */
    public DeleteCustomerGatewayRequest(String customerGatewayId) {
        setCustomerGatewayId(customerGatewayId);
    }

    /**
     * <p>
     * The ID of the customer gateway.
     * </p>
     *
     * @param customerGatewayId
     *        The ID of the customer gateway.
     */
    public void setCustomerGatewayId(String customerGatewayId) {
        this.customerGatewayId = customerGatewayId;
    }

    /**
     * <p>
     * The ID of the customer gateway.
     * </p>
     *
     * @return The ID of the customer gateway.
     */
    public String getCustomerGatewayId() {
        return this.customerGatewayId;
    }

    /**
     * <p>
     * The ID of the customer gateway.
     * </p>
     *
     * @param customerGatewayId
     *        The ID of the customer gateway.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public DeleteCustomerGatewayRequest withCustomerGatewayId(String customerGatewayId) {
        setCustomerGatewayId(customerGatewayId);
        return this;
    }

    /**
     * This method is intended for internal use only. Returns the marshaled request configured with additional
     * parameters to enable operation dry-run.
     */
    @Override
    public Request<DeleteCustomerGatewayRequest> getDryRunRequest() {
        Request<DeleteCustomerGatewayRequest> request = new DeleteCustomerGatewayRequestMarshaller().marshall(this);
        request.addParameter("DryRun", Boolean.toString(true));
        return request;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        sb.append("{");
        if (getCustomerGatewayId() != null)
            sb.append("CustomerGatewayId: ").append(getCustomerGatewayId());
        sb.append("}");
        return sb.toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;

        if (obj instanceof DeleteCustomerGatewayRequest == false)
            return false;
        DeleteCustomerGatewayRequest other = (DeleteCustomerGatewayRequest) obj;
        if (other.getCustomerGatewayId() == null ^ this.getCustomerGatewayId() == null)
            return false;
        if (other.getCustomerGatewayId() != null && other.getCustomerGatewayId().equals(this.getCustomerGatewayId()) == false)
            return false;
        return true;
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int hashCode = 1;

        hashCode = prime * hashCode + ((getCustomerGatewayId() == null) ? 0 : getCustomerGatewayId().hashCode());
        return hashCode;
    }

    @Override
    public DeleteCustomerGatewayRequest clone() {
        return (DeleteCustomerGatewayRequest) super.clone();
    }
}
{ "pile_set_name": "Github" }
/* * librdkafka - Apache Kafka C library * * Copyright (c) 2012,2013 Magnus Edenhill * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ // FIXME: Revise this documentation: /** * This file implements the consumer offset storage. * It currently supports local file storage and broker OffsetCommit storage, * not zookeeper. * * Regardless of commit method (file, broker, ..) this is how it works: * - When rdkafka, or the application, depending on if auto.offset.commit * is enabled or not, calls rd_kafka_offset_store() with an offset to store, * all it does is set rktp->rktp_stored_offset to this value. * This can happen from any thread and is locked by the rktp lock. * - The actual commit/write of the offset to its backing store (filesystem) * is performed by the main rdkafka thread and scheduled at the configured * auto.commit.interval.ms interval. * - The write is performed in the main rdkafka thread (in a blocking manner * for file based offsets) and once the write has * succeeded rktp->rktp_committed_offset is updated to the new value. * - If offset.store.sync.interval.ms is configured the main rdkafka thread * will also make sure to fsync() each offset file accordingly. (file) */ #include "rdkafka_int.h" #include "rdkafka_topic.h" #include "rdkafka_partition.h" #include "rdkafka_offset.h" #include "rdkafka_broker.h" #include <stdio.h> #include <sys/types.h> #include <fcntl.h> #ifdef _MSC_VER #include <io.h> #include <share.h> #include <sys/stat.h> #include <Shlwapi.h> typedef int mode_t; #endif /** * Convert an absolute or logical offset to string. 
*/ const char *rd_kafka_offset2str (int64_t offset) { static RD_TLS char ret[16][32]; static RD_TLS int i = 0; i = (i + 1) % 16; if (offset >= 0) rd_snprintf(ret[i], sizeof(ret[i]), "%"PRId64, offset); else if (offset == RD_KAFKA_OFFSET_BEGINNING) return "BEGINNING"; else if (offset == RD_KAFKA_OFFSET_END) return "END"; else if (offset == RD_KAFKA_OFFSET_STORED) return "STORED"; else if (offset == RD_KAFKA_OFFSET_INVALID) return "INVALID"; else if (offset <= RD_KAFKA_OFFSET_TAIL_BASE) rd_snprintf(ret[i], sizeof(ret[i]), "TAIL(%lld)", llabs(offset - RD_KAFKA_OFFSET_TAIL_BASE)); else rd_snprintf(ret[i], sizeof(ret[i]), "%"PRId64"?", offset); return ret[i]; } static void rd_kafka_offset_file_close (rd_kafka_toppar_t *rktp) { if (!rktp->rktp_offset_fp) return; fclose(rktp->rktp_offset_fp); rktp->rktp_offset_fp = NULL; } #ifndef _MSC_VER /** * Linux version of open callback providing racefree CLOEXEC. */ int rd_kafka_open_cb_linux (const char *pathname, int flags, mode_t mode, void *opaque) { #ifdef O_CLOEXEC return open(pathname, flags|O_CLOEXEC, mode); #else return rd_kafka_open_cb_generic(pathname, flags, mode, opaque); #endif } #endif /** * Fallback version of open_cb NOT providing racefree CLOEXEC, * but setting CLOEXEC after file open (if FD_CLOEXEC is defined). */ int rd_kafka_open_cb_generic (const char *pathname, int flags, mode_t mode, void *opaque) { #ifndef _MSC_VER int fd; int on = 1; fd = open(pathname, flags, mode); if (fd == -1) return -1; #ifdef FD_CLOEXEC fcntl(fd, F_SETFD, FD_CLOEXEC, &on); #endif return fd; #else int fd; if (_sopen_s(&fd, pathname, flags, _SH_DENYNO, mode) != 0) return -1; return fd; #endif } static int rd_kafka_offset_file_open (rd_kafka_toppar_t *rktp) { rd_kafka_t *rk = rktp->rktp_rkt->rkt_rk; int fd; #ifndef _MSC_VER mode_t mode = 0644; #else mode_t mode = _S_IREAD|_S_IWRITE; #endif if ((fd = rk->rk_conf.open_cb(rktp->rktp_offset_path, O_CREAT|O_RDWR, mode, rk->rk_conf.opaque)) == -1) { rd_kafka_op_err(rktp->rktp_rkt->rkt_rk, RD_KAFKA_RESP_ERR__FS, "%s [%"PRId32"]: " "Failed to open offset file %s: %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, rktp->rktp_offset_path, rd_strerror(errno)); return -1; } rktp->rktp_offset_fp = #ifndef _MSC_VER fdopen(fd, "r+"); #else _fdopen(fd, "r+"); #endif return 0; } static int64_t rd_kafka_offset_file_read (rd_kafka_toppar_t *rktp) { char buf[22]; char *end; int64_t offset; size_t r; if (fseek(rktp->rktp_offset_fp, 0, SEEK_SET) == -1) { rd_kafka_op_err(rktp->rktp_rkt->rkt_rk, RD_KAFKA_RESP_ERR__FS, "%s [%"PRId32"]: " "Seek (for read) failed on offset file %s: %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, rktp->rktp_offset_path, rd_strerror(errno)); rd_kafka_offset_file_close(rktp); return RD_KAFKA_OFFSET_INVALID; } r = fread(buf, 1, sizeof(buf) - 1, rktp->rktp_offset_fp); if (r == 0) { rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSET", "%s [%"PRId32"]: offset file (%s) is empty", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, rktp->rktp_offset_path); return RD_KAFKA_OFFSET_INVALID; } buf[r] = '\0'; offset = strtoull(buf, &end, 10); if (buf == end) { rd_kafka_op_err(rktp->rktp_rkt->rkt_rk, RD_KAFKA_RESP_ERR__FS, "%s [%"PRId32"]: " "Unable to parse offset in %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, rktp->rktp_offset_path); return RD_KAFKA_OFFSET_INVALID; } rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSET", "%s [%"PRId32"]: Read offset %"PRId64" from offset " "file (%s)", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, offset, rktp->rktp_offset_path); return 
offset; } /** * Sync/flush offset file. */ static int rd_kafka_offset_file_sync (rd_kafka_toppar_t *rktp) { if (!rktp->rktp_offset_fp) return 0; rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "SYNC", "%s [%"PRId32"]: offset file sync", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition); #ifndef _MSC_VER (void)fflush(rktp->rktp_offset_fp); (void)fsync(fileno(rktp->rktp_offset_fp)); // FIXME #else // FIXME // FlushFileBuffers(_get_osfhandle(fileno(rktp->rktp_offset_fp))); #endif return 0; } /** * Write offset to offset file. * * Locality: toppar's broker thread */ static rd_kafka_resp_err_t rd_kafka_offset_file_commit (rd_kafka_toppar_t *rktp) { rd_kafka_itopic_t *rkt = rktp->rktp_rkt; int attempt; rd_kafka_resp_err_t err = RD_KAFKA_RESP_ERR_NO_ERROR; int64_t offset = rktp->rktp_stored_offset; for (attempt = 0 ; attempt < 2 ; attempt++) { char buf[22]; int len; if (!rktp->rktp_offset_fp) if (rd_kafka_offset_file_open(rktp) == -1) continue; if (fseek(rktp->rktp_offset_fp, 0, SEEK_SET) == -1) { rd_kafka_op_err(rktp->rktp_rkt->rkt_rk, RD_KAFKA_RESP_ERR__FS, "%s [%"PRId32"]: " "Seek failed on offset file %s: %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, rktp->rktp_offset_path, rd_strerror(errno)); err = RD_KAFKA_RESP_ERR__FS; rd_kafka_offset_file_close(rktp); continue; } len = rd_snprintf(buf, sizeof(buf), "%"PRId64"\n", offset); if (fwrite(buf, 1, len, rktp->rktp_offset_fp) < 1) { rd_kafka_op_err(rktp->rktp_rkt->rkt_rk, RD_KAFKA_RESP_ERR__FS, "%s [%"PRId32"]: " "Failed to write offset %"PRId64" to " "offset file %s: %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, offset, rktp->rktp_offset_path, rd_strerror(errno)); err = RD_KAFKA_RESP_ERR__FS; rd_kafka_offset_file_close(rktp); continue; } /* Need to flush before truncate to preserve write ordering */ (void)fflush(rktp->rktp_offset_fp); /* Truncate file */ #ifdef _MSC_VER if (_chsize_s(_fileno(rktp->rktp_offset_fp), len) == -1) ; /* Ignore truncate failures */ #else if (ftruncate(fileno(rktp->rktp_offset_fp), len) == -1) ; /* Ignore truncate failures */ #endif rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSET", "%s [%"PRId32"]: wrote offset %"PRId64" to " "file %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, offset, rktp->rktp_offset_path); rktp->rktp_committed_offset = offset; /* If sync interval is set to immediate we sync right away. */ if (rkt->rkt_conf.offset_store_sync_interval_ms == 0) rd_kafka_offset_file_sync(rktp); return RD_KAFKA_RESP_ERR_NO_ERROR; } return err; } /** * Enqueue offset_commit_cb op, if configured. * */ void rd_kafka_offset_commit_cb_op (rd_kafka_t *rk, rd_kafka_resp_err_t err, const rd_kafka_topic_partition_list_t *offsets) { rd_kafka_op_t *rko; if (!(rk->rk_conf.enabled_events & RD_KAFKA_EVENT_OFFSET_COMMIT)) return; rko = rd_kafka_op_new(RD_KAFKA_OP_OFFSET_COMMIT|RD_KAFKA_OP_REPLY); rd_kafka_op_set_prio(rko, RD_KAFKA_PRIO_HIGH); rko->rko_err = err; rko->rko_u.offset_commit.cb = rk->rk_conf.offset_commit_cb;/*maybe NULL*/ rko->rko_u.offset_commit.opaque = rk->rk_conf.opaque; if (offsets) rko->rko_u.offset_commit.partitions = rd_kafka_topic_partition_list_copy(offsets); rd_kafka_q_enq(rk->rk_rep, rko); } /** * Commit a list of offsets asynchronously. Response will be queued on 'replyq'. * Optional \p cb will be set on requesting op. 
* * Makes a copy of \p offsets (may be NULL for current assignment) */ static rd_kafka_resp_err_t rd_kafka_commit0 (rd_kafka_t *rk, const rd_kafka_topic_partition_list_t *offsets, rd_kafka_toppar_t *rktp, rd_kafka_replyq_t replyq, void (*cb) (rd_kafka_t *rk, rd_kafka_resp_err_t err, rd_kafka_topic_partition_list_t *offsets, void *opaque), void *opaque, const char *reason) { rd_kafka_cgrp_t *rkcg; rd_kafka_op_t *rko; if (!(rkcg = rd_kafka_cgrp_get(rk))) return RD_KAFKA_RESP_ERR__UNKNOWN_GROUP; rko = rd_kafka_op_new(RD_KAFKA_OP_OFFSET_COMMIT); rko->rko_u.offset_commit.reason = rd_strdup(reason); rko->rko_replyq = replyq; rko->rko_u.offset_commit.cb = cb; rko->rko_u.offset_commit.opaque = opaque; if (rktp) rko->rko_rktp = rd_kafka_toppar_keep(rktp); if (offsets) rko->rko_u.offset_commit.partitions = rd_kafka_topic_partition_list_copy(offsets); rd_kafka_q_enq(rkcg->rkcg_ops, rko); return RD_KAFKA_RESP_ERR_NO_ERROR; } /** * NOTE: 'offsets' may be NULL, see official documentation. */ rd_kafka_resp_err_t rd_kafka_commit (rd_kafka_t *rk, const rd_kafka_topic_partition_list_t *offsets, int async) { rd_kafka_cgrp_t *rkcg; rd_kafka_resp_err_t err; rd_kafka_q_t *repq = NULL; rd_kafka_replyq_t rq = RD_KAFKA_NO_REPLYQ; if (!(rkcg = rd_kafka_cgrp_get(rk))) return RD_KAFKA_RESP_ERR__UNKNOWN_GROUP; if (!async) { repq = rd_kafka_q_new(rk); rq = RD_KAFKA_REPLYQ(repq, 0); } err = rd_kafka_commit0(rk, offsets, NULL, rq, NULL, NULL, "manual"); if (!err && !async) err = rd_kafka_q_wait_result(repq, RD_POLL_INFINITE); if (!async) rd_kafka_q_destroy_owner(repq); return err; } rd_kafka_resp_err_t rd_kafka_commit_message (rd_kafka_t *rk, const rd_kafka_message_t *rkmessage, int async) { rd_kafka_topic_partition_list_t *offsets; rd_kafka_topic_partition_t *rktpar; rd_kafka_resp_err_t err; if (rkmessage->err) return RD_KAFKA_RESP_ERR__INVALID_ARG; offsets = rd_kafka_topic_partition_list_new(1); rktpar = rd_kafka_topic_partition_list_add( offsets, rd_kafka_topic_name(rkmessage->rkt), rkmessage->partition); rktpar->offset = rkmessage->offset+1; err = rd_kafka_commit(rk, offsets, async); rd_kafka_topic_partition_list_destroy(offsets); return err; } rd_kafka_resp_err_t rd_kafka_commit_queue (rd_kafka_t *rk, const rd_kafka_topic_partition_list_t *offsets, rd_kafka_queue_t *rkqu, void (*cb) (rd_kafka_t *rk, rd_kafka_resp_err_t err, rd_kafka_topic_partition_list_t *offsets, void *opaque), void *opaque) { rd_kafka_q_t *rkq; rd_kafka_resp_err_t err; if (!rd_kafka_cgrp_get(rk)) return RD_KAFKA_RESP_ERR__UNKNOWN_GROUP; if (rkqu) rkq = rkqu->rkqu_q; else rkq = rd_kafka_q_new(rk); err = rd_kafka_commit0(rk, offsets, NULL, RD_KAFKA_REPLYQ(rkq, 0), cb, opaque, "manual"); if (!rkqu) { rd_kafka_op_t *rko = rd_kafka_q_pop_serve(rkq, RD_POLL_INFINITE, 0, RD_KAFKA_Q_CB_FORCE_RETURN, NULL, NULL); if (!rko) err = RD_KAFKA_RESP_ERR__TIMED_OUT; else { if (cb) cb(rk, rko->rko_err, rko->rko_u.offset_commit.partitions, opaque); err = rko->rko_err; rd_kafka_op_destroy(rko); } if (rkqu) rd_kafka_q_destroy(rkq); else rd_kafka_q_destroy_owner(rkq); } return err; } /** * Called when a broker commit is done. 
* * Locality: toppar handler thread * Locks: none */ static void rd_kafka_offset_broker_commit_cb (rd_kafka_t *rk, rd_kafka_resp_err_t err, rd_kafka_topic_partition_list_t *offsets, void *opaque) { shptr_rd_kafka_toppar_t *s_rktp; rd_kafka_toppar_t *rktp; rd_kafka_topic_partition_t *rktpar; if (offsets->cnt == 0) { rd_kafka_dbg(rk, TOPIC, "OFFSETCOMMIT", "No offsets to commit (commit_cb)"); return; } rktpar = &offsets->elems[0]; if (!(s_rktp = rd_kafka_topic_partition_list_get_toppar(rk, rktpar))) { rd_kafka_dbg(rk, TOPIC, "OFFSETCOMMIT", "No local partition found for %s [%"PRId32"] " "while parsing OffsetCommit response " "(offset %"PRId64", error \"%s\")", rktpar->topic, rktpar->partition, rktpar->offset, rd_kafka_err2str(rktpar->err)); return; } rktp = rd_kafka_toppar_s2i(s_rktp); if (!err) err = rktpar->err; rd_kafka_toppar_offset_commit_result(rktp, err, offsets); rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSET", "%s [%"PRId32"]: offset %"PRId64" committed: %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, rktpar->offset, rd_kafka_err2str(err)); rktp->rktp_committing_offset = 0; rd_kafka_toppar_lock(rktp); if (rktp->rktp_flags & RD_KAFKA_TOPPAR_F_OFFSET_STORE_STOPPING) rd_kafka_offset_store_term(rktp, err); rd_kafka_toppar_unlock(rktp); rd_kafka_toppar_destroy(s_rktp); } static rd_kafka_resp_err_t rd_kafka_offset_broker_commit (rd_kafka_toppar_t *rktp, const char *reason) { rd_kafka_topic_partition_list_t *offsets; rd_kafka_topic_partition_t *rktpar; rd_kafka_assert(rktp->rktp_rkt->rkt_rk, rktp->rktp_cgrp != NULL); rd_kafka_assert(rktp->rktp_rkt->rkt_rk, rktp->rktp_flags & RD_KAFKA_TOPPAR_F_OFFSET_STORE); rktp->rktp_committing_offset = rktp->rktp_stored_offset; offsets = rd_kafka_topic_partition_list_new(1); rktpar = rd_kafka_topic_partition_list_add( offsets, rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition); rktpar->offset = rktp->rktp_committing_offset; rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSETCMT", "%.*s [%"PRId32"]: committing offset %"PRId64": %s", RD_KAFKAP_STR_PR(rktp->rktp_rkt->rkt_topic), rktp->rktp_partition, rktp->rktp_committing_offset, reason); rd_kafka_commit0(rktp->rktp_rkt->rkt_rk, offsets, rktp, RD_KAFKA_REPLYQ(rktp->rktp_ops, 0), rd_kafka_offset_broker_commit_cb, NULL, reason); rd_kafka_topic_partition_list_destroy(offsets); return RD_KAFKA_RESP_ERR__IN_PROGRESS; } /** * Commit offset to backing store. * This might be an async operation. * * Locality: toppar handler thread */ static rd_kafka_resp_err_t rd_kafka_offset_commit (rd_kafka_toppar_t *rktp, const char *reason) { if (1) // FIXME rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSET", "%s [%"PRId32"]: commit: " "stored offset %"PRId64" > committed offset %"PRId64"?", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, rktp->rktp_stored_offset, rktp->rktp_committed_offset); /* Already committed */ if (rktp->rktp_stored_offset <= rktp->rktp_committed_offset) return RD_KAFKA_RESP_ERR_NO_ERROR; /* Already committing (for async ops) */ if (rktp->rktp_stored_offset <= rktp->rktp_committing_offset) return RD_KAFKA_RESP_ERR__PREV_IN_PROGRESS; switch (rktp->rktp_rkt->rkt_conf.offset_store_method) { case RD_KAFKA_OFFSET_METHOD_FILE: return rd_kafka_offset_file_commit(rktp); case RD_KAFKA_OFFSET_METHOD_BROKER: return rd_kafka_offset_broker_commit(rktp, reason); default: /* UNREACHABLE */ return RD_KAFKA_RESP_ERR__INVALID_ARG; } } /** * Sync offset backing store. This is only used for METHOD_FILE. * * Locality: rktp's broker thread. 
*/ rd_kafka_resp_err_t rd_kafka_offset_sync (rd_kafka_toppar_t *rktp) { switch (rktp->rktp_rkt->rkt_conf.offset_store_method) { case RD_KAFKA_OFFSET_METHOD_FILE: return rd_kafka_offset_file_sync(rktp); default: return RD_KAFKA_RESP_ERR__INVALID_ARG; } } /** * Store offset. * Typically called from application code. * * NOTE: No locks must be held. */ rd_kafka_resp_err_t rd_kafka_offset_store (rd_kafka_topic_t *app_rkt, int32_t partition, int64_t offset) { rd_kafka_itopic_t *rkt = rd_kafka_topic_a2i(app_rkt); shptr_rd_kafka_toppar_t *s_rktp; /* Find toppar */ rd_kafka_topic_rdlock(rkt); if (!(s_rktp = rd_kafka_toppar_get(rkt, partition, 0/*!ua_on_miss*/))) { rd_kafka_topic_rdunlock(rkt); return RD_KAFKA_RESP_ERR__UNKNOWN_PARTITION; } rd_kafka_topic_rdunlock(rkt); rd_kafka_offset_store0(rd_kafka_toppar_s2i(s_rktp), offset+1, 1/*lock*/); rd_kafka_toppar_destroy(s_rktp); return RD_KAFKA_RESP_ERR_NO_ERROR; } rd_kafka_resp_err_t rd_kafka_offsets_store (rd_kafka_t *rk, rd_kafka_topic_partition_list_t *offsets) { int i; int ok_cnt = 0; if (rk->rk_conf.enable_auto_offset_store) return RD_KAFKA_RESP_ERR__INVALID_ARG; for (i = 0 ; i < offsets->cnt ; i++) { rd_kafka_topic_partition_t *rktpar = &offsets->elems[i]; shptr_rd_kafka_toppar_t *s_rktp; s_rktp = rd_kafka_topic_partition_get_toppar(rk, rktpar); if (!s_rktp) { rktpar->err = RD_KAFKA_RESP_ERR__UNKNOWN_PARTITION; continue; } rd_kafka_offset_store0(rd_kafka_toppar_s2i(s_rktp), rktpar->offset, 1/*lock*/); rd_kafka_toppar_destroy(s_rktp); rktpar->err = RD_KAFKA_RESP_ERR_NO_ERROR; ok_cnt++; } return offsets->cnt > 0 && ok_cnt == 0 ? RD_KAFKA_RESP_ERR__UNKNOWN_PARTITION : RD_KAFKA_RESP_ERR_NO_ERROR; } /** * Decommissions the use of an offset file for a toppar. * The file content will not be touched and the file will not be removed. */ static rd_kafka_resp_err_t rd_kafka_offset_file_term (rd_kafka_toppar_t *rktp) { rd_kafka_resp_err_t err = RD_KAFKA_RESP_ERR_NO_ERROR; /* Sync offset file if the sync is intervalled (> 0) */ if (rktp->rktp_rkt->rkt_conf.offset_store_sync_interval_ms > 0) { rd_kafka_offset_file_sync(rktp); rd_kafka_timer_stop(&rktp->rktp_rkt->rkt_rk->rk_timers, &rktp->rktp_offset_sync_tmr, 1/*lock*/); } rd_kafka_offset_file_close(rktp); rd_free(rktp->rktp_offset_path); rktp->rktp_offset_path = NULL; return err; } static rd_kafka_op_res_t rd_kafka_offset_reset_op_cb (rd_kafka_t *rk, rd_kafka_q_t *rkq, rd_kafka_op_t *rko) { rd_kafka_toppar_t *rktp = rd_kafka_toppar_s2i(rko->rko_rktp); rd_kafka_toppar_lock(rktp); rd_kafka_offset_reset(rktp, rko->rko_u.offset_reset.offset, rko->rko_err, rko->rko_u.offset_reset.reason); rd_kafka_toppar_unlock(rktp); return RD_KAFKA_OP_RES_HANDLED; } /** * Take action when the offset for a toppar becomes unusable. * * Locality: toppar handler thread * Locks: toppar_lock() MUST be held */ void rd_kafka_offset_reset (rd_kafka_toppar_t *rktp, int64_t err_offset, rd_kafka_resp_err_t err, const char *reason) { int64_t offset = RD_KAFKA_OFFSET_INVALID; rd_kafka_op_t *rko; /* Enqueue op for toppar handler thread if we're on the wrong thread. 
*/ if (!thrd_is_current(rktp->rktp_rkt->rkt_rk->rk_thread)) { rd_kafka_op_t *rko = rd_kafka_op_new(RD_KAFKA_OP_OFFSET_RESET | RD_KAFKA_OP_CB); rko->rko_op_cb = rd_kafka_offset_reset_op_cb; rko->rko_err = err; rko->rko_rktp = rd_kafka_toppar_keep(rktp); rko->rko_u.offset_reset.offset = err_offset; rko->rko_u.offset_reset.reason = rd_strdup(reason); rd_kafka_q_enq(rktp->rktp_ops, rko); return; } if (err_offset == RD_KAFKA_OFFSET_INVALID || err) offset = rktp->rktp_rkt->rkt_conf.auto_offset_reset; else offset = err_offset; if (offset == RD_KAFKA_OFFSET_INVALID) { /* Error, auto.offset.reset tells us to error out. */ rko = rd_kafka_op_new(RD_KAFKA_OP_CONSUMER_ERR); rko->rko_err = err; rko->rko_u.err.offset = err_offset; rko->rko_u.err.errstr = rd_strdup(reason); rko->rko_rktp = rd_kafka_toppar_keep(rktp); rd_kafka_q_enq(rktp->rktp_fetchq, rko); rd_kafka_toppar_set_fetch_state( rktp, RD_KAFKA_TOPPAR_FETCH_NONE); } else { /* Query logical offset */ rktp->rktp_query_offset = offset; rd_kafka_toppar_set_fetch_state( rktp, RD_KAFKA_TOPPAR_FETCH_OFFSET_QUERY); } rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSET", "%s [%"PRId32"]: offset reset (at offset %s) " "to %s: %s: %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, rd_kafka_offset2str(err_offset), rd_kafka_offset2str(offset), reason, rd_kafka_err2str(err)); if (rktp->rktp_fetch_state == RD_KAFKA_TOPPAR_FETCH_OFFSET_QUERY) rd_kafka_toppar_offset_request(rktp, rktp->rktp_query_offset, 0); } /** * Escape any special characters in filename 'in' and write escaped * string to 'out' (of max size out_size). */ static char *mk_esc_filename (const char *in, char *out, size_t out_size) { const char *s = in; char *o = out; while (*s) { const char *esc; size_t esclen; switch (*s) { case '/': /* linux */ esc = "%2F"; esclen = strlen(esc); break; case ':': /* osx, windows */ esc = "%3A"; esclen = strlen(esc); break; case '\\': /* windows */ esc = "%5C"; esclen = strlen(esc); break; default: esc = s; esclen = 1; break; } if ((size_t)((o + esclen + 1) - out) >= out_size) { /* No more space in output string, truncate. */ break; } while (esclen-- > 0) *(o++) = *(esc++); s++; } *o = '\0'; return out; } static void rd_kafka_offset_sync_tmr_cb (rd_kafka_timers_t *rkts, void *arg) { rd_kafka_toppar_t *rktp = arg; rd_kafka_offset_sync(rktp); } /** * Prepare a toppar for using an offset file. * * Locality: rdkafka main thread * Locks: toppar_lock(rktp) must be held */ static void rd_kafka_offset_file_init (rd_kafka_toppar_t *rktp) { char spath[4096]; const char *path = rktp->rktp_rkt->rkt_conf.offset_store_path; int64_t offset = RD_KAFKA_OFFSET_INVALID; if (rd_kafka_path_is_dir(path)) { char tmpfile[1024]; char escfile[4096]; /* Include group.id in filename if configured. */ if (!RD_KAFKAP_STR_IS_NULL(rktp->rktp_rkt->rkt_rk->rk_group_id)) rd_snprintf(tmpfile, sizeof(tmpfile), "%s-%"PRId32"-%.*s.offset", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, RD_KAFKAP_STR_PR(rktp->rktp_rkt->rkt_rk-> rk_group_id)); else rd_snprintf(tmpfile, sizeof(tmpfile), "%s-%"PRId32".offset", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition); /* Escape filename to make it safe. */ mk_esc_filename(tmpfile, escfile, sizeof(escfile)); rd_snprintf(spath, sizeof(spath), "%s%s%s", path, path[strlen(path)-1] == '/' ? 
"" : "/", escfile); path = spath; } rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSET", "%s [%"PRId32"]: using offset file %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, path); rktp->rktp_offset_path = rd_strdup(path); /* Set up the offset file sync interval. */ if (rktp->rktp_rkt->rkt_conf.offset_store_sync_interval_ms > 0) rd_kafka_timer_start(&rktp->rktp_rkt->rkt_rk->rk_timers, &rktp->rktp_offset_sync_tmr, rktp->rktp_rkt->rkt_conf. offset_store_sync_interval_ms * 1000ll, rd_kafka_offset_sync_tmr_cb, rktp); if (rd_kafka_offset_file_open(rktp) != -1) { /* Read offset from offset file. */ offset = rd_kafka_offset_file_read(rktp); } if (offset != RD_KAFKA_OFFSET_INVALID) { /* Start fetching from offset */ rktp->rktp_stored_offset = offset; rktp->rktp_committed_offset = offset; rd_kafka_toppar_next_offset_handle(rktp, offset); } else { /* Offset was not usable: perform offset reset logic */ rktp->rktp_committed_offset = RD_KAFKA_OFFSET_INVALID; rd_kafka_offset_reset(rktp, RD_KAFKA_OFFSET_INVALID, RD_KAFKA_RESP_ERR__FS, "non-readable offset file"); } } /** * Terminate broker offset store */ static rd_kafka_resp_err_t rd_kafka_offset_broker_term (rd_kafka_toppar_t *rktp){ return RD_KAFKA_RESP_ERR_NO_ERROR; } /** * Prepare a toppar for using broker offset commit (broker 0.8.2 or later). * When using KafkaConsumer (high-level consumer) this functionality is * disabled in favour of the cgrp commits for the entire set of subscriptions. */ static void rd_kafka_offset_broker_init (rd_kafka_toppar_t *rktp) { if (!rd_kafka_is_simple_consumer(rktp->rktp_rkt->rkt_rk)) return; rd_kafka_offset_reset(rktp, RD_KAFKA_OFFSET_STORED, 0, "query broker for offsets"); } /** * Terminates toppar's offset store, this is the finalizing step after * offset_store_stop(). * * Locks: rd_kafka_toppar_lock() MUST be held. */ void rd_kafka_offset_store_term (rd_kafka_toppar_t *rktp, rd_kafka_resp_err_t err) { rd_kafka_resp_err_t err2; rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "STORETERM", "%s [%"PRId32"]: offset store terminating", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition); rktp->rktp_flags &= ~RD_KAFKA_TOPPAR_F_OFFSET_STORE_STOPPING; rd_kafka_timer_stop(&rktp->rktp_rkt->rkt_rk->rk_timers, &rktp->rktp_offset_commit_tmr, 1/*lock*/); switch (rktp->rktp_rkt->rkt_conf.offset_store_method) { case RD_KAFKA_OFFSET_METHOD_FILE: err2 = rd_kafka_offset_file_term(rktp); break; case RD_KAFKA_OFFSET_METHOD_BROKER: err2 = rd_kafka_offset_broker_term(rktp); break; case RD_KAFKA_OFFSET_METHOD_NONE: err2 = RD_KAFKA_RESP_ERR_NO_ERROR; break; } /* Prioritize the input error (probably from commit), fall * back on termination error. */ if (!err) err = err2; rd_kafka_toppar_fetch_stopped(rktp, err); } /** * Stop toppar's offset store, committing the final offsets, etc. * * Returns RD_KAFKA_RESP_ERR_NO_ERROR on success, * RD_KAFKA_RESP_ERR__IN_PROGRESS if the term triggered an * async operation (e.g., broker offset commit), or * any other error in case of immediate failure. * * The offset layer will call rd_kafka_offset_store_term() when * the offset management has been fully stopped for this partition. * * Locks: rd_kafka_toppar_lock() MUST be held. 
*/ rd_kafka_resp_err_t rd_kafka_offset_store_stop (rd_kafka_toppar_t *rktp) { rd_kafka_resp_err_t err = RD_KAFKA_RESP_ERR_NO_ERROR; if (!(rktp->rktp_flags & RD_KAFKA_TOPPAR_F_OFFSET_STORE)) goto done; rktp->rktp_flags |= RD_KAFKA_TOPPAR_F_OFFSET_STORE_STOPPING; rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSET", "%s [%"PRId32"]: stopping offset store " "(stored offset %"PRId64 ", committed offset %"PRId64", EOF offset %"PRId64")", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, rktp->rktp_stored_offset, rktp->rktp_committed_offset, rktp->rktp_offsets_fin.eof_offset); /* Store end offset for empty partitions */ if (rktp->rktp_rkt->rkt_rk->rk_conf.enable_auto_offset_store && rktp->rktp_stored_offset == RD_KAFKA_OFFSET_INVALID && rktp->rktp_offsets_fin.eof_offset > 0) rd_kafka_offset_store0(rktp, rktp->rktp_offsets_fin.eof_offset, 0/*no lock*/); /* Commit offset to backing store. * This might be an async operation. */ if (rd_kafka_is_simple_consumer(rktp->rktp_rkt->rkt_rk) && rktp->rktp_stored_offset > rktp->rktp_committed_offset) err = rd_kafka_offset_commit(rktp, "offset store stop"); /* If stop is in progress (async commit), return now. */ if (err == RD_KAFKA_RESP_ERR__IN_PROGRESS) return err; done: /* Stop is done */ rd_kafka_offset_store_term(rktp, err); return RD_KAFKA_RESP_ERR_NO_ERROR; } static void rd_kafka_offset_auto_commit_tmr_cb (rd_kafka_timers_t *rkts, void *arg) { rd_kafka_toppar_t *rktp = arg; rd_kafka_offset_commit(rktp, "auto commit timer"); } void rd_kafka_offset_query_tmr_cb (rd_kafka_timers_t *rkts, void *arg) { rd_kafka_toppar_t *rktp = arg; rd_kafka_toppar_lock(rktp); rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSET", "Topic %s [%"PRId32"]: timed offset query for %s in " "state %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, rd_kafka_offset2str(rktp->rktp_query_offset), rd_kafka_fetch_states[rktp->rktp_fetch_state]); rd_kafka_toppar_offset_request(rktp, rktp->rktp_query_offset, 0); rd_kafka_toppar_unlock(rktp); } /** * Initialize toppar's offset store. * * Locality: toppar handler thread */ void rd_kafka_offset_store_init (rd_kafka_toppar_t *rktp) { static const char *store_names[] = { "none", "file", "broker" }; rd_kafka_dbg(rktp->rktp_rkt->rkt_rk, TOPIC, "OFFSET", "%s [%"PRId32"]: using offset store method: %s", rktp->rktp_rkt->rkt_topic->str, rktp->rktp_partition, store_names[rktp->rktp_rkt->rkt_conf.offset_store_method]); /* The committed offset is unknown at this point. */ rktp->rktp_committed_offset = RD_KAFKA_OFFSET_INVALID; /* Set up the commit interval (for simple consumer). */ if (rd_kafka_is_simple_consumer(rktp->rktp_rkt->rkt_rk) && rktp->rktp_rkt->rkt_conf.auto_commit_interval_ms > 0) rd_kafka_timer_start(&rktp->rktp_rkt->rkt_rk->rk_timers, &rktp->rktp_offset_commit_tmr, rktp->rktp_rkt->rkt_conf. auto_commit_interval_ms * 1000ll, rd_kafka_offset_auto_commit_tmr_cb, rktp); switch (rktp->rktp_rkt->rkt_conf.offset_store_method) { case RD_KAFKA_OFFSET_METHOD_FILE: rd_kafka_offset_file_init(rktp); break; case RD_KAFKA_OFFSET_METHOD_BROKER: rd_kafka_offset_broker_init(rktp); break; case RD_KAFKA_OFFSET_METHOD_NONE: break; default: /* NOTREACHED */ return; } rktp->rktp_flags |= RD_KAFKA_TOPPAR_F_OFFSET_STORE; }
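/*
 * Illustrative sketch (not part of librdkafka): how an application using the
 * simple (legacy) consumer might hand offsets to the offset store described
 * in the file above. The function and field names below come from the public
 * librdkafka API; the surrounding consumer loop is assumed.
 */
#include <librdkafka/rdkafka.h>

static void example_store_offset (rd_kafka_message_t *rkmessage) {
        if (rkmessage->err)
                return; /* don't store offsets for error/EOF messages */

        /* ... application-specific processing of rkmessage->payload ... */

        /* Hand the consumed offset to the offset store; the store itself
         * adds 1 (the next offset to consume) before writing, as implemented
         * in rd_kafka_offset_store() above. The configured backing store
         * (file or broker) commits it on the auto.commit.interval.ms timer. */
        rd_kafka_offset_store(rkmessage->rkt, rkmessage->partition,
                              rkmessage->offset);
}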
{ "pile_set_name": "Github" }
using System;
using System.Collections.Generic;
using System.IO;
using System.Net;
using System.Text;
using SimpleJSON;
using XUnity.AutoTranslator.Plugin.Core;
using XUnity.AutoTranslator.Plugin.Core.Configuration;
using XUnity.AutoTranslator.Plugin.Core.Constants;
using XUnity.AutoTranslator.Plugin.Core.Endpoints;
using XUnity.AutoTranslator.Plugin.Core.Endpoints.Http;
using XUnity.AutoTranslator.Plugin.Core.Extensions;
using XUnity.AutoTranslator.Plugin.Core.Utilities;
using XUnity.AutoTranslator.Plugin.Core.Web;

namespace YandexTranslate
{
   internal class YandexTranslateEndpoint : HttpEndpoint
   {
      private static readonly HashSet<string> SupportedLanguages = new HashSet<string>
      {
         "az", "sq", "am", "en", "ar", "hy", "af", "eu", "ba", "be", "bn", "my", "bg", "bs", "cy", "hu", "vi",
         "ht", "gl", "nl", "mrj", "el", "ka", "gu", "da", "he", "yi", "id", "ga", "it", "is", "es", "kk", "kn",
         "ca", "ky", "zh", "ko", "xh", "km", "lo", "la", "lv", "lt", "lb", "mg", "ms", "ml", "mt", "mk", "mi",
         "mr", "mhr", "mn", "de", "ne", "no", "pa", "pap", "fa", "pl", "pt", "ro", "ru", "ceb", "sr", "si",
         "sk", "sl", "sw", "su", "tg", "th", "tl", "ta", "tt", "te", "tr", "udm", "uz", "uk", "ur", "fi", "fr",
         "hi", "hr", "cs", "sv", "gd", "et", "eo", "jv", "ja"
      };

      private static readonly string HttpsServicePointTemplateUrl =
         "https://translate.yandex.net/api/v1.5/tr.json/translate?key={3}&text={2}&lang={0}-{1}&format=plain";

      private string _key;

      public override string Id => "YandexTranslate";

      public override string FriendlyName => "Yandex Translate";

      private string FixLanguage( string lang )
      {
         switch( lang )
         {
            case "zh-CN":
            case "zh-Hans":
               return "zh";
            default:
               return lang;
         }
      }

      public override void Initialize( IInitializationContext context )
      {
         _key = context.GetOrCreateSetting( "Yandex", "YandexAPIKey", "" );
         context.DisableCertificateChecksFor( "translate.yandex.net" );

         // if the plugin cannot be enabled, simply throw so the user cannot select the plugin
         if( string.IsNullOrEmpty( _key ) )
            throw new EndpointInitializationException( "The YandexTranslate endpoint requires an API key which has not been provided." );
         if( !SupportedLanguages.Contains( FixLanguage( context.SourceLanguage ) ) )
            throw new EndpointInitializationException( $"The source language '{context.SourceLanguage}' is not supported." );
         if( !SupportedLanguages.Contains( FixLanguage( context.DestinationLanguage ) ) )
            throw new EndpointInitializationException( $"The destination language '{context.DestinationLanguage}' is not supported." );
      }

      public override void OnCreateRequest( IHttpRequestCreationContext context )
      {
         var request = new XUnityWebRequest(
            string.Format(
               HttpsServicePointTemplateUrl,
               FixLanguage( context.SourceLanguage ),
               FixLanguage( context.DestinationLanguage ),
               WwwHelper.EscapeUrl( context.UntranslatedText ),
               _key ) );

         request.Headers[ HttpRequestHeader.Accept ] = "*/*";
         request.Headers[ HttpRequestHeader.AcceptCharset ] = "UTF-8";

         context.Complete( request );
      }

      public override void OnExtractTranslation( IHttpTranslationExtractionContext context )
      {
         var data = context.Response.Data;
         var obj = JSON.Parse( data );

         var code = obj.AsObject[ "code" ].ToString();
         if( code != "200" )
            context.Fail( "Received bad response code: " + code );

         var token = obj.AsObject[ "text" ].ToString();
         var translation = JsonHelper.Unescape( token.Substring( 2, token.Length - 4 ) );

         if( string.IsNullOrEmpty( translation ) )
            context.Fail( "Received no translation." );

         context.Complete( translation );
      }
   }
}
{ "pile_set_name": "Github" }
/* * Copyright (C) 2009-2020 Lightbend Inc. <https://www.lightbend.com> */ package akka.cluster.typed.internal.receptionist import scala.concurrent.duration._ import akka.actor.Address import akka.actor.typed.{ ActorRef, Behavior } import akka.actor.typed.internal.receptionist.{ AbstractServiceKey, ReceptionistBehaviorProvider, ReceptionistMessages } import akka.actor.typed.receptionist.Receptionist.Command import akka.actor.typed.receptionist.ServiceKey import akka.actor.typed.scaladsl.{ ActorContext, Behaviors, LoggerOps } import akka.actor.typed.scaladsl.adapter._ import akka.annotation.InternalApi import akka.cluster.{ Cluster, ClusterEvent, UniqueAddress } import akka.cluster.ClusterEvent.ClusterDomainEvent import akka.cluster.ClusterEvent.ClusterShuttingDown import akka.cluster.ClusterEvent.MemberJoined import akka.cluster.ClusterEvent.MemberRemoved import akka.cluster.ClusterEvent.MemberUp import akka.cluster.ClusterEvent.MemberWeaklyUp import akka.cluster.ClusterEvent.ReachabilityEvent import akka.cluster.ClusterEvent.ReachableMember import akka.cluster.ClusterEvent.UnreachableMember import akka.cluster.ddata.{ ORMultiMap, ORMultiMapKey, Replicator } import akka.cluster.ddata.SelfUniqueAddress import akka.remote.AddressUidExtension import akka.util.TypedMultiMap // just to provide a log class /** INTERNAL API */ @InternalApi private[typed] final class ClusterReceptionist /** INTERNAL API */ @InternalApi private[typed] object ClusterReceptionist extends ReceptionistBehaviorProvider { type SubscriptionsKV[K <: AbstractServiceKey] = ActorRef[ReceptionistMessages.Listing[K#Protocol]] type SubscriptionRegistry = TypedMultiMap[AbstractServiceKey, SubscriptionsKV] type DDataKey = ORMultiMapKey[ServiceKey[_], Entry] final val EmptyORMultiMap = ORMultiMap.empty[ServiceKey[_], Entry] override val name = "clusterReceptionist" // values contain system uid to make it possible to discern actors at the same // path in different incarnations of a cluster node final case class Entry(ref: ActorRef[_], systemUid: Long)(val createdTimestamp: Long) { def uniqueAddress(selfAddress: Address): UniqueAddress = if (ref.path.address.hasLocalScope) UniqueAddress(selfAddress, systemUid) else UniqueAddress(ref.path.address, systemUid) override def toString: String = s"${ref.path.toString}#${ref.path.uid} @ $systemUid" } private sealed trait InternalCommand extends Command private final case class LocalServiceActorTerminated[T](ref: ActorRef[T]) extends InternalCommand private final case class SubscriberTerminated[T](ref: ActorRef[ReceptionistMessages.Listing[T]]) extends InternalCommand private final case class NodeAdded(addresses: UniqueAddress) extends InternalCommand private final case class NodeRemoved(addresses: UniqueAddress) extends InternalCommand private final case class NodeUnreachable(addresses: UniqueAddress) extends InternalCommand private final case class NodeReachable(addresses: UniqueAddress) extends InternalCommand private final case class ChangeFromReplicator(key: DDataKey, value: ORMultiMap[ServiceKey[_], Entry]) extends InternalCommand private case object RemoveTick extends InternalCommand private case object PruneTombstonesTick extends InternalCommand /** * @param registry The last seen state from the replicator - only updated when we get an update from th replicator * @param servicesPerActor needed since an actor can implement several services * @param tombstones Local actors that were stopped and should not be re-added to the available set of actors * for a key. 
* @param subscriptions Locally subscriptions, not replicated */ final case class State( registry: ShardedServiceRegistry, servicesPerActor: Map[ActorRef[_], Set[AbstractServiceKey]], tombstones: Map[ActorRef[_], Set[(AbstractServiceKey, Deadline)]], subscriptions: SubscriptionRegistry) { /** tombstone all services actor is registered for */ def addTombstone(actor: ActorRef[_], deadline: Deadline): State = { servicesPerActor.getOrElse(actor, Set.empty).foldLeft(this) { (state, key) => state.addTombstone(actor.asInstanceOf[ActorRef[key.Protocol]], key.asServiceKey, deadline) } } /** tombstone specific service actor is registered for */ def addTombstone[T](actor: ActorRef[T], serviceKey: ServiceKey[T], deadline: Deadline): State = { val newTombsonesForActor = tombstones.getOrElse(actor, Set.empty) + (serviceKey -> deadline) copy(tombstones = tombstones.updated(actor, newTombsonesForActor)) } def hasTombstone[T](serviceKey: ServiceKey[T])(actorRef: ActorRef[T]): Boolean = tombstones.nonEmpty && tombstones.getOrElse(actorRef, Set.empty).exists { case (key, _) => key == serviceKey } def pruneTombstones(): State = { if (tombstones.isEmpty) this else { val newTombstones: Map[ActorRef[_], Set[(AbstractServiceKey, Deadline)]] = tombstones.foldLeft(tombstones) { case (acc, (actorRef, entries)) => val entriesToKeep = entries.filter { case (_, deadline) => deadline.hasTimeLeft() } if (entriesToKeep.size == entries.size) acc else if (entriesToKeep.isEmpty) acc - actorRef else acc.updated(actorRef, entriesToKeep) } if (newTombstones eq tombstones) this else copy(tombstones = newTombstones) } } /** * @return (reachable-nodes, all) */ def activeActorRefsFor[T]( key: ServiceKey[T], selfUniqueAddress: UniqueAddress): (Set[ActorRef[T]], Set[ActorRef[T]]) = { val ddataKey = registry.ddataKeyFor(key) val entries = registry.serviceRegistries(ddataKey).entriesFor(key) val selfAddress = selfUniqueAddress.address val reachable = Set.newBuilder[ActorRef[T]] val all = Set.newBuilder[ActorRef[T]] entries.foreach { entry => val entryAddress = entry.uniqueAddress(selfAddress) val ref = entry.ref.asInstanceOf[ActorRef[key.Protocol]] if (registry.nodes.contains(entryAddress) && !hasTombstone(key)(ref)) { all += ref if (!registry.unreachable.contains(entryAddress)) { reachable += ref } } } (reachable.result(), all.result()) } def addLocalService[T](serviceInstance: ActorRef[T], key: ServiceKey[T]): State = { val newServicesPerActor = servicesPerActor.updated(serviceInstance, servicesPerActor.getOrElse(serviceInstance, Set.empty) + key) // if the service was previously registered and unregistered we need to remove it from the tombstones val tombstonesForActor = tombstones.getOrElse(serviceInstance, Set.empty) val newTombstones = if (tombstonesForActor.isEmpty) tombstones else tombstones.updated(serviceInstance, tombstonesForActor.filterNot(_._1 == key)) copy(servicesPerActor = newServicesPerActor, tombstones = newTombstones) } def removeLocalService[T](serviceInstance: ActorRef[T], key: ServiceKey[T], tombstoneDeadline: Deadline): State = { val newServicesForActor = servicesPerActor.get(serviceInstance) match { case Some(keys) => val newKeys = keys - key if (newKeys.isEmpty) servicesPerActor - serviceInstance else servicesPerActor.updated(serviceInstance, newKeys) case None => throw new IllegalArgumentException( s"Trying to remove $serviceInstance for $key but that has never been registered") } addTombstone(serviceInstance, key, tombstoneDeadline).copy(servicesPerActor = newServicesForActor) } def 
removeSubscriber(subscriber: ActorRef[ReceptionistMessages.Listing[Any]]): ClusterReceptionist.State = copy(subscriptions = subscriptions.valueRemoved(subscriber)) } // captures setup/dependencies so we can avoid doing it over and over again final class Setup(ctx: ActorContext[Command]) { val classicSystem = ctx.system.toClassic val settings = ClusterReceptionistSettings(ctx.system) val selfSystemUid = AddressUidExtension(classicSystem).longAddressUid lazy val keepTombstonesFor = cluster.settings.PruneGossipTombstonesAfter match { case f: FiniteDuration => f case _ => throw new IllegalStateException("Cannot actually happen") } val cluster = Cluster(classicSystem) // don't use DistributedData.selfUniqueAddress here, because that will initialize extension, which // isn't used otherwise by the ClusterReceptionist implicit val selfNodeAddress: SelfUniqueAddress = SelfUniqueAddress(cluster.selfUniqueAddress) val replicator = ctx.actorOf(Replicator.props(settings.replicatorSettings), "replicator") def newTombstoneDeadline() = Deadline(keepTombstonesFor) def selfUniqueAddress: UniqueAddress = cluster.selfUniqueAddress } override def behavior: Behavior[Command] = Behaviors.setup { ctx => ctx.setLoggerName(classOf[ClusterReceptionist]) Behaviors.withTimers { timers => val setup = new Setup(ctx) // include selfUniqueAddress so that it can be used locally before joining cluster val initialRegistry = ShardedServiceRegistry(setup.settings.distributedKeyCount).addNode(setup.selfUniqueAddress) // subscribe to changes from other nodes val replicatorMessageAdapter: ActorRef[Replicator.ReplicatorMessage] = ctx.messageAdapter[Replicator.ReplicatorMessage] { case changed: Replicator.Changed[_] @unchecked => ChangeFromReplicator( changed.key.asInstanceOf[DDataKey], changed.dataValue.asInstanceOf[ORMultiMap[ServiceKey[_], Entry]]) } initialRegistry.allDdataKeys.foreach(key => setup.replicator ! Replicator.Subscribe(key, replicatorMessageAdapter.toClassic)) // keep track of cluster members // remove entries when members are removed val clusterEventMessageAdapter: ActorRef[ClusterDomainEvent] = ctx.messageAdapter[ClusterDomainEvent] { case MemberJoined(member) => NodeAdded(member.uniqueAddress) case MemberWeaklyUp(member) => NodeAdded(member.uniqueAddress) case MemberUp(member) => NodeAdded(member.uniqueAddress) case MemberRemoved(member, _) => NodeRemoved(member.uniqueAddress) case UnreachableMember(member) => NodeUnreachable(member.uniqueAddress) case ReachableMember(member) => NodeReachable(member.uniqueAddress) case ClusterShuttingDown => NodeRemoved(setup.cluster.selfUniqueAddress) case other => throw new IllegalStateException(s"Unexpected ClusterDomainEvent $other. 
Please report bug.") } setup.cluster.subscribe( clusterEventMessageAdapter.toClassic, ClusterEvent.InitialStateAsEvents, classOf[MemberJoined], classOf[MemberWeaklyUp], classOf[MemberUp], classOf[MemberRemoved], classOf[ReachabilityEvent], ClusterShuttingDown.getClass) // also periodic cleanup in case removal from ORMultiMap is skipped due to concurrent update, // which is possible for OR CRDTs - done with an adapter to leverage the existing NodesRemoved message timers.startTimerWithFixedDelay(RemoveTick, setup.settings.pruningInterval) // default tombstone keepalive is 24h (based on prune-gossip-tombstones-after) and keeping the actorrefs // around isn't very costly so don't prune often timers.startTimerWithFixedDelay(PruneTombstonesTick, setup.keepTombstonesFor / 24) val initialState = State( registry = initialRegistry, servicesPerActor = Map.empty, tombstones = Map.empty, subscriptions = TypedMultiMap.empty[AbstractServiceKey, SubscriptionsKV]) behavior(setup, initialState) } } def behavior(setup: Setup, state: State): Behavior[Command] = Behaviors.setup { ctx => import setup._ def isLeader = { cluster.state.leader.contains(cluster.selfAddress) } def nodesRemoved(addresses: Set[UniqueAddress], onlyRemoveOldEntries: Boolean): Unit = { // ok to update from several nodes but more efficient to try to do it from one node def isOnRemovedNode(entry: Entry): Boolean = addresses(entry.uniqueAddress(setup.selfUniqueAddress.address)) val now = System.currentTimeMillis() // it possible that an entry is added before MemberJoined is visible and such entries should not be removed def isOld(entry: Entry): Boolean = (now - entry.createdTimestamp) >= settings.pruneRemovedOlderThan.toMillis val removals = { state.registry.allServices.foldLeft(Map.empty[AbstractServiceKey, Set[Entry]]) { case (acc, (key, entries)) => val removedEntries = entries.filter(entry => isOnRemovedNode(entry) && (!onlyRemoveOldEntries || isOld(entry))) if (removedEntries.isEmpty) acc // no change else acc + (key -> removedEntries) } } if (removals.nonEmpty) { if (ctx.log.isDebugEnabled) ctx.log.debugN( "ClusterReceptionist [{}] - Node(s) removed [{}], updating registry removing entries: [{}]", cluster.selfAddress, addresses.mkString(","), removals .map { case (key, entries) => key.asServiceKey.id -> entries.mkString("[", ", ", "]") } .mkString(",")) // shard changes over the ddata keys they belong to val removalsPerDdataKey = state.registry.entriesPerDdataKey(removals) removalsPerDdataKey.foreach { case (ddataKey, removalForKey) => replicator ! Replicator.Update(ddataKey, EmptyORMultiMap, settings.writeConsistency) { registry => ServiceRegistry(registry).removeAll(removalForKey).toORMultiMap } } } } def reachabilityChanged(keysForNode: Set[AbstractServiceKey], newState: State): Unit = { notifySubscribers(keysForNode, servicesWereAddedOrRemoved = false, newState) } def notifySubscribers( changedKeys: Set[AbstractServiceKey], servicesWereAddedOrRemoved: Boolean, newState: State): Unit = { changedKeys.foreach { changedKey => val serviceKey = changedKey.asServiceKey val subscribers = newState.subscriptions.get(changedKey) if (subscribers.nonEmpty) { val (reachable, all) = newState.activeActorRefsFor(serviceKey, selfUniqueAddress) val listing = ReceptionistMessages.Listing(serviceKey, reachable, all, servicesWereAddedOrRemoved) subscribers.foreach(_ ! 
listing) } } } def onCommand(cmd: Command): Behavior[Command] = cmd match { case ReceptionistMessages.Register(key, serviceInstance, maybeReplyTo) => if (serviceInstance.path.address.hasLocalScope) { val entry = Entry(serviceInstance, setup.selfSystemUid)(System.currentTimeMillis()) ctx.log .debugN("ClusterReceptionist [{}] - Actor was registered: [{}] [{}]", cluster.selfAddress, key, entry) // actor already watched after one service key registration if (!state.servicesPerActor.contains(serviceInstance)) ctx.watchWith(serviceInstance, LocalServiceActorTerminated(serviceInstance)) maybeReplyTo match { case Some(replyTo) => replyTo ! ReceptionistMessages.Registered(key, serviceInstance) case None => } val ddataKey = state.registry.ddataKeyFor(key) replicator ! Replicator.Update(ddataKey, EmptyORMultiMap, settings.writeConsistency) { registry => ServiceRegistry(registry).addBinding(key, entry).toORMultiMap } behavior(setup, state.addLocalService(serviceInstance, key)) } else { ctx.log.error("ClusterReceptionist [{}] - Register of non-local [{}] is not supported", serviceInstance) Behaviors.same } case ReceptionistMessages.Deregister(key, serviceInstance, maybeReplyTo) => if (serviceInstance.path.address.hasLocalScope) { val entry = Entry(serviceInstance, setup.selfSystemUid)(0L) ctx.log.debugN( "ClusterReceptionist [{}] - Unregister actor: [{}] [{}]", cluster.selfAddress, key.asServiceKey.id, entry) val newState = state.removeLocalService(serviceInstance, key, setup.newTombstoneDeadline()) if (!newState.servicesPerActor.contains(serviceInstance)) { // last service for actor unregistered, stop watching ctx.unwatch(serviceInstance) } maybeReplyTo match { case Some(replyTo) => replyTo ! ReceptionistMessages.Deregistered(key, serviceInstance) case None => } val ddataKey = state.registry.ddataKeyFor(key) replicator ! Replicator.Update(ddataKey, EmptyORMultiMap, settings.writeConsistency) { registry => ServiceRegistry(registry).removeBinding(key, entry).toORMultiMap } // tombstone removals so they are not re-added by merging with other concurrent // registrations for the same key behavior(setup, newState) } else { ctx.log.error("ClusterReceptionist [{}] - Unregistering non-local [{}] is not supported", serviceInstance) Behaviors.same } case ReceptionistMessages.Find(key, replyTo) => val (reachable, all) = state.activeActorRefsFor(key, selfUniqueAddress) replyTo ! ReceptionistMessages.Listing(key.asServiceKey, reachable, all, servicesWereAddedOrRemoved = true) Behaviors.same case ReceptionistMessages.Subscribe(key, subscriber) => if (subscriber.path.address.hasLocalScope) { ctx.watchWith(subscriber, SubscriberTerminated(subscriber)) // immediately reply with initial listings to the new subscriber val listing = { val (reachable, all) = state.activeActorRefsFor(key, selfUniqueAddress) ReceptionistMessages.Listing(key.asServiceKey, reachable, all, servicesWereAddedOrRemoved = true) } subscriber ! 
listing behavior(setup, state.copy(subscriptions = state.subscriptions.inserted(key)(subscriber))) } else { ctx.log.error("ClusterReceptionist [{}] - Subscriptions from non-local [{}] is not supported", subscriber) Behaviors.same } } def onInternalCommand(cmd: InternalCommand): Behavior[Command] = cmd match { case SubscriberTerminated(subscriber) => behavior(setup, state.removeSubscriber(subscriber)) case LocalServiceActorTerminated(serviceInstance) => val entry = Entry(serviceInstance, setup.selfSystemUid)(0L) // could be empty if there was a race between termination and unregistration val keys = state.servicesPerActor.getOrElse(serviceInstance, Set.empty) ctx.log.debugN( "ClusterReceptionist [{}] - Registered actor terminated: [{}] [{}]", cluster.selfAddress, keys.map(_.asServiceKey.id).mkString(", "), entry) keys.foreach { key => val ddataKey = state.registry.ddataKeyFor(key.asServiceKey) replicator ! Replicator.Update(ddataKey, EmptyORMultiMap, settings.writeConsistency) { registry => ServiceRegistry(registry).removeBinding(key.asServiceKey, entry).toORMultiMap } } // tombstone removals so they are not re-added by merging with other concurrent // registrations for the same key behavior(setup, state.addTombstone(serviceInstance, setup.newTombstoneDeadline())) case ChangeFromReplicator(ddataKey, value) => // every change will come back this way - this is where the local notifications happens val newRegistry = ServiceRegistry(value) val changedKeys = state.registry.collectChangedKeys(ddataKey, newRegistry) val newState = state.copy(registry = state.registry.withServiceRegistry(ddataKey, newRegistry)) if (changedKeys.nonEmpty) { if (ctx.log.isDebugEnabled) { ctx.log.debugN( "ClusterReceptionist [{}] - Change from replicator: [{}], changes: [{}], tombstones [{}]", cluster.selfAddress, newRegistry.entries.entries, changedKeys .map(key => key.asServiceKey.id -> newRegistry.entriesFor(key).mkString("[", ", ", "]")) .mkString(", "), state.tombstones.mkString(", ")) } notifySubscribers(changedKeys, servicesWereAddedOrRemoved = true, newState) changedKeys.foreach { changedKey => val serviceKey = changedKey.asServiceKey // because of how ORMultiMap/ORset works, we could have a case where an actor we removed // is re-introduced because of a concurrent update, in that case we need to re-remove it val tombstonedButReAdded = newRegistry.actorRefsFor(serviceKey).filter(state.hasTombstone(serviceKey)) if (tombstonedButReAdded.nonEmpty) { if (ctx.log.isDebugEnabled) ctx.log.debug2( "ClusterReceptionist [{}] - Saw ActorRefs that were tomstoned [{}], re-removing.", cluster.selfAddress, tombstonedButReAdded.mkString(", ")) replicator ! 
Replicator.Update(ddataKey, EmptyORMultiMap, settings.writeConsistency) { registry => tombstonedButReAdded .foldLeft(ServiceRegistry(registry)) { (acc, ref) => acc.removeBinding(serviceKey, Entry(ref, setup.selfSystemUid)(0L)) } .toORMultiMap } } } behavior(setup, newState) } else { Behaviors.same } case NodeAdded(uniqueAddress) => if (state.registry.nodes.contains(uniqueAddress)) { Behaviors.same } else { val newState = state.copy(registry = state.registry.addNode(uniqueAddress)) val keysForNode = newState.registry.keysFor(uniqueAddress) if (keysForNode.nonEmpty) { ctx.log.debug2( "ClusterReceptionist [{}] - Node with registered services added [{}]", cluster.selfAddress, uniqueAddress) notifySubscribers(keysForNode, servicesWereAddedOrRemoved = true, newState) } else { ctx.log.debug2("ClusterReceptionist [{}] - Node added [{}]", cluster.selfAddress, uniqueAddress) } behavior(setup, newState) } case NodeRemoved(uniqueAddress) => if (uniqueAddress == selfUniqueAddress) { ctx.log.debug("ClusterReceptionist [{}] - terminated/removed", cluster.selfAddress) // If self cluster node is shutting down our own entries should have been removed via // watch-Terminated or will be removed by other nodes. This point is anyway too late. Behaviors.stopped } else if (state.registry.nodes.contains(uniqueAddress)) { val keysForNode = state.registry.keysFor(uniqueAddress) val newState = state.copy(registry = state.registry.removeNode(uniqueAddress)) if (keysForNode.nonEmpty) { ctx.log.debug2( "ClusterReceptionist [{}] - Node with registered services removed [{}]", cluster.selfAddress, uniqueAddress) notifySubscribers(keysForNode, servicesWereAddedOrRemoved = true, newState) } // Ok to update from several nodes but more efficient to try to do it from one node. if (isLeader) { ctx.log.debug2( "ClusterReceptionist [{}] - Leader node observed removed node [{}]", cluster.selfAddress, uniqueAddress) nodesRemoved(Set(uniqueAddress), onlyRemoveOldEntries = false) } behavior(setup, newState) } else { Behaviors.same } case NodeUnreachable(uniqueAddress) => val keysForNode = state.registry.keysFor(uniqueAddress) val newState = state.copy(registry = state.registry.addUnreachable(uniqueAddress)) if (keysForNode.nonEmpty) { ctx.log.debug2( "ClusterReceptionist [{}] - Node with registered services unreachable [{}]", cluster.selfAddress, uniqueAddress) reachabilityChanged(keysForNode, newState) } behavior(setup, newState) case NodeReachable(uniqueAddress) => val keysForNode = state.registry.keysFor(uniqueAddress) val newState = state.copy(registry = state.registry.removeUnreachable(uniqueAddress)) if (keysForNode.nonEmpty) { ctx.log.debug2( "ClusterReceptionist [{}] - Node with registered services reachable again [{}]", cluster.selfAddress, uniqueAddress) reachabilityChanged(keysForNode, newState) } behavior(setup, newState) case RemoveTick => // ok to update from several nodes but more efficient to try to do it from one node if (isLeader) { val allAddressesInState: Set[UniqueAddress] = state.registry.allUniqueAddressesInState(setup.selfUniqueAddress) val notInCluster = allAddressesInState.diff(state.registry.nodes) if (notInCluster.nonEmpty) { if (ctx.log.isDebugEnabled) ctx.log.debug2( "ClusterReceptionist [{}] - Leader node cleanup tick, removed nodes: [{}]", cluster.selfAddress, notInCluster.mkString(",")) nodesRemoved(notInCluster, onlyRemoveOldEntries = true) } } Behaviors.same case PruneTombstonesTick => val prunedState = state.pruneTombstones() if (prunedState eq state) Behaviors.same else { 
ctx.log.debug("ClusterReceptionist [{}] - Pruning tombstones", cluster.selfAddress) behavior(setup, prunedState) } } Behaviors.receive[Command] { (_, msg) => msg match { // support two heterogeneous types of messages without union types case cmd: InternalCommand => onInternalCommand(cmd) case cmd: Command => onCommand(cmd) case _ => Behaviors.unhandled } } } }
{ "pile_set_name": "Github" }
<?php /* * This file is part of Twig. * * (c) Fabien Potencier * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ /** * Represents a body node. * * @author Fabien Potencier <[email protected]> */ class Twig_Node_Body extends Twig_Node { } class_alias('Twig_Node_Body', 'Twig\Node\BodyNode', false);
{ "pile_set_name": "Github" }
Who Writes the Docs? ==================== .. datatemplate-video:: :source: /_data/2018.portland.speakers.yaml :template: videos/video-detail.html :key: 7
{ "pile_set_name": "Github" }
@available(OSX 10.7, *) let AVCoreAnimationBeginTimeAtZero: CFTimeInterval @available(OSX 10.7, *) let AVLayerVideoGravityResizeAspect: String @available(OSX 10.7, *) let AVLayerVideoGravityResizeAspectFill: String @available(OSX 10.7, *) let AVLayerVideoGravityResize: String
{ "pile_set_name": "Github" }
/* * Copyright (C) 2016 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "aapt.h" #include "command.h" #include "print.h" #include "util.h" #include <regex> const regex NS_REGEX("( *)N: ([^=]+)=(.*)"); const regex ELEMENT_REGEX("( *)E: ([^ ]+) \\(line=(\\d+)\\)"); const regex ATTR_REGEX("( *)A: ([^\\(=]+)[^=]*=\"([^\"]+)\".*"); const string ANDROID_NS("http://schemas.android.com/apk/res/android"); bool Apk::HasActivity(const string& className) { string fullClassName = full_class_name(package, className); const size_t N = activities.size(); for (size_t i=0; i<N; i++) { if (activities[i] == fullClassName) { return true; } } return false; } struct Attribute { string ns; string name; string value; }; struct Element { Element* parent; string ns; string name; int lineno; vector<Attribute> attributes; vector<Element*> children; /** * Indentation in the xmltree dump. Might not be equal to the distance * from the root because namespace rows (scopes) have their own indentation. */ int depth; Element(); ~Element(); string GetAttr(const string& ns, const string& name) const; void FindElements(const string& ns, const string& name, vector<Element*>* result, bool recurse); }; Element::Element() { } Element::~Element() { const size_t N = children.size(); for (size_t i=0; i<N; i++) { delete children[i]; } } string Element::GetAttr(const string& ns, const string& name) const { const size_t N = attributes.size(); for (size_t i=0; i<N; i++) { const Attribute& attr = attributes[i]; if (attr.ns == ns && attr.name == name) { return attr.value; } } return string(); } void Element::FindElements(const string& ns, const string& name, vector<Element*>* result, bool recurse) { const size_t N = children.size(); for (size_t i=0; i<N; i++) { Element* child = children[i]; if (child->ns == ns && child->name == name) { result->push_back(child); } if (recurse) { child->FindElements(ns, name, result, recurse); } } } struct Scope { Scope* parent; int depth; map<string,string> namespaces; Scope(Scope* parent, int depth); }; Scope::Scope(Scope* p, int d) :parent(p), depth(d) { if (p != NULL) { namespaces = p->namespaces; } } string full_class_name(const string& packageName, const string& className) { if (className.length() == 0) { return ""; } if (className[0] == '.') { return packageName + className; } if (className.find('.') == string::npos) { return packageName + "." 
+ className; } return className; } string pretty_component_name(const string& packageName, const string& className) { if (starts_with(packageName, className)) { size_t pn = packageName.length(); size_t cn = className.length(); if (cn > pn && className[pn] == '.') { return packageName + "/" + string(className, pn, string::npos); } } return packageName + "/" + className; } int inspect_apk(Apk* apk, const string& filename) { // Load the manifest xml Command cmd("aapt2"); cmd.AddArg("dump"); cmd.AddArg("xmltree"); cmd.AddArg(filename); cmd.AddArg("--file"); cmd.AddArg("AndroidManifest.xml"); int err; string output = get_command_output(cmd, &err, false); check_error(err); // Parse the manifest xml Scope* scope = new Scope(NULL, -1); Element* root = NULL; Element* current = NULL; vector<string> lines; split_lines(&lines, output); for (size_t i=0; i<lines.size(); i++) { const string& line = lines[i]; smatch match; if (regex_match(line, match, NS_REGEX)) { int depth = match[1].length() / 2; while (depth < scope->depth) { Scope* tmp = scope; scope = scope->parent; delete tmp; } scope = new Scope(scope, depth); scope->namespaces[match[2]] = match[3]; } else if (regex_match(line, match, ELEMENT_REGEX)) { Element* element = new Element(); string str = match[2]; size_t colon = str.find(':'); if (colon == string::npos) { element->name = str; } else { element->ns = scope->namespaces[string(str, 0, colon)]; element->name.assign(str, colon+1, string::npos); } element->lineno = atoi(match[3].str().c_str()); element->depth = match[1].length() / 2; if (root == NULL) { current = element; root = element; } else { while (element->depth <= current->depth && current->parent != NULL) { current = current->parent; } element->parent = current; current->children.push_back(element); current = element; } } else if (regex_match(line, match, ATTR_REGEX)) { if (current != NULL) { Attribute attr; string str = match[2]; size_t colon = str.rfind(':'); if (colon == string::npos) { attr.name = str; } else { attr.ns.assign(str, 0, colon); attr.name.assign(str, colon+1, string::npos); } attr.value = match[3]; current->attributes.push_back(attr); } } } while (scope != NULL) { Scope* tmp = scope; scope = scope->parent; delete tmp; } // Package name apk->package = root->GetAttr("", "package"); if (apk->package.size() == 0) { print_error("%s:%d: Manifest root element doesn't contain a package attribute", filename.c_str(), root->lineno); delete root; return 1; } // Instrumentation runner vector<Element*> instrumentation; root->FindElements("", "instrumentation", &instrumentation, true); if (instrumentation.size() > 0) { // TODO: How could we deal with multiple instrumentation tags? // We'll just pick the first one. apk->runner = instrumentation[0]->GetAttr(ANDROID_NS, "name"); } // Activities vector<Element*> activities; root->FindElements("", "activity", &activities, true); for (size_t i=0; i<activities.size(); i++) { string name = activities[i]->GetAttr(ANDROID_NS, "name"); if (name.size() == 0) { continue; } apk->activities.push_back(full_class_name(apk->package, name)); } delete root; return 0; }
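The attribute regex above does most of the work when pulling values such as the package name out of `aapt dump xmltree` output. As a rough Go sketch of the same attribute-matching idea (the exact `aapt` invocation, the APK path, and the assumption that the package shows up as `A: package="..."` are illustrative, not taken from this repository):

```go
package main

import (
	"bufio"
	"bytes"
	"fmt"
	"os/exec"
	"regexp"
)

// Same shape as ATTR_REGEX above: indentation, attribute name, quoted value.
var attrRe = regexp.MustCompile(`( *)A: ([^\(=]+)[^=]*="([^"]+)".*`)

func main() {
	// Hypothetical invocation; the real aapt/aapt2 command line may differ.
	out, err := exec.Command("aapt", "dump", "xmltree", "app.apk", "AndroidManifest.xml").Output()
	if err != nil {
		panic(err)
	}
	sc := bufio.NewScanner(bytes.NewReader(out))
	for sc.Scan() {
		if m := attrRe.FindStringSubmatch(sc.Text()); m != nil && m[2] == "package" {
			fmt.Println("package:", m[3]) // e.g. "package: com.example.app"
			break
		}
	}
}
```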
{ "pile_set_name": "Github" }
namespace Neutronium.MVVMComponents
{
    /// <summary>
    /// Command that receives no argument.
    /// <seealso cref="ICommand"/>
    /// </summary>
    public interface ICommandWithoutParameter : IUpdatableCommand
    {
        /// <summary>
        /// Executes the command.
        /// </summary>
        void Execute();

        /// <summary>
        /// Determines whether the command can execute in its current
        /// state.
        /// </summary>
        /// <returns>
        /// true if this command can be executed; otherwise, false.
        /// </returns>
        bool CanExecute { get; }
    }
}
{ "pile_set_name": "Github" }
---
next: false
---

# Server Middleware (Runtime)

```ts
import { ServerMiddleware } from '@nuxt/types'

const myServerMiddleware: ServerMiddleware = function (req, res, next) {
  // use req, res, next here
}

export default myServerMiddleware
```
{ "pile_set_name": "Github" }
// Copyright 2011 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package present import ( "bufio" "bytes" "errors" "fmt" "html/template" "io" "io/ioutil" "log" "net/url" "regexp" "strings" "time" "unicode" "unicode/utf8" ) var ( parsers = make(map[string]ParseFunc) funcs = template.FuncMap{} ) // Template returns an empty template with the action functions in its FuncMap. func Template() *template.Template { return template.New("").Funcs(funcs) } // Render renders the doc to the given writer using the provided template. func (d *Doc) Render(w io.Writer, t *template.Template) error { data := struct { *Doc Template *template.Template PlayEnabled bool NotesEnabled bool }{d, t, PlayEnabled, NotesEnabled} return t.ExecuteTemplate(w, "root", data) } // Render renders the section to the given writer using the provided template. func (s *Section) Render(w io.Writer, t *template.Template) error { data := struct { *Section Template *template.Template PlayEnabled bool }{s, t, PlayEnabled} return t.ExecuteTemplate(w, "section", data) } type ParseFunc func(ctx *Context, fileName string, lineNumber int, inputLine string) (Elem, error) // Register binds the named action, which does not begin with a period, to the // specified parser to be invoked when the name, with a period, appears in the // present input text. func Register(name string, parser ParseFunc) { if len(name) == 0 || name[0] == ';' { panic("bad name in Register: " + name) } parsers["."+name] = parser } // Doc represents an entire document. type Doc struct { Title string Subtitle string Time time.Time Authors []Author TitleNotes []string Sections []Section Tags []string } // Author represents the person who wrote and/or is presenting the document. type Author struct { Elem []Elem } // TextElem returns the first text elements of the author details. // This is used to display the author' name, job title, and company // without the contact details. func (p *Author) TextElem() (elems []Elem) { for _, el := range p.Elem { if _, ok := el.(Text); !ok { break } elems = append(elems, el) } return } // Section represents a section of a document (such as a presentation slide) // comprising a title and a list of elements. type Section struct { Number []int Title string Elem []Elem Notes []string Classes []string Styles []string } // HTMLAttributes for the section func (s Section) HTMLAttributes() template.HTMLAttr { if len(s.Classes) == 0 && len(s.Styles) == 0 { return "" } var class string if len(s.Classes) > 0 { class = fmt.Sprintf(`class=%q`, strings.Join(s.Classes, " ")) } var style string if len(s.Styles) > 0 { style = fmt.Sprintf(`style=%q`, strings.Join(s.Styles, " ")) } return template.HTMLAttr(strings.Join([]string{class, style}, " ")) } // Sections contained within the section. func (s Section) Sections() (sections []Section) { for _, e := range s.Elem { if section, ok := e.(Section); ok { sections = append(sections, section) } } return } // Level returns the level of the given section. // The document title is level 1, main section 2, etc. func (s Section) Level() int { return len(s.Number) + 1 } // FormattedNumber returns a string containing the concatenation of the // numbers identifying a Section. func (s Section) FormattedNumber() string { b := &bytes.Buffer{} for _, n := range s.Number { fmt.Fprintf(b, "%v.", n) } return b.String() } func (s Section) TemplateName() string { return "section" } // Elem defines the interface for a present element. 
That is, something that // can provide the name of the template used to render the element. type Elem interface { TemplateName() string } // renderElem implements the elem template function, used to render // sub-templates. func renderElem(t *template.Template, e Elem) (template.HTML, error) { var data interface{} = e if s, ok := e.(Section); ok { data = struct { Section Template *template.Template }{s, t} } return execTemplate(t, e.TemplateName(), data) } // pageNum derives a page number from a section. func pageNum(s Section, offset int) int { if len(s.Number) == 0 { return offset } return s.Number[0] + offset } func init() { funcs["elem"] = renderElem funcs["pagenum"] = pageNum } // execTemplate is a helper to execute a template and return the output as a // template.HTML value. func execTemplate(t *template.Template, name string, data interface{}) (template.HTML, error) { b := new(bytes.Buffer) err := t.ExecuteTemplate(b, name, data) if err != nil { return "", err } return template.HTML(b.String()), nil } // Text represents an optionally preformatted paragraph. type Text struct { Lines []string Pre bool } func (t Text) TemplateName() string { return "text" } // List represents a bulleted list. type List struct { Bullet []string } func (l List) TemplateName() string { return "list" } // Lines is a helper for parsing line-based input. type Lines struct { line int // 0 indexed, so has 1-indexed number of last line returned text []string } func readLines(r io.Reader) (*Lines, error) { var lines []string s := bufio.NewScanner(r) for s.Scan() { lines = append(lines, s.Text()) } if err := s.Err(); err != nil { return nil, err } return &Lines{0, lines}, nil } func (l *Lines) next() (text string, ok bool) { for { current := l.line l.line++ if current >= len(l.text) { return "", false } text = l.text[current] // Lines starting with # are comments. if len(text) == 0 || text[0] != '#' { ok = true break } } return } func (l *Lines) back() { l.line-- } func (l *Lines) nextNonEmpty() (text string, ok bool) { for { text, ok = l.next() if !ok { return } if len(text) > 0 { break } } return } // A Context specifies the supporting context for parsing a presentation. type Context struct { // ReadFile reads the file named by filename and returns the contents. ReadFile func(filename string) ([]byte, error) } // ParseMode represents flags for the Parse function. type ParseMode int const ( // If set, parse only the title and subtitle. TitlesOnly ParseMode = 1 ) // Parse parses a document from r. func (ctx *Context) Parse(r io.Reader, name string, mode ParseMode) (*Doc, error) { doc := new(Doc) lines, err := readLines(r) if err != nil { return nil, err } for i := lines.line; i < len(lines.text); i++ { if strings.HasPrefix(lines.text[i], "*") { break } if isSpeakerNote(lines.text[i]) { doc.TitleNotes = append(doc.TitleNotes, lines.text[i][2:]) } } err = parseHeader(doc, lines) if err != nil { return nil, err } if mode&TitlesOnly != 0 { return doc, nil } // Authors if doc.Authors, err = parseAuthors(lines); err != nil { return nil, err } // Sections if doc.Sections, err = parseSections(ctx, name, lines, []int{}); err != nil { return nil, err } return doc, nil } // Parse parses a document from r. Parse reads assets used by the presentation // from the file system using ioutil.ReadFile. func Parse(r io.Reader, name string, mode ParseMode) (*Doc, error) { ctx := Context{ReadFile: ioutil.ReadFile} return ctx.Parse(r, name, mode) } // isHeading matches any section heading. 
var isHeading = regexp.MustCompile(`^\*+ `) // lesserHeading returns true if text is a heading of a lesser or equal level // than that denoted by prefix. func lesserHeading(text, prefix string) bool { return isHeading.MatchString(text) && !strings.HasPrefix(text, prefix+"*") } // parseSections parses Sections from lines for the section level indicated by // number (a nil number indicates the top level). func parseSections(ctx *Context, name string, lines *Lines, number []int) ([]Section, error) { var sections []Section for i := 1; ; i++ { // Next non-empty line is title. text, ok := lines.nextNonEmpty() for ok && text == "" { text, ok = lines.next() } if !ok { break } prefix := strings.Repeat("*", len(number)+1) if !strings.HasPrefix(text, prefix+" ") { lines.back() break } section := Section{ Number: append(append([]int{}, number...), i), Title: text[len(prefix)+1:], } text, ok = lines.nextNonEmpty() for ok && !lesserHeading(text, prefix) { var e Elem r, _ := utf8.DecodeRuneInString(text) switch { case unicode.IsSpace(r): i := strings.IndexFunc(text, func(r rune) bool { return !unicode.IsSpace(r) }) if i < 0 { break } indent := text[:i] var s []string for ok && (strings.HasPrefix(text, indent) || text == "") { if text != "" { text = text[i:] } s = append(s, text) text, ok = lines.next() } lines.back() pre := strings.Join(s, "\n") pre = strings.Replace(pre, "\t", " ", -1) // browsers treat tabs badly pre = strings.TrimRightFunc(pre, unicode.IsSpace) e = Text{Lines: []string{pre}, Pre: true} case strings.HasPrefix(text, "- "): var b []string for ok && strings.HasPrefix(text, "- ") { b = append(b, text[2:]) text, ok = lines.next() } lines.back() e = List{Bullet: b} case isSpeakerNote(text): section.Notes = append(section.Notes, text[2:]) case strings.HasPrefix(text, prefix+"* "): lines.back() subsecs, err := parseSections(ctx, name, lines, section.Number) if err != nil { return nil, err } for _, ss := range subsecs { section.Elem = append(section.Elem, ss) } case strings.HasPrefix(text, "."): args := strings.Fields(text) if args[0] == ".background" { section.Classes = append(section.Classes, "background") section.Styles = append(section.Styles, "background-image: url('"+args[1]+"')") break } parser := parsers[args[0]] if parser == nil { return nil, fmt.Errorf("%s:%d: unknown command %q\n", name, lines.line, text) } t, err := parser(ctx, name, lines.line, text) if err != nil { return nil, err } e = t default: var l []string for ok && strings.TrimSpace(text) != "" { if text[0] == '.' { // Command breaks text block. lines.back() break } if strings.HasPrefix(text, `\.`) { // Backslash escapes initial period. text = text[1:] } l = append(l, text) text, ok = lines.next() } if len(l) > 0 { e = Text{Lines: l} } } if e != nil { section.Elem = append(section.Elem, e) } text, ok = lines.nextNonEmpty() } if isHeading.MatchString(text) { lines.back() } sections = append(sections, section) } return sections, nil } func parseHeader(doc *Doc, lines *Lines) error { var ok bool // First non-empty line starts header. doc.Title, ok = lines.nextNonEmpty() if !ok { return errors.New("unexpected EOF; expected title") } for { text, ok := lines.next() if !ok { return errors.New("unexpected EOF") } if text == "" { break } if isSpeakerNote(text) { continue } const tagPrefix = "Tags:" if strings.HasPrefix(text, tagPrefix) { tags := strings.Split(text[len(tagPrefix):], ",") for i := range tags { tags[i] = strings.TrimSpace(tags[i]) } doc.Tags = append(doc.Tags, tags...) 
} else if t, ok := parseTime(text); ok { doc.Time = t } else if doc.Subtitle == "" { doc.Subtitle = text } else { return fmt.Errorf("unexpected header line: %q", text) } } return nil } func parseAuthors(lines *Lines) (authors []Author, err error) { // This grammar demarcates authors with blanks. // Skip blank lines. if _, ok := lines.nextNonEmpty(); !ok { return nil, errors.New("unexpected EOF") } lines.back() var a *Author for { text, ok := lines.next() if !ok { return nil, errors.New("unexpected EOF") } // If we find a section heading, we're done. if strings.HasPrefix(text, "* ") { lines.back() break } if isSpeakerNote(text) { continue } // If we encounter a blank we're done with this author. if a != nil && len(text) == 0 { authors = append(authors, *a) a = nil continue } if a == nil { a = new(Author) } // Parse the line. Those that // - begin with @ are twitter names, // - contain slashes are links, or // - contain an @ symbol are an email address. // The rest is just text. var el Elem switch { case strings.HasPrefix(text, "@"): el = parseURL("http://twitter.com/" + text[1:]) case strings.Contains(text, ":"): el = parseURL(text) case strings.Contains(text, "@"): el = parseURL("mailto:" + text) } if l, ok := el.(Link); ok { l.Label = text el = l } if el == nil { el = Text{Lines: []string{text}} } a.Elem = append(a.Elem, el) } if a != nil { authors = append(authors, *a) } return authors, nil } func parseURL(text string) Elem { u, err := url.Parse(text) if err != nil { log.Printf("Parse(%q): %v", text, err) return nil } return Link{URL: u} } func parseTime(text string) (t time.Time, ok bool) { t, err := time.Parse("15:04 2 Jan 2006", text) if err == nil { return t, true } t, err = time.Parse("2 Jan 2006", text) if err == nil { // at 11am UTC it is the same date everywhere t = t.Add(time.Hour * 11) return t, true } return time.Time{}, false } func isSpeakerNote(s string) bool { return strings.HasPrefix(s, ": ") }
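As a quick orientation to the API defined above, here is a minimal, hypothetical usage sketch: it feeds a small presentation source to Context.Parse and prints the title and section headings. The sample input text and the import path are assumptions for illustration only.

```go
package main

import (
	"fmt"
	"io/ioutil"
	"strings"

	"golang.org/x/tools/present" // assumed import path for the package above
)

func main() {
	src := `My Talk
Subtitle here
2 Jan 2006

Jane Doe
jane@example.com

* First section

Some text.

* Second section

- a bullet
- another bullet
`
	ctx := present.Context{ReadFile: ioutil.ReadFile}
	doc, err := ctx.Parse(strings.NewReader(src), "talk.slide", 0)
	if err != nil {
		panic(err)
	}
	fmt.Println("Title:", doc.Title)
	for _, s := range doc.Sections {
		// FormattedNumber yields "1.", "2.", ... for top-level sections.
		fmt.Printf("Section %s %s\n", s.FormattedNumber(), s.Title)
	}
}
```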
{ "pile_set_name": "Github" }
import * as HashMap from './HashMap'; export { HashMap }; export * from './functions'; export { HashMapStructure } from './internals/HashMap';
{ "pile_set_name": "Github" }
/*************************************************************************** qgswfsrequest.cpp --------------------- begin : February 2016 copyright : (C) 2011 by Martin Dobias (C) 2016 by Even Rouault email : wonder dot sk at gmail dot com even.rouault at spatialys.com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ #include "qgswfsrequest.h" #include "qgslogger.h" QgsWfsRequest::QgsWfsRequest( const QgsWFSDataSourceURI &uri ) : QgsBaseNetworkRequest( uri.auth(), tr( "WFS" ) ) , mUri( uri ) { QgsDebugMsgLevel( QStringLiteral( "theUri = " ) + uri.uri( ), 4 ); } QUrl QgsWfsRequest::requestUrl( const QString &request ) const { return mUri.requestUrl( request ); }
{ "pile_set_name": "Github" }
test_vrlh_1: #_ REGISTER_IN v3 [12345678, 87654321, 11223344, 55667788] #_ REGISTER_IN v4 [000D000D, 000D000D, 000D000D, 000D000D] vrlh v5, v3, v4 blr #_ REGISTER_OUT v3 [12345678, 87654321, 11223344, 55667788] #_ REGISTER_OUT v4 [000D000D, 000D000D, 000D000D, 000D000D] #_ REGISTER_OUT v5 [82460ACF, B0EC2864, 42248668, CAAC0EF1]
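For reference, vrlh rotates each 16-bit halfword of v3 left by the count held in the corresponding halfword of v4 (13 here, i.e. a right rotate by 3), which is how the expected v5 values are derived. A small Go check of that arithmetic, not part of the test harness:

```go
package main

import (
	"fmt"
	"math/bits"
)

func main() {
	// Halfwords of v3 in order, each rotated left by 13 as vrlh would do.
	halfwords := []uint16{0x1234, 0x5678, 0x8765, 0x4321, 0x1122, 0x3344, 0x5566, 0x7788}
	for _, h := range halfwords {
		fmt.Printf("%04X ", bits.RotateLeft16(h, 13))
	}
	fmt.Println() // 8246 0ACF B0EC 2864 4224 8668 CAAC 0EF1
}
```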
{ "pile_set_name": "Github" }
<?php /** * @package pkg_projectfork * @subpackage com_pfrepo * * @author Tobias Kuhn (eaxs) * @copyright Copyright (C) 2006-2013 Tobias Kuhn. All rights reserved. * @license http://www.gnu.org/licenses/gpl.html GNU/GPL, see LICENSE.txt **/ defined('_JEXEC') or die(); jimport('projectfork.controller.form.json'); /** * Projectfork Directory Form Controller * */ class PFrepoControllerDirectory extends PFControllerFormJson { }
{ "pile_set_name": "Github" }
Copyright (C) 2012-2016 Tim King, Phil Hagelberg, Bozhidar Batsov, Artur Malabarba and [contributors](https://github.com/clojure-emacs/cider/contributors). CIDER is distributed under the GNU General Public License, version 3, the same as Emacs. Type <kbd>C-h C-c</kbd> in Emacs to view it. `cider-nrepl` is distributed under the Eclipse Public License, the same as Clojure.
{ "pile_set_name": "Github" }
// The MIT License (MIT) // Copyright (c) 2014 Andreas Briese, eduToolbox@Bri-C GmbH, Sarstedt // Permission is hereby granted, free of charge, to any person obtaining a copy of // this software and associated documentation files (the "Software"), to deal in // the Software without restriction, including without limitation the rights to // use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of // the Software, and to permit persons to whom the Software is furnished to do so, // subject to the following conditions: // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS // FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR // COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER // IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN // CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. // 2019/08/25 code revision to reduce unsafe use // Parts are adopted from the fork at ipfs/bbloom after performance rev by // Steve Allen (https://github.com/Stebalien) // (see https://github.com/ipfs/bbloom/blob/master/bbloom.go) // -> func Has // -> func set // -> func add package bbloom import ( "bytes" "encoding/json" "log" "math" "sync" "unsafe" ) // helper // not needed anymore by Set // var mask = []uint8{1, 2, 4, 8, 16, 32, 64, 128} func getSize(ui64 uint64) (size uint64, exponent uint64) { if ui64 < uint64(512) { ui64 = uint64(512) } size = uint64(1) for size < ui64 { size <<= 1 exponent++ } return size, exponent } func calcSizeByWrongPositives(numEntries, wrongs float64) (uint64, uint64) { size := -1 * numEntries * math.Log(wrongs) / math.Pow(float64(0.69314718056), 2) locs := math.Ceil(float64(0.69314718056) * size / numEntries) return uint64(size), uint64(locs) } // New // returns a new bloomfilter func New(params ...float64) (bloomfilter Bloom) { var entries, locs uint64 if len(params) == 2 { if params[1] < 1 { entries, locs = calcSizeByWrongPositives(params[0], params[1]) } else { entries, locs = uint64(params[0]), uint64(params[1]) } } else { log.Fatal("usage: New(float64(number_of_entries), float64(number_of_hashlocations)) i.e. New(float64(1000), float64(3)) or New(float64(number_of_entries), float64(number_of_hashlocations)) i.e. 
New(float64(1000), float64(0.03))") } size, exponent := getSize(uint64(entries)) bloomfilter = Bloom{ Mtx: &sync.Mutex{}, sizeExp: exponent, size: size - 1, setLocs: locs, shift: 64 - exponent, } bloomfilter.Size(size) return bloomfilter } // NewWithBoolset // takes a []byte slice and number of locs per entry // returns the bloomfilter with a bitset populated according to the input []byte func NewWithBoolset(bs *[]byte, locs uint64) (bloomfilter Bloom) { bloomfilter = New(float64(len(*bs)<<3), float64(locs)) for i, b := range *bs { *(*uint8)(unsafe.Pointer(uintptr(unsafe.Pointer(&bloomfilter.bitset[0])) + uintptr(i))) = b } return bloomfilter } // bloomJSONImExport // Im/Export structure used by JSONMarshal / JSONUnmarshal type bloomJSONImExport struct { FilterSet []byte SetLocs uint64 } // JSONUnmarshal // takes JSON-Object (type bloomJSONImExport) as []bytes // returns Bloom object func JSONUnmarshal(dbData []byte) Bloom { bloomImEx := bloomJSONImExport{} json.Unmarshal(dbData, &bloomImEx) buf := bytes.NewBuffer(bloomImEx.FilterSet) bs := buf.Bytes() bf := NewWithBoolset(&bs, bloomImEx.SetLocs) return bf } // // Bloom filter type Bloom struct { Mtx *sync.Mutex ElemNum uint64 bitset []uint64 sizeExp uint64 size uint64 setLocs uint64 shift uint64 } // <--- http://www.cse.yorku.ca/~oz/hash.html // modified Berkeley DB Hash (32bit) // hash is casted to l, h = 16bit fragments // func (bl Bloom) absdbm(b *[]byte) (l, h uint64) { // hash := uint64(len(*b)) // for _, c := range *b { // hash = uint64(c) + (hash << 6) + (hash << bl.sizeExp) - hash // } // h = hash >> bl.shift // l = hash << bl.shift >> bl.shift // return l, h // } // Update: found sipHash of Jean-Philippe Aumasson & Daniel J. Bernstein to be even faster than absdbm() // https://131002.net/siphash/ // siphash was implemented for Go by Dmitry Chestnykh https://github.com/dchest/siphash // Add // set the bit(s) for entry; Adds an entry to the Bloom filter func (bl *Bloom) Add(entry []byte) { l, h := bl.sipHash(entry) for i := uint64(0); i < bl.setLocs; i++ { bl.set((h + i*l) & bl.size) bl.ElemNum++ } } // AddTS // Thread safe: Mutex.Lock the bloomfilter for the time of processing the entry func (bl *Bloom) AddTS(entry []byte) { bl.Mtx.Lock() defer bl.Mtx.Unlock() bl.Add(entry) } // Has // check if bit(s) for entry is/are set // returns true if the entry was added to the Bloom Filter func (bl Bloom) Has(entry []byte) bool { l, h := bl.sipHash(entry) res := true for i := uint64(0); i < bl.setLocs; i++ { res = res && bl.isSet((h+i*l)&bl.size) // https://github.com/ipfs/bbloom/commit/84e8303a9bfb37b2658b85982921d15bbb0fecff // // Branching here (early escape) is not worth it // // This is my conclusion from benchmarks // // (prevents loop unrolling) // switch bl.IsSet((h + i*l) & bl.size) { // case false: // return false // } } return res } // HasTS // Thread safe: Mutex.Lock the bloomfilter for the time of processing the entry func (bl *Bloom) HasTS(entry []byte) bool { bl.Mtx.Lock() defer bl.Mtx.Unlock() return bl.Has(entry) } // AddIfNotHas // Only Add entry if it's not present in the bloomfilter // returns true if entry was added // returns false if entry was allready registered in the bloomfilter func (bl Bloom) AddIfNotHas(entry []byte) (added bool) { if bl.Has(entry) { return added } bl.Add(entry) return true } // AddIfNotHasTS // Tread safe: Only Add entry if it's not present in the bloomfilter // returns true if entry was added // returns false if entry was allready registered in the bloomfilter func (bl *Bloom) 
AddIfNotHasTS(entry []byte) (added bool) { bl.Mtx.Lock() defer bl.Mtx.Unlock() return bl.AddIfNotHas(entry) } // Size // make Bloom filter with as bitset of size sz func (bl *Bloom) Size(sz uint64) { bl.bitset = make([]uint64, sz>>6) } // Clear // resets the Bloom filter func (bl *Bloom) Clear() { bs := bl.bitset for i := range bs { bs[i] = 0 } } // Set // set the bit[idx] of bitsit func (bl *Bloom) set(idx uint64) { // ommit unsafe // *(*uint8)(unsafe.Pointer(uintptr(unsafe.Pointer(&bl.bitset[idx>>6])) + uintptr((idx%64)>>3))) |= mask[idx%8] bl.bitset[idx>>6] |= 1 << (idx % 64) } // IsSet // check if bit[idx] of bitset is set // returns true/false func (bl *Bloom) isSet(idx uint64) bool { // ommit unsafe // return (((*(*uint8)(unsafe.Pointer(uintptr(unsafe.Pointer(&bl.bitset[idx>>6])) + uintptr((idx%64)>>3)))) >> (idx % 8)) & 1) == 1 return bl.bitset[idx>>6]&(1<<(idx%64)) != 0 } // JSONMarshal // returns JSON-object (type bloomJSONImExport) as []byte func (bl Bloom) JSONMarshal() []byte { bloomImEx := bloomJSONImExport{} bloomImEx.SetLocs = uint64(bl.setLocs) bloomImEx.FilterSet = make([]byte, len(bl.bitset)<<3) for i := range bloomImEx.FilterSet { bloomImEx.FilterSet[i] = *(*byte)(unsafe.Pointer(uintptr(unsafe.Pointer(&bl.bitset[0])) + uintptr(i))) } data, err := json.Marshal(bloomImEx) if err != nil { log.Fatal("json.Marshal failed: ", err) } return data } // // alternative hashFn // func (bl Bloom) fnv64a(b *[]byte) (l, h uint64) { // h64 := fnv.New64a() // h64.Write(*b) // hash := h64.Sum64() // h = hash >> 32 // l = hash << 32 >> 32 // return l, h // } // // // <-- http://partow.net/programming/hashfunctions/index.html // // citation: An algorithm proposed by Donald E. Knuth in The Art Of Computer Programming Volume 3, // // under the topic of sorting and search chapter 6.4. // // modified to fit with boolset-length // func (bl Bloom) DEKHash(b *[]byte) (l, h uint64) { // hash := uint64(len(*b)) // for _, c := range *b { // hash = ((hash << 5) ^ (hash >> bl.shift)) ^ uint64(c) // } // h = hash >> bl.shift // l = hash << bl.sizeExp >> bl.sizeExp // return l, h // }
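A minimal usage sketch for the filter defined above; the entry count, false-positive rate, and import path are example assumptions:

```go
package main

import (
	"fmt"

	"github.com/AndreasBriese/bbloom" // assumed import path for the package above
)

func main() {
	// Size the filter for ~1000 entries at a ~1% false-positive rate.
	bf := bbloom.New(float64(1000), float64(0.01))

	bf.Add([]byte("alpha"))
	bf.AddTS([]byte("beta")) // mutex-guarded variant

	fmt.Println(bf.Has([]byte("alpha")))         // true
	fmt.Println(bf.Has([]byte("gamma")))         // false (with high probability)
	fmt.Println(bf.AddIfNotHas([]byte("alpha"))) // false: already present
}
```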
{ "pile_set_name": "Github" }
<?xml version='1.0' encoding='UTF-8'?> <resources> <string name="menu_zim_manager">Get Content</string> <string name="menu_help">المساعدة</string> <string name="menu_home">الصفحة الرئيسية</string> <string name="menu_forward">التالي</string> <string name="menu_back">رجوع</string> <string name="menu_settings">التفضيلات</string> <string name="menu_search">إبحث</string> <string name="menu_searchintext">إبحث في النص</string> <string name="menu_bookmarks">علامات القراءة</string> <string name="menu_randomarticle">مقالة عشوائية</string> <string name="menu_fullscreen">ملئ الشاشة</string> <string name="menu_exitfullscreen">إنهاء وضع الشاشة الكاملة</string> <string name="menu_read_aloud">اقرأ بصوت عال</string> <string name="menu_read_aloud_stop">توقف عن القراءة بصوت عال</string> <string name="menu_rescan_fs">إعادة تفحص بطاقة SD</string> <string name="save_media">حفظ وسائط باسم...</string> <string name="save_media_error">حدث خطأ أثناء محاولة حفظ الملف!</string> <string name="save_media_saved">نمّ حفظ الملف تحت %1$s في ملف الصور الخاص بك</string> <string name="rescan_fs_warning">مسح لملفات زيم، الرجاء الانتظار...</string> <string name="button_backtotop">عُد إلى الأعلى</string> <string name="search_label">إبحث</string> <string name="articlesearch_hint">اكتب للبحث عن المقالات</string> <string name="choose_file">حدد ملف محتوى زيم (*.zim)</string> <string name="add_bookmark">علّم هذه الصفحة</string> <string name="remove_bookmark">أزل تعليم المحدد</string> <string name="open_in_new_tab">افتح الوصلة في علامة تبويب جديدة</string> <string name="error_nozimfilesfound">لا توجد ملفات زيم على الجهتز الخاص بك.\nالق نظرة صفحة المساعدة للحصول على الإرشادات الخاصة بكيفيّة تحميل محتوى على كيويكس.\nإذا قمت بوضع ملف زيم على الجهز الخاص بك أو وحدة التخزين الخارجيّة، ربّما أنت في حاجة إلى إعادة تشغيل الجهاز.</string> <string name="error_filenotfound">خطأ: تعذر العثور على الملف المحدد زيم.</string> <string name="error_fileinvalid">خطأ: ملف زيم المحدد ليس ملفا صالحا.</string> <string name="error_articlenotfound">خطأ: تحميل المقالة \"%1$s\" لم يتم بنجاح.</string> <string name="error_articleurlnotfound">خطأ: تحميل المقالة (Url: %1$s) لم يتم بنجاح.</string> <string name="pref_display_title">عرض</string> <string name="pref_zoom_sub_title">Zoom</string> <string name="pref_zoom_dialog">مستوى التكبير/التصغير</string> <string-array name="pref_zoom_entries"> <item name="pref_zoom_entry_automatic">آلي</item> <item name="pref_zoom_entry_small">صغير</item> <item name="pref_zoom_entry_medium">متوسط</item> <item name="pref_zoom_entry_large">كبير</item> </string-array> <string name="pref_info_title">معلومة</string> <string name="pref_info_version">نسخة</string> <string name="pref_zoom_enable">تحكم التكبير/التصغير</string> <string name="pref_nightmode">الوضع الليلي</string> <string name="pref_nightmode_summary">إظهار المقالات مع الألوان المسترجعة</string> <string name="pref_backtotop">ارجع إلى الأعلى</string> <string name="pref_backtotop_summary">عرض زر في نهاية الصفحة للذهاب إلى أعلاها</string> <string name="pref_language_title">اللغة</string> <string name="pref_language_chooser">اختر لغة</string> <string name="tts_lang_not_supported">The language of this page is not supported, or appropriate language data was not installed. 
The article may not be properly read.</string> <string name="no_reader_application_installed">Could not find an installed application for this type of file</string> <string name="custom_app_missing_content">Your application is corrupted.\nThis might happen when you remove files on the SD Card.\nYou need to uninstall then reinstall the App from the Play Store.</string> <string name="go_to_play_store">Go to Play Store</string> <string name="no_bookmarks">No Bookmarks</string> <string name="menu_bookmarks_list">علامات القراءة</string> <string name="no_section_info">No Content Headers Found</string> <string name="request_storage">To access zim files we need access to your storage</string> <string name="clear_recent_history_dialog">Are you sure you want to delete your search history?</string> <string name="clear_recent_and_tabs_history_dialog">Are you sure you want to delete your search history and reset all active tabs?</string> <string name="delete_recent_search_item">Delete this item?</string> <string name="pref_clear_all_history_title">Clear History</string> <string name="pref_clear_all_history_summary">Clear recent searches and tabs history</string> <string name="all_history_cleared_toast">All History Cleared</string> <string name="clear_all_history_dialog_title">Clear All History</string> <string name="delete">Delete</string> <string name="delete_specific_search_toast">Recent search removed</string> <string name="hint_contents_drawer_message">You can swipe left to view the contents of this article</string> <string name="got_it">Got it</string> <string name="did_you_know">Did you know?</string> <string name="undo">Undo</string> <string name="tab_closed">Tab closed</string> <string name="deleted_message">deleted</string> <string name="bookmarks_restored">Bookmarks restored</string> <string name="bookmark_added">Bookmark added</string> <string name="rate_dialog_title">Please Rate Us</string> <string name="rate_dialog_msg_1">If you enjoy using</string> <string name="rate_dialog_msg_2">, please take a moment to rate it. Thanks for your support!</string> <string name="rate_dialog_positive">Rate!</string> <string name="rate_dialog_negative">No, thanks</string> <string name="rate_dialog_neutral">Later</string> <string name="open">Open</string> <string name="bookmark_removed">Bookmark removed</string> <string name="pref_newtab_background_title">Open new tab in background</string> <string name="pref_newtab_background_summary">When opening a new tab it will open in background</string> <string name="pref_extras">Extras</string> <string name="new_tab_snackbar">Article opened in new tab</string> <string name="search_widget_text">Search Kiwix</string> <string name="speech_prompt_text">Speak to search %s</string> <string name="speech_not_supported">Sorry! 
Your device does not support speech input</string> <string name="download_started_library">Download started...</string> <string name="menu_library">Download ZIM files</string> <string name="local_zims">Local</string> <string name="remote_zims">Remote</string> <string name="zim_manager">ZIM Management</string> <string name="delete_specific_zim">Delete this ZIM?</string> <string name="delete_specific_zim_toast">ZIM file deleted</string> <string name="zim_file_downloading">Downloading</string> <string name="zim_file_downloaded">Downloaded</string> <string name="download_pause">pause</string> <string name="download_play">play</string> <string name="download_stop">stop</string> <string name="download_close">close</string> <string name="no_downloads_here">No Downloads Here</string> <string name="no_files_here">No Files Here</string> <string name="download_complete_snackbar">Download complete</string> <string name="download_over_network">Are you sure you want to start the download without WiFi?</string> <string name="download_no_space">Insufficient space to download this file</string> <string name="space_available">Space Available:</string> <string name="zim_simple">Simple</string> <string name="zim_nopic">No Pictures</string> <string name="open_partial_zim">This file could be incomplete. Do you wish to attempt to open it?</string> <string name="no_network_connection">No network connection</string> </resources>
{ "pile_set_name": "Github" }
{ "name": "packet-ts-webserver", "version": "0.1.0", "devDependencies": { "@types/node": "^8.0.0" }, "dependencies": { "@pulumi/packet": "^1.4.0", "@pulumi/random": "latest" }, "license": "Apache-2.0" }
{ "pile_set_name": "Github" }
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "https://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"> <html xmlns="http://www.w3.org/1999/xhtml"> <head> <meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/> <meta http-equiv="X-UA-Compatible" content="IE=9"/> <meta name="generator" content="Doxygen 1.8.17"/> <meta name="viewport" content="width=device-width, initial-scale=1"/> <title>MNNKit: Class Members</title> <link href="tabs.css" rel="stylesheet" type="text/css"/> <script type="text/javascript" src="jquery.js"></script> <script type="text/javascript" src="dynsections.js"></script> <link href="doxygen.css" rel="stylesheet" type="text/css" /> </head> <body> <div id="top"><!-- do not remove this div, it is closed by doxygen! --> <div id="titlearea"> <table cellspacing="0" cellpadding="0"> <tbody> <tr style="height: 56px;"> <td id="projectalign" style="padding-left: 0.5em;"> <div id="projectname">MNNKit &#160;<span id="projectnumber">1.0</span> </div> <div id="projectbrief">MNN Kit SDK</div> </td> </tr> </tbody> </table> </div> <!-- end header part --> <!-- Generated by Doxygen 1.8.17 --> <script type="text/javascript" src="menudata.js"></script> <script type="text/javascript" src="menu.js"></script> <script type="text/javascript"> /* @license magnet:?xt=urn:btih:cf05388f2679ee054f2beb29a391d25f4e673ac3&amp;dn=gpl-2.0.txt GPL-v2 */ $(function() { initMenu('',false,false,'search.php','Search'); }); /* @license-end */</script> <div id="main-nav"></div> </div><!-- top --> <div class="contents"> <div class="textblock">Here is a list of all class members with links to the classes they belong to:</div> <h3><a id="index_e"></a>- e -</h3><ul> <li>elementSize() : <a class="el" href="class_m_n_n_1_1_tensor.html#a324f5b9156ba3994afbf355255db1b85">MNN::Tensor</a> </li> <li>errorWithMessage: : <a class="el" href="interface_f_m_database.html#ac352d1765a3fe3e8d359ed2bdae9f660">FMDatabase</a> </li> <li>exe : <a class="el" href="struct_m_n_n_1_1_express_1_1_executor_1_1_compute_cache_1_1_unit.html#af6f3ab4b987314774580dfbb3f841a83">MNN::Express::Executor::ComputeCache::Unit</a> </li> <li>executeLocked: : <a class="el" href="interface_f_m_database_pool.html#a7020144a42458ffcdc7fefcf9656d4ff">FMDatabasePool</a> </li> <li>executeQuery: : <a class="el" href="interface_f_m_database.html#a3b2f725058905c07d7d44dbc2b054183">FMDatabase</a> </li> <li>executeQuery:values:error: : <a class="el" href="interface_f_m_database.html#a3af528eff75850748f05b63ccf9df2e5">FMDatabase</a> </li> <li>executeQuery:withArgumentsInArray: : <a class="el" href="interface_f_m_database.html#ad72746b460d468fa2d39218b4df47443">FMDatabase</a> </li> <li>executeQuery:withArgumentsInArray:orDictionary:orVAList: : <a class="el" href="interface_f_m_database.html#a9a43605a3b91c98c8b562205a3bb1370">FMDatabase</a> , <a class="el" href="category_f_m_database_07_private_stuff_08.html#a403b9cc1c7580ce31815a1af811ce221">FMDatabase(PrivateStuff)</a> </li> <li>executeQuery:withParameterDictionary: : <a class="el" href="interface_f_m_database.html#af82f9fc438895a9cf3ba27d717467b25">FMDatabase</a> </li> <li>executeQuery:withVAList: : <a class="el" href="interface_f_m_database.html#ab7ad41e0c8bccf60ec008449d228d47c">FMDatabase</a> </li> <li>executeQueryWithFormat: : <a class="el" href="interface_f_m_database.html#a396b5cd74d6d9f163ae45ea2827ed9ae">FMDatabase</a> </li> <li>executeStatements: : <a class="el" href="interface_f_m_database.html#a96ea0edfe88808dfd742051242343755">FMDatabase</a> </li> 
<li>executeStatements:withResultBlock: : <a class="el" href="interface_f_m_database.html#a97267ddfd3fb4fd8d9827b585645c048">FMDatabase</a> </li> <li>executeUpdate: : <a class="el" href="interface_f_m_database.html#a7392af0328f480ca77a4a1ae9c56c11e">FMDatabase</a> </li> <li>executeUpdate:error:withArgumentsInArray:orDictionary:orVAList: : <a class="el" href="interface_f_m_database.html#a45d1384b74028998e5c0aba67de03f37">FMDatabase</a> </li> <li>executeUpdate:values:error: : <a class="el" href="interface_f_m_database.html#a4dbedf9473bf0c08f9ab637a042a942d">FMDatabase</a> </li> <li>executeUpdate:withArgumentsInArray: : <a class="el" href="interface_f_m_database.html#ae5db2b0826d372c6c8de7a75c4af6a09">FMDatabase</a> </li> <li>executeUpdate:withErrorAndBindings: : <a class="el" href="interface_f_m_database.html#a237b997531d92bb3beaec550b06fe72e">FMDatabase</a> </li> <li>executeUpdate:withParameterDictionary: : <a class="el" href="interface_f_m_database.html#a482a8ecb5a6b494bae7f5f8d41905903">FMDatabase</a> </li> <li>executeUpdate:withVAList: : <a class="el" href="interface_f_m_database.html#aa1754c3f7f19499186f4f4baba8eb6f1">FMDatabase</a> </li> <li>executeUpdateWithFormat: : <a class="el" href="interface_f_m_database.html#a9df29c3a1b23d2b4d9a6bc220cd6febd">FMDatabase</a> </li> <li>Executor() : <a class="el" href="class_m_n_n_1_1_express_1_1_executor.html#a6d710dafa2e503ec7ca3ba6fdb462012">MNN::Express::Executor</a> </li> <li>Expr() : <a class="el" href="class_m_n_n_1_1_express_1_1_expr.html#a9769301eedae80696700deeee17427a7">MNN::Express::Expr</a> , <a class="el" href="class_m_n_n_1_1_express_1_1_variable.html#a468d0bc0f5027352921401ec143639ae">MNN::Express::Variable</a> </li> <li>expr() : <a class="el" href="class_m_n_n_1_1_express_1_1_variable.html#a24e0e8a226d7720ad0fd3efc084ecc21">MNN::Express::Variable</a> </li> <li>extent : <a class="el" href="structhalide__dimension__t.html#a6c808a6b5accb9fc952917fa671fb9e8">halide_dimension_t</a> </li> <li>extra() : <a class="el" href="class_m_n_n_1_1_express_1_1_expr.html#ae2bb49cf82d6c48092ea301cd48df702">MNN::Express::Expr</a> </li> <li>extractSQL:argumentsList:intoString:arguments: : <a class="el" href="interface_f_m_database.html#a4b7b82b040a7ba1bd85d22bdad972afa">FMDatabase</a> </li> </ul> </div><!-- contents --> <!-- start footer part --> <hr class="footer"/><address class="footer"><small> Generated by &#160;<a href="http://www.doxygen.org/index.html"> <img class="footer" src="doxygen.png" alt="doxygen"/> </a> 1.8.17 </small></address> </body> </html>
{ "pile_set_name": "Github" }
writer/director lawrence kasdan had a hand in penning some of the biggest film successes of the 1980s . yes , that's right , he co-wrote raiders of the lost ark ( 9/10 ) , the empire strikes back ( 8 . 5/10 ) and return of the jedi ( 8/10 ) . now with this film , it looks as though he has decided to test his skills at mediocre screenwriting and bland directing . and guess what ? he succeeds once again ! plot : a man with a shady and regrettable past decides to run away and into a small american town , where he pretends to be a licensed psychologist to an open-armed swarm of people with problems . it isn't long before he befriends the small-town billionaire , folks become suspicious and he falls for one of his own patients . critique : this film is not a comedy ! it is a drama . i say this right up front because from the looks of the trailer , you would think that this film secures its base in humor , but unfortunately for us , it does not . it is a serious film ( seriously flawed if you ask me , but i digress ) which offers a couple of witty quips to keep you awake , but overall , just sits there . now on to my review . this movie sucks because it was slow and boring , starred an uninteresting protagonist with an unbelievable past , included extremely dull patients whose problems left me indifferent at best and pissed at worst , and certified it all with many a predictable ending . i have no idea what lawrence kasdan was trying to accomplish with this picture , but whatever it was . . . he missed ! anyone without his name could never have made this film because it is an extremely generic puff-piece , which on a good day , might be described as predictably digestible . it's no wonder that the studio is trying to sell it as a comedy ! it sucks as a drama , and bored my movie cohort right to sleep . i've given it four points on four extremely shallow yet distinguishable marks . first of all , i couldn't help but think about how much the lead actor , loren dean , looked like a young charles grodin . he even acted like him , save for the dry , sardonic wit . this kept me interested in watching him at least . number two , and this one is really shallow ( seemed to be geared that way as well ) , there were quite a few " titshots " , as they would say back in high school . and god help me if i can't get enough of those in an aimless drama . and three , and this one surprised me the most , actor ted danson's cameo ( yes , he will always be sam malone to us real fans ! ) was the best part about this movie and actually made me see him as a different person . a different asshole , but a different person nonetheless . if ever you rent this movie one day ( promise you won't throw away your hard-earned money at the theaters ? ! ) , wait for his scene because it's actually pretty good . other than that , drab , drab , drab and drab . even jason lee , a personal favorite of mine , was left out to dry with lame dialogue and one of the phoniest romances to hit the big screen in a while . well , at least he finally " came out " and did some real skateboarding in this movie ! oh yeah , and i guess that's the fourth point in my rating . anyway , if you enjoy watching boring patients babble on about their boring problems to an uninteresting psychologist . . . this film's your bag ! otherwise , skip it and see analyze this ( 8/10 ) again . . . now there's a great shrink movie ! little known facts about this film and its stars : " three's company " fans alert ! ! ! 
terri from the tv show , also known as actress priscilla barnes , plays a small but pivotal part as the landlady in this film . the scene is actually a fantasy sequence and does feature cleavage , so stay tuned , kids ! also , it is to note that the actress was once named " penthouse pet of the month " in march of 1976 . she was known as one joann witty back then . she's originally from jersey . who the hell is loren dean , the dude who plays mumford in this film ? you got me , but all i know is that he was born in las vegas in 1969 and played the character of billy bathgate in the 1991 film of the same name . jason lee was born in the state of california in the year of 1971 . he was a professional skateboarder before his acting career took off after mallrats ( 6/10 ) , and even owns his own skateboarding company called stereo manufacturing corp . he's been in every kevin smith film except for clerks ( 8 . 5/10 ) . listen closely and you will hear the pharmacist in this film ( the ballooning actor formerly known as pruit taylor vince ) make a reference to the " lost ark " , which is obviously an in-joke , considering that it is the writer/director of this film , lawrence kasdan , who co-wrote raiders of the lost ark ( 9/10 ) . kasdan also played the part of dr . green in 1997's as good as it gets ( 8/10 ) . ted danson also played the role of lawyer peter lowenstein in kasdan's 1981 directorial debut body heat starring a sexy kathleen turner and william hurt .
{ "pile_set_name": "Github" }
PREHOOK: query: create table dest_sp (cnt int) PREHOOK: type: CREATETABLE PREHOOK: Output: database:default PREHOOK: Output: default@dest_sp POSTHOOK: query: create table dest_sp (cnt int) POSTHOOK: type: CREATETABLE POSTHOOK: Output: database:default POSTHOOK: Output: default@dest_sp PREHOOK: query: insert overwrite table dest_sp select * from (select count(1) as cnt from src union all select count(1) as cnt from srcpart where ds = '2009-08-09' )x PREHOOK: type: QUERY PREHOOK: Input: default@src PREHOOK: Input: default@srcpart PREHOOK: Output: default@dest_sp POSTHOOK: query: insert overwrite table dest_sp select * from (select count(1) as cnt from src union all select count(1) as cnt from srcpart where ds = '2009-08-09' )x POSTHOOK: type: QUERY POSTHOOK: Input: default@src POSTHOOK: Input: default@srcpart POSTHOOK: Output: default@dest_sp POSTHOOK: Lineage: dest_sp.cnt EXPRESSION [(src)src.null, (srcpart)srcpart.null, ] PREHOOK: query: select * from dest_sp x order by x.cnt limit 2 PREHOOK: type: QUERY PREHOOK: Input: default@dest_sp #### A masked pattern was here #### POSTHOOK: query: select * from dest_sp x order by x.cnt limit 2 POSTHOOK: type: QUERY POSTHOOK: Input: default@dest_sp #### A masked pattern was here #### 0 500
{ "pile_set_name": "Github" }
{ "_from": "[email protected]", "_id": "[email protected]", "_inBundle": false, "_integrity": "sha1-N5TzeMWLNC6n27sjCVEJxLO2IpE=", "_location": "/fs-extra", "_phantomChildren": {}, "_requested": { "type": "version", "registry": true, "raw": "[email protected]", "name": "fs-extra", "escapedName": "fs-extra", "rawSpec": "3.0.1", "saveSpec": null, "fetchSpec": "3.0.1" }, "_requiredBy": [ "/gitbook-cli" ], "_resolved": "https://registry.npm.taobao.org/fs-extra/download/fs-extra-3.0.1.tgz", "_shasum": "3794f378c58b342ea7dbbb23095109c4b3b62291", "_spec": "[email protected]", "_where": "/Users/lijianzhao/github/StarsAndClown/node_modules/gitbook-cli", "author": { "name": "JP Richardson", "email": "[email protected]" }, "bugs": { "url": "https://github.com/jprichardson/node-fs-extra/issues" }, "bundleDependencies": false, "dependencies": { "graceful-fs": "^4.1.2", "jsonfile": "^3.0.0", "universalify": "^0.1.0" }, "deprecated": false, "description": "fs-extra contains methods that aren't included in the vanilla Node.js fs package. Such as mkdir -p, cp -r, and rm -rf.", "devDependencies": { "coveralls": "^2.11.2", "istanbul": "^0.4.5", "klaw": "^1.0.0", "klaw-sync": "^1.1.2", "minimist": "^1.1.1", "mocha": "^3.1.2", "proxyquire": "^1.7.10", "read-dir-files": "^0.1.1", "rimraf": "^2.2.8", "secure-random": "^1.1.1", "standard": "^10.0.2", "standard-markdown": "^2.3.0" }, "homepage": "https://github.com/jprichardson/node-fs-extra", "keywords": [ "fs", "file", "file system", "copy", "directory", "extra", "mkdirp", "mkdir", "mkdirs", "recursive", "json", "read", "write", "extra", "delete", "remove", "touch", "create", "text", "output", "move" ], "license": "MIT", "main": "./lib/index", "name": "fs-extra", "repository": { "type": "git", "url": "git+https://github.com/jprichardson/node-fs-extra.git" }, "scripts": { "coverage": "istanbul cover -i 'lib/**' -x '**/__tests__/**' test.js", "coveralls": "npm run coverage && coveralls < coverage/lcov.info", "lint": "standard && standard-markdown", "test": "npm run lint && npm run unit", "test-find": "find ./lib/**/__tests__ -name *.test.js | xargs mocha", "unit": "node test.js" }, "version": "3.0.1" }
{ "pile_set_name": "Github" }
# created by tools/tclZIC.tcl - do not edit set TZData(:Etc/GMT+11) { {-9223372036854775808 -39600 0 -11} }
{ "pile_set_name": "Github" }
// // © Copyright Henrik Ravn 2004 // // Use, modification and distribution are subject to the Boost Software License, Version 1.0. // (See accompanying file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) // using System; using System.Diagnostics; using System.Runtime.InteropServices; namespace DotZLib { /// <summary> /// Implements a data compressor, using the deflate algorithm in the ZLib dll /// </summary> public sealed class Deflater : CodecBase { #region Dll imports [DllImport("ZLIB1.dll", CallingConvention=CallingConvention.Cdecl, CharSet=CharSet.Ansi)] private static extern int deflateInit_(ref ZStream sz, int level, string vs, int size); [DllImport("ZLIB1.dll", CallingConvention=CallingConvention.Cdecl)] private static extern int deflate(ref ZStream sz, int flush); [DllImport("ZLIB1.dll", CallingConvention=CallingConvention.Cdecl)] private static extern int deflateReset(ref ZStream sz); [DllImport("ZLIB1.dll", CallingConvention=CallingConvention.Cdecl)] private static extern int deflateEnd(ref ZStream sz); #endregion /// <summary> /// Constructs an new instance of the <c>Deflater</c> /// </summary> /// <param name="level">The compression level to use for this <c>Deflater</c></param> public Deflater(CompressLevel level) : base() { int retval = deflateInit_(ref _ztream, (int)level, Info.Version, Marshal.SizeOf(_ztream)); if (retval != 0) throw new ZLibException(retval, "Could not initialize deflater"); resetOutput(); } /// <summary> /// Adds more data to the codec to be processed. /// </summary> /// <param name="data">Byte array containing the data to be added to the codec</param> /// <param name="offset">The index of the first byte to add from <c>data</c></param> /// <param name="count">The number of bytes to add</param> /// <remarks>Adding data may, or may not, raise the <c>DataAvailable</c> event</remarks> public override void Add(byte[] data, int offset, int count) { if (data == null) throw new ArgumentNullException(); if (offset < 0 || count < 0) throw new ArgumentOutOfRangeException(); if ((offset+count) > data.Length) throw new ArgumentException(); int total = count; int inputIndex = offset; int err = 0; while (err >= 0 && inputIndex < total) { copyInput(data, inputIndex, Math.Min(total - inputIndex, kBufferSize)); while (err >= 0 && _ztream.avail_in > 0) { err = deflate(ref _ztream, (int)FlushTypes.None); if (err == 0) while (_ztream.avail_out == 0) { OnDataAvailable(); err = deflate(ref _ztream, (int)FlushTypes.None); } inputIndex += (int)_ztream.total_in; } } setChecksum( _ztream.adler ); } /// <summary> /// Finishes up any pending data that needs to be processed and handled. /// </summary> public override void Finish() { int err; do { err = deflate(ref _ztream, (int)FlushTypes.Finish); OnDataAvailable(); } while (err == 0); setChecksum( _ztream.adler ); deflateReset(ref _ztream); resetOutput(); } /// <summary> /// Closes the internal zlib deflate stream /// </summary> protected override void CleanUp() { deflateEnd(ref _ztream); } } }
{ "pile_set_name": "Github" }
// Copyright 2016 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. //go:generate go run gen.go gen_common.go // Package number contains tools and data for formatting numbers. package number import ( "unicode/utf8" "golang.org/x/text/internal" "golang.org/x/text/language" ) // Info holds number formatting configuration data. type Info struct { system systemData // numbering system information symIndex byte // index to symbols } // InfoFromLangID returns a Info for the given compact language identifier and // numbering system identifier. If system is the empty string, the default // numbering system will be taken for that language. func InfoFromLangID(compactIndex int, numberSystem string) Info { p := langToDefaults[compactIndex] // Lookup the entry for the language. pSymIndex := byte(0) // Default: Latin, default symbols system, ok := systemMap[numberSystem] if !ok { // Take the value for the default numbering system. This is by far the // most common case as an alternative numbering system is hardly used. if p&0x80 == 0 { pSymIndex = p } else { // Take the first entry from the alternatives list. data := langToAlt[p&^0x80] pSymIndex = data.symIndex system = data.system } } else { langIndex := compactIndex ns := system outerLoop: for { if p&0x80 == 0 { if ns == 0 { // The index directly points to the symbol data. pSymIndex = p break } // Move to the parent and retry. langIndex = int(internal.Parent[langIndex]) } // The index points to a list of symbol data indexes. for _, e := range langToAlt[p&^0x80:] { if int(e.compactTag) != langIndex { if langIndex == 0 { // The CLDR root defines full symbol information for all // numbering systems (even though mostly by means of // aliases). This means that we will never fall back to // the default of the language. Also, the loop is // guaranteed to terminate as a consequence. ns = numLatn // Fall back to Latin and start from the original // language. See // http://unicode.org/reports/tr35/#Locale_Inheritance. langIndex = compactIndex } else { // Fall back to parent. langIndex = int(internal.Parent[langIndex]) } break } if e.system == ns { pSymIndex = e.symIndex break outerLoop } } } } if int(system) >= len(numSysData) { // algorithmic // Will generate ASCII digits in case the user inadvertently calls // WriteDigit or Digit on it. d := numSysData[0] d.id = system return Info{ system: d, symIndex: pSymIndex, } } return Info{ system: numSysData[system], symIndex: pSymIndex, } } // InfoFromTag returns a Info for the given language tag. func InfoFromTag(t language.Tag) Info { for { if index, ok := language.CompactIndex(t); ok { return InfoFromLangID(index, t.TypeForKey("nu")) } t = t.Parent() } } // IsDecimal reports if the numbering system can convert decimal to native // symbols one-to-one. func (n Info) IsDecimal() bool { return int(n.system.id) < len(numSysData) } // WriteDigit writes the UTF-8 sequence for n corresponding to the given ASCII // digit to dst and reports the number of bytes written. dst must be large // enough to hold the rune (can be up to utf8.UTFMax bytes). func (n Info) WriteDigit(dst []byte, asciiDigit rune) int { copy(dst, n.system.zero[:n.system.digitSize]) dst[n.system.digitSize-1] += byte(asciiDigit - '0') return int(n.system.digitSize) } // AppendDigit appends the UTF-8 sequence for n corresponding to the given digit // to dst and reports the number of bytes written. 
dst must be large enough to // hold the rune (can be up to utf8.UTFMax bytes). func (n Info) AppendDigit(dst []byte, digit byte) []byte { dst = append(dst, n.system.zero[:n.system.digitSize]...) dst[len(dst)-1] += digit return dst } // Digit returns the digit for the numbering system for the corresponding ASCII // value. For example, ni.Digit('3') could return '三'. Note that the argument // is the rune constant '3', which equals 51, not the integer constant 3. func (n Info) Digit(asciiDigit rune) rune { var x [utf8.UTFMax]byte n.WriteDigit(x[:], asciiDigit) r, _ := utf8.DecodeRune(x[:]) return r } // Symbol returns the string for the given symbol type. func (n Info) Symbol(t SymbolType) string { return symData.Elem(int(symIndex[n.symIndex][t])) } func formatForLang(t language.Tag, index []byte) *Pattern { for ; ; t = t.Parent() { if x, ok := language.CompactIndex(t); ok { return &formats[index[x]] } } }
{ "pile_set_name": "Github" }
module Vale.Stdcalls.X64.Sha open FStar.Mul val z3rlimit_hack (x:nat) : squash (x < x + x + 1) #reset-options "--z3rlimit 50" open FStar.HyperStack.ST module HS = FStar.HyperStack module B = LowStar.Buffer module IB = LowStar.ImmutableBuffer module DV = LowStar.BufferView.Down open Vale.Def.Types_s open Vale.Interop.Base module IX64 = Vale.Interop.X64 module VSig = Vale.AsLowStar.ValeSig module LSig = Vale.AsLowStar.LowStarSig module ME = Vale.X64.Memory module V = Vale.X64.Decls module IA = Vale.Interop.Assumptions module W = Vale.AsLowStar.Wrapper open Vale.X64.MemoryAdapters module VS = Vale.X64.State module MS = Vale.X64.Machine_s module SH = Vale.SHA.X64 let uint64 = UInt64.t (* A little utility to trigger normalization in types *) noextract let as_t (#a:Type) (x:normal a) : a = x noextract let as_normal_t (#a:Type) (x:a) : normal a = x [@__reduce__] noextract let b128 = buf_t TUInt32 TUInt128 [@__reduce__] noextract let b8_128 = buf_t TUInt8 TUInt128 [@__reduce__] noextract let ib128 = ibuf_t TUInt32 TUInt128 [@__reduce__] noextract let t128_mod = TD_Buffer TUInt32 TUInt128 default_bq [@__reduce__] noextract let t128_no_mod = TD_Buffer TUInt8 TUInt128 ({modified=false; strict_disjointness=false; taint=MS.Secret}) [@__reduce__] noextract let t128_imm = TD_ImmBuffer TUInt32 TUInt128 default_bq [@__reduce__] noextract let tuint64 = TD_Base TUInt64 [@__reduce__] noextract let dom: IX64.arity_ok_stdcall td = let y = [t128_mod; t128_no_mod; tuint64; t128_imm] in assert_norm (List.length y = 4); y (* Need to rearrange the order of arguments *) [@__reduce__] noextract let sha_pre : VSig.vale_pre dom = fun (c:V.va_code) (ctx_b:b128) (in_b:b8_128) (num_val:uint64) (k_b:ib128) (va_s0:V.va_state) -> SH.va_req_Sha_update_bytes_stdcall c va_s0 IA.win (as_vale_buffer ctx_b) (as_vale_buffer in_b) (UInt64.v num_val) (as_vale_immbuffer k_b) (* Need to rearrange the order of arguments *) [@__reduce__] noextract let sha_post : VSig.vale_post dom = fun (c:V.va_code) (ctx_b:b128) (in_b:b8_128) (num_val:uint64) (k_b:ib128) (va_s0:V.va_state) (va_s1:V.va_state) (f:V.va_fuel) -> SH.va_ens_Sha_update_bytes_stdcall c va_s0 IA.win (as_vale_buffer ctx_b) (as_vale_buffer in_b) (UInt64.v num_val) (as_vale_immbuffer k_b) va_s1 f module VS = Vale.X64.State #set-options "--z3rlimit 50 --max_fuel 0 --max_ifuel 0" [@__reduce__] noextract let sha_lemma' (code:V.va_code) (_win:bool) (ctx_b:b128) (in_b:b8_128) (num_val:uint64) (k_b:ib128) (va_s0:V.va_state) : Ghost (V.va_state & V.va_fuel) (requires sha_pre code ctx_b in_b num_val k_b va_s0) (ensures (fun (va_s1, f) -> V.eval_code code va_s0 f va_s1 /\ VSig.vale_calling_conventions_stdcall va_s0 va_s1 /\ sha_post code ctx_b in_b num_val k_b va_s0 va_s1 f /\ ME.buffer_writeable (as_vale_buffer ctx_b) /\ ME.buffer_writeable (as_vale_buffer in_b) )) = let va_s1, f = SH.va_lemma_Sha_update_bytes_stdcall code va_s0 IA.win (as_vale_buffer ctx_b) (as_vale_buffer in_b) (UInt64.v num_val) (as_vale_immbuffer k_b) in Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt32 ME.TUInt128 ctx_b; Vale.AsLowStar.MemoryHelpers.buffer_writeable_reveal ME.TUInt8 ME.TUInt128 in_b; (va_s1, f) (* Prove that sha_lemma' has the required type *) noextract let sha_lemma = as_t #(VSig.vale_sig_stdcall sha_pre sha_post) sha_lemma' noextract let code_sha = SH.va_code_Sha_update_bytes_stdcall IA.win #reset-options "--z3rlimit 20" (* Here's the type expected for the sha wrapper *) [@__reduce__] noextract let lowstar_sha_t = IX64.as_lowstar_sig_t_weak_stdcall code_sha dom [] _ _ (W.mk_prediction 
code_sha dom [] (sha_lemma code_sha IA.win)) [@ (CCConv "stdcall") ] val sha256_update : normal lowstar_sha_t
{ "pile_set_name": "Github" }
<?php /** * PHPExcel * * Copyright (C) 2006 - 2014 PHPExcel * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA * * @category PHPExcel * @package PHPExcel * @copyright Copyright (c) 2006 - 2014 PHPExcel (http://www.codeplex.com/PHPExcel) * @license http://www.gnu.org/licenses/old-licenses/lgpl-2.1.txt LGPL * @version ##VERSION##, ##DATE## */ /** Error reporting */ error_reporting(E_ALL); ini_set('display_errors', TRUE); ini_set('display_startup_errors', TRUE); date_default_timezone_set('Europe/London'); define('EOL',(PHP_SAPI == 'cli') ? PHP_EOL : '<br />'); date_default_timezone_set('Europe/London'); /** Include PHPExcel */ require_once dirname(__FILE__) . '/../Classes/PHPExcel.php'; // Create new PHPExcel object echo date('H:i:s') , " Create new PHPExcel object" , EOL; $objPHPExcel = new PHPExcel(); // Set document properties echo date('H:i:s') , " Set document properties" , EOL; $objPHPExcel->getProperties()->setCreator("Maarten Balliauw") ->setLastModifiedBy("Maarten Balliauw") ->setTitle("Office 2007 XLSX Test Document") ->setSubject("Office 2007 XLSX Test Document") ->setDescription("Test document for Office 2007 XLSX, generated using PHP classes.") ->setKeywords("office 2007 openxml php") ->setCategory("Test result file"); // Add some data echo date('H:i:s') , " Add some data" , EOL; $objPHPExcel->setActiveSheetIndex(0); $sharedStyle1 = new PHPExcel_Style(); $sharedStyle2 = new PHPExcel_Style(); $sharedStyle1->applyFromArray( array('fill' => array( 'type' => PHPExcel_Style_Fill::FILL_SOLID, 'color' => array('argb' => 'FFCCFFCC') ), 'borders' => array( 'bottom' => array('style' => PHPExcel_Style_Border::BORDER_THIN), 'right' => array('style' => PHPExcel_Style_Border::BORDER_MEDIUM) ) )); $sharedStyle2->applyFromArray( array('fill' => array( 'type' => PHPExcel_Style_Fill::FILL_SOLID, 'color' => array('argb' => 'FFFFFF00') ), 'borders' => array( 'bottom' => array('style' => PHPExcel_Style_Border::BORDER_THIN), 'right' => array('style' => PHPExcel_Style_Border::BORDER_MEDIUM) ) )); $objPHPExcel->getActiveSheet()->setSharedStyle($sharedStyle1, "A1:T100"); $objPHPExcel->getActiveSheet()->setSharedStyle($sharedStyle2, "C5:R95"); // Save Excel 2007 file echo date('H:i:s') , " Write to Excel2007 format" , EOL; $callStartTime = microtime(true); $objWriter = PHPExcel_IOFactory::createWriter($objPHPExcel, 'Excel2007'); $objWriter->save(str_replace('.php', '.xlsx', __FILE__)); $callEndTime = microtime(true); $callTime = $callEndTime - $callStartTime; echo date('H:i:s') , " File written to " , str_replace('.php', '.xlsx', pathinfo(__FILE__, PATHINFO_BASENAME)) , EOL; echo 'Call time to write Workbook was ' , sprintf('%.4f',$callTime) , " seconds" , EOL; // Echo memory usage echo date('H:i:s') , ' Current memory usage: ' , (memory_get_usage(true) / 1024 / 1024) , " MB" , EOL; // Save Excel 95 file echo date('H:i:s') , " Write to Excel5 
format" , EOL; $callStartTime = microtime(true); $objWriter = PHPExcel_IOFactory::createWriter($objPHPExcel, 'Excel5'); $objWriter->save(str_replace('.php', '.xls', __FILE__)); $callEndTime = microtime(true); $callTime = $callEndTime - $callStartTime; echo date('H:i:s') , " File written to " , str_replace('.php', '.xls', pathinfo(__FILE__, PATHINFO_BASENAME)) , EOL; echo 'Call time to write Workbook was ' , sprintf('%.4f',$callTime) , " seconds" , EOL; // Echo memory usage echo date('H:i:s') , ' Current memory usage: ' , (memory_get_usage(true) / 1024 / 1024) , " MB" , EOL; // Echo memory peak usage echo date('H:i:s') , " Peak memory usage: " , (memory_get_peak_usage(true) / 1024 / 1024) , " MB" , EOL; // Echo done echo date('H:i:s') , " Done writing file" , EOL; echo 'File has been created in ' , getcwd() , EOL;
{ "pile_set_name": "Github" }
var createWrap = require('./_createWrap');

/** Used to compose bitmasks for function metadata. */
var WRAP_ARY_FLAG = 128;

/**
 * Creates a function that invokes `func`, with up to `n` arguments,
 * ignoring any additional arguments.
 *
 * @static
 * @memberOf _
 * @since 3.0.0
 * @category Function
 * @param {Function} func The function to cap arguments for.
 * @param {number} [n=func.length] The arity cap.
 * @param- {Object} [guard] Enables use as an iteratee for methods like `_.map`.
 * @returns {Function} Returns the new capped function.
 * @example
 *
 * _.map(['6', '8', '10'], _.ary(parseInt, 1));
 * // => [6, 8, 10]
 */
function ary(func, n, guard) {
  n = guard ? undefined : n;
  n = (func && n == null) ? func.length : n;
  return createWrap(func, WRAP_ARY_FLAG, undefined, undefined, undefined, undefined, n);
}

module.exports = ary;
{ "pile_set_name": "Github" }
/* Simple DirectMedia Layer Copyright (C) 1997-2013 Sam Lantinga <[email protected]> This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. */ /** * \file SDL_test_harness.h * * Include file for SDL test framework. * * This code is a part of the SDL2_test library, not the main SDL library. */ /* Defines types for test case definitions and the test execution harness API. Based on original GSOC code by Markus Kauppila <[email protected]> */ #ifndef _SDL_test_harness_h #define _SDL_test_harness_h #include "begin_code.h" /* Set up for C function definitions, even when using C++ */ #ifdef __cplusplus extern "C" { #endif /* ! Definitions for test case structures */ #define TEST_ENABLED 1 #define TEST_DISABLED 0 /* ! Definition of all the possible test return values of the test case method */ #define TEST_ABORTED -1 #define TEST_STARTED 0 #define TEST_COMPLETED 1 #define TEST_SKIPPED 2 /* ! Definition of all the possible test results for the harness */ #define TEST_RESULT_PASSED 0 #define TEST_RESULT_FAILED 1 #define TEST_RESULT_NO_ASSERT 2 #define TEST_RESULT_SKIPPED 3 #define TEST_RESULT_SETUP_FAILURE 4 /* !< Function pointer to a test case setup function (run before every test) */ typedef void (*SDLTest_TestCaseSetUpFp)(void *arg); /* !< Function pointer to a test case function */ typedef int (*SDLTest_TestCaseFp)(void *arg); /* !< Function pointer to a test case teardown function (run after every test) */ typedef void (*SDLTest_TestCaseTearDownFp)(void *arg); /** * Holds information about a single test case. */ typedef struct SDLTest_TestCaseReference { /* !< Func2Stress */ SDLTest_TestCaseFp testCase; /* !< Short name (or function name) "Func2Stress" */ char *name; /* !< Long name or full description "This test pushes func2() to the limit." */ char *description; /* !< Set to TEST_ENABLED or TEST_DISABLED (test won't be run) */ int enabled; } SDLTest_TestCaseReference; /** * Holds information about a test suite (multiple test cases). */ typedef struct SDLTest_TestSuiteReference { /* !< "PlatformSuite" */ char *name; /* !< The function that is run before each test. NULL skips. */ SDLTest_TestCaseSetUpFp testSetUp; /* !< The test cases that are run as part of the suite. Last item should be NULL. */ const SDLTest_TestCaseReference **testCases; /* !< The function that is run after each test. NULL skips. */ SDLTest_TestCaseTearDownFp testTearDown; } SDLTest_TestSuiteReference; /** * \brief Execute a test suite using the given run seed and execution key. * * \param testSuites Suites containing the test case. * \param userRunSeed Custom run seed provided by user, or NULL to autogenerate one. * \param userExecKey Custom execution key provided by user, or 0 to autogenerate one. * \param filter Filter specification. NULL disables. Case sensitive. 
* \param testIterations Number of iterations to run each test case. * * \returns Test run result; 0 when all tests passed, 1 if any tests failed. */ int SDLTest_RunSuites(SDLTest_TestSuiteReference *testSuites[], const char *userRunSeed, Uint64 userExecKey, const char *filter, int testIterations); /* Ends C function definitions when using C++ */ #ifdef __cplusplus } #endif #include "close_code.h" #endif /* _SDL_test_harness_h */ /* vi: set ts=4 sw=4 expandtab: */
{ "pile_set_name": "Github" }
http://bigocheatsheet.com/
https://algs4.cs.princeton.edu/cheatsheet/
https://wiki.python.org/moin/TimeComplexity
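Not part of the cheat sheets above — just a small, self-contained Python sketch of the kind of difference those tables document: membership tests on a list are O(n), while on a set they are O(1) on average.

import timeit

data_list = list(range(100_000))
data_set = set(data_list)

# Worst-case lookup in a list scans every element: cost grows with n.
print(timeit.timeit(lambda: 99_999 in data_list, number=100))

# Lookup in a set is a hash probe: roughly constant cost regardless of n.
print(timeit.timeit(lambda: 99_999 in data_set, number=100))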
{ "pile_set_name": "Github" }
//#pragma comment(exestr, "$Header: /usr4/winnt/SOURCES/ddk35/src/hal/halsni/mips/RCS/jxreturn.c,v 1.3 1995/02/13 12:49:56 flo Exp $") /*++ Copyright (c) 1993-94 Siemens Nixdorf Informationssysteme AG Copyright (c) 1991 Microsoft Corporation Module Name: jxreturn.c Abstract: This module implements the HAL return to firmware function. --*/ #include "halp.h" #include "eisa.h" #include "SNIregs.h" #include "mpagent.h" VOID HalpBootCpuRestart( VOID ); VOID HalReturnToFirmware( IN FIRMWARE_REENTRY Routine ) /*++ Routine Description: This function returns control to the specified firmware routine. In most cases it generates a soft reset. Arguments: Routine - Supplies a value indicating which firmware routine to invoke. Return Value: Does not return. --*/ { KIRQL OldIrql; UCHAR DataByte; PUCHAR MachineControlRegister = (HalpIsRM200) ? (PUCHAR) RM200_MCR_ADDR : (PUCHAR) RM400_MCR_ADDR; // // Disable Interrupts. // KeRaiseIrql(HIGH_LEVEL, &OldIrql); // // Case on the type of return. // switch (Routine) { case HalHaltRoutine: // // Hang looping. // for (;;) {} case HalPowerDownRoutine: // // PowerOff is done by the SNI machines by writing the Power_Off bit to the machine control register ... // WRITE_REGISTER_UCHAR(MachineControlRegister, MCR_POWER_OFF); for (;;) {} // hang looping case HalRestartRoutine: case HalRebootRoutine: case HalInteractiveModeRoutine: if (HalpIsMulti) { ULONG Mask; Mask = HalpActiveProcessors & ~(PCR->SetMember); #if DBG DbgPrint("Send message RESTART to maskcpu = %x \n",Mask); #endif HalpSendIpi(Mask,MPA_RESTART_MESSAGE); // // if this is not the Boot CPU, we call a special Firmware entry to stop it // if (PCR->Number ) { // // remove this processor from the list of active processors // HalpActiveProcessors &= (~(PCR->SetMember)); HalSweepDcache(); // this should run only local ... #if DBG DbgPrint(" Reinit slave %x \n", ((SNI_PRIVATE_VECTOR *)(SYSTEM_BLOCK->VendorVector))->reinit_slave); #endif ((SNI_PRIVATE_VECTOR *)(SYSTEM_BLOCK->VendorVector))->reinit_slave(); } else HalpBootCpuRestart(); } DataByte = READ_REGISTER_UCHAR( &((PEISA_CONTROL) HalpOnboardControlBase)->ExtendedNmiResetControl); ((PNMI_EXTENDED_CONTROL) &DataByte)->BusReset = 1; WRITE_REGISTER_UCHAR( &((PEISA_CONTROL) HalpOnboardControlBase)->ExtendedNmiResetControl, DataByte ); KeStallExecutionProcessor(10000); ((PNMI_EXTENDED_CONTROL) &DataByte)->BusReset = 0; WRITE_REGISTER_UCHAR( &((PEISA_CONTROL) HalpOnboardControlBase)->ExtendedNmiResetControl, DataByte ); if (HalpIsRM200) { // // Reset the SNI RM200 machines by writing the reset bit to the machine control register ... // ArcReboot does not work correctly on the RM200 (Reset of the Isa Bus) // WRITE_REGISTER_UCHAR(MachineControlRegister, (MCR_INRESET | MCR_PODD)); } else { ArcReboot(); } for (;;) ; default: DbgPrint("HalReturnToFirmware invalid argument\n"); KeLowerIrql(OldIrql); DbgBreakPoint(); } } VOID HalpBootCpuRestart( VOID ) /*++ Routine Description: This function returns control to the firmware Arcreboot routine. it waits until all other cpu's have beet shut down. this code is executed only on the boot cpu Arguments: None Return Value: Does not return. --*/ { UCHAR DataByte; ULONG cpt; cpt = 0; while(HalpActiveProcessors != PCR->SetMember) { KeStallExecutionProcessor(500000); ++cpt;if (cpt == 20) break; } // // if there are still ssome processors active, we do a reset of the entire machine // if (HalpActiveProcessors != PCR->SetMember) { #if DBG DbgPrint(" Some processors did not answer (%x). Reset machine started. 
\n", HalpActiveProcessors); #endif WRITE_REGISTER_UCHAR((PUCHAR) RM400_MCR_ADDR, (MCR_INRESET | MCR_PODD)); } else { #if DBG DbgPrint("Reboot started \n"); #endif } DataByte = READ_REGISTER_UCHAR( &((PEISA_CONTROL) HalpOnboardControlBase)->ExtendedNmiResetControl); ((PNMI_EXTENDED_CONTROL) &DataByte)->BusReset = 1; WRITE_REGISTER_UCHAR( &((PEISA_CONTROL) HalpOnboardControlBase)->ExtendedNmiResetControl, DataByte ); KeStallExecutionProcessor(10000); ((PNMI_EXTENDED_CONTROL) &DataByte)->BusReset = 0; WRITE_REGISTER_UCHAR( &((PEISA_CONTROL) HalpOnboardControlBase)->ExtendedNmiResetControl, DataByte ); ArcReboot(); for (;;) ; }
{ "pile_set_name": "Github" }
module.exports = { port: 3030, trickle: true };
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <vector xmlns:android="http://schemas.android.com/apk/res/android" android:width="1068dp" android:height="1024dp" android:viewportWidth="1068" android:viewportHeight="1024"> <path android:fillColor="#ffffff" android:pathData="M221.461458 994.82099l64.376265-348.509318L28.8257 402.170511l351.473528-46.399376L532.975033 35.929825l152.835493 319.84131 351.31384 46.399376L780.192187 646.311673l64.448125 348.509318-311.667275-168.979961L221.461458 994.82099 221.461458 994.82099zM532.975033 782.559563l259.77862 140.822955-53.709099-290.460694 214.119797-203.462612-292.788148-38.658495L532.975033 124.233357l-127.239485 266.567361-292.869988 38.664483 214.201637 203.456624-53.709099 290.460694L532.975033 782.559563 532.975033 782.559563zM532.975033 782.559563" /> </vector>
{ "pile_set_name": "Github" }
/* * Globalize Culture uk * * http://github.com/jquery/globalize * * Copyright Software Freedom Conservancy, Inc. * Dual licensed under the MIT or GPL Version 2 licenses. * http://jquery.org/license * * This file was generated by the Globalize Culture Generator * Translation: bugs found in this file need to be fixed in the generator */ (function( window, undefined ) { var Globalize; if ( typeof require !== "undefined" && typeof exports !== "undefined" && typeof module !== "undefined" ) { // Assume CommonJS Globalize = require( "globalize" ); } else { // Global variable Globalize = window.Globalize; } Globalize.addCultureInfo( "uk", "default", { name: "uk", englishName: "Ukrainian", nativeName: "українська", language: "uk", numberFormat: { ",": " ", ".": ",", percent: { pattern: ["-n%","n%"], ",": " ", ".": "," }, currency: { pattern: ["-n$","n$"], ",": " ", ".": ",", symbol: "₴" } }, calendars: { standard: { "/": ".", firstDay: 1, days: { names: ["неділя","понеділок","вівторок","середа","четвер","п'ятниця","субота"], namesAbbr: ["Нд","Пн","Вт","Ср","Чт","Пт","Сб"], namesShort: ["Нд","Пн","Вт","Ср","Чт","Пт","Сб"] }, months: { names: ["Січень","Лютий","Березень","Квітень","Травень","Червень","Липень","Серпень","Вересень","Жовтень","Листопад","Грудень",""], namesAbbr: ["Січ","Лют","Бер","Кві","Тра","Чер","Лип","Сер","Вер","Жов","Лис","Гру",""] }, monthsGenitive: { names: ["січня","лютого","березня","квітня","травня","червня","липня","серпня","вересня","жовтня","листопада","грудня",""], namesAbbr: ["січ","лют","бер","кві","тра","чер","лип","сер","вер","жов","лис","гру",""] }, AM: null, PM: null, patterns: { d: "dd.MM.yyyy", D: "d MMMM yyyy' р.'", t: "H:mm", T: "H:mm:ss", f: "d MMMM yyyy' р.' H:mm", F: "d MMMM yyyy' р.' H:mm:ss", M: "d MMMM", Y: "MMMM yyyy' р.'" } } } }); }( this ));
{ "pile_set_name": "Github" }
name: softirq_entry
ID: 33
format:
	field:unsigned short common_type;	offset:0;	size:2;	signed:0;
	field:unsigned char common_flags;	offset:2;	size:1;	signed:0;
	field:unsigned char common_preempt_count;	offset:3;	size:1;	signed:0;
	field:int common_pid;	offset:4;	size:4;	signed:1;
	field:unsigned int vec;	offset:8;	size:4;	signed:0;

print fmt: "vec=%u [action=%s]", REC->vec, __print_symbolic(REC->vec, { 0, "HI" }, { 1, "TIMER" }, { 2, "NET_TX" }, { 3, "NET_RX" }, { 4, "BLOCK" }, { 5, "BLOCK_IOPOLL" }, { 6, "TASKLET" }, { 7, "SCHED" }, { 8, "HRTIMER" }, { 9, "RCU" })
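As an illustration (not part of the format file itself): a record with vec=9 rendered through this print fmt would read `vec=9 [action=RCU]`, since __print_symbolic maps the raw vector number to its softirq name.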
{ "pile_set_name": "Github" }
{ "CVE_data_meta": { "ASSIGNER": "[email protected]", "ID": "CVE-2018-17945", "STATE": "RESERVED" }, "data_format": "MITRE", "data_type": "CVE", "data_version": "4.0", "description": { "description_data": [ { "lang": "eng", "value": "** RESERVED ** This candidate has been reserved by an organization or individual that will use it when announcing a new security problem. When the candidate has been publicized, the details for this candidate will be provided." } ] } }
{ "pile_set_name": "Github" }
server {
    listen 1.1.1.1:80;
    location ~ /(nginx_status|status) {
        stub_status on;
        access_log off;
        allow 127.0.0.1;
        deny all;
    }
}

server {
    listen 1.1.1.1:81;
    location ~ ^/nginx_status$ {
        stub_status on;
        access_log off;
        allow 127.0.0.1;
        deny all;
    }
}

server {
    listen 1.1.1.1:443 ssl;
    location ~ ^/ssl_stat$ {
        stub_status on;
        access_log off;
        allow 127.0.0.1;
        deny all;
    }
}

server {
    listen 1.1.1.1:82;
    location ~ ^/(nginx_status|status)_weird_(some|thing)$ {
        stub_status on;
        access_log off;
        allow 127.0.0.1;
        deny all;
    }
}

server {
    listen 1.1.1.1:83;
    location ~ ^/(d$ {
        stub_status on;
        access_log off;
        allow 127.0.0.1;
        deny all;
    }
}

server {
    listen 1.1.1.1:84;
    location ~ valid_location {
        stub_status on;
        access_log off;
        allow 127.0.0.1;
        deny all;
    }
}

server {
    listen 1.1.1.1:85;
    location invalid_location {
        stub_status on;
        access_log off;
        allow 127.0.0.1;
        deny all;
    }
}
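For context (not part of this config file): requesting one of these stub_status locations from 127.0.0.1, the only allowed client, e.g. with `curl http://1.1.1.1/nginx_status`, typically returns connection counters in this form (values illustrative):

Active connections: 2
server accepts handled requests
 16 16 31
Reading: 0 Writing: 1 Waiting: 1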
{ "pile_set_name": "Github" }
package events_stream

import (
	"fmt"
	"github.com/bettercap/bettercap/network"
	"github.com/bettercap/bettercap/session"
	"github.com/evilsocket/islazy/tui"
)

func (mod *EventsStream) viewHIDEvent(e session.Event) {
	dev := e.Data.(*network.HIDDevice)

	if e.Tag == "hid.device.new" {
		fmt.Fprintf(mod.output, "[%s] [%s] new HID device %s detected on channel %s.\n",
			e.Time.Format(mod.timeFormat),
			tui.Green(e.Tag),
			tui.Bold(dev.Address),
			dev.Channels())
	} else if e.Tag == "hid.device.lost" {
		fmt.Fprintf(mod.output, "[%s] [%s] HID device %s lost.\n",
			e.Time.Format(mod.timeFormat),
			tui.Green(e.Tag),
			tui.Red(dev.Address))
	}
}
{ "pile_set_name": "Github" }
from .. import Provider as DateTimeProvider


class Provider(DateTimeProvider):
    def day_of_week(self):
        day = self.date('%w')
        DAY_NAMES = {
            "0": "Nedjelja",
            "1": "Ponedjeljak",
            "2": "Utorak",
            "3": "Srijeda",
            "4": "Četvrtak",
            "5": "Petak",
            "6": "Subota",
        }
        return DAY_NAMES[day]

    def month_name(self):
        month = self.month()
        MONTH_NAMES = {
            "01": "Siječanj",
            "02": "Veljača",
            "03": "Ožujak",
            "04": "Travanj",
            "05": "Svibanj",
            "06": "Lipanj",
            "07": "Srpanj",
            "08": "Kolovoz",
            "09": "Rujan",
            "10": "Listopad",
            "11": "Studeni",
            "12": "Prosinac",
        }
        return MONTH_NAMES[month]
{ "pile_set_name": "Github" }
---
date: 2016-08-02
title: Sales effectiveness
categories:
  - sales
  - tips
author_staff_member: gerald
---

Sales effectiveness refers to the ability of a company’s sales professionals to “win” at each stage of the customer’s buying process, and ultimately earn the business on the right terms and in the right timeframe.

![American River](https://source.unsplash.com/random/1500x1146)

## How do you improve sales effectiveness?

Improving sales effectiveness is not just a sales function issue; it’s a company issue, as it requires deep collaboration between sales and marketing to understand what's working and not working, and continuous improvement of the knowledge, messages, skills, and strategies that sales people apply as they work sales opportunities.

Sales effectiveness has historically been used to describe a category of technologies and consulting services aimed at helping companies improve their sales results. Many companies are creating sales effectiveness functions and have even given people titles such as VP of Sales Effectiveness.

![Coins](https://source.unsplash.com/random/1500x1147)

"By analyzing sales force performance, managers can make changes to optimize sales going forward. Toward that end, there are many ways to gauge the performance of individual salespeople and of the sales force as a whole, in addition to total annual sales."

In a survey of nearly 200 senior marketing managers, 54 percent responded that they found the "sales force effectiveness" metric very useful.

[source](https://en.wikipedia.org/wiki/Sales_effectiveness): Wikipedia
{ "pile_set_name": "Github" }
// Tencent is pleased to support the open source community by making TNN available.
//
// Copyright (C) 2020 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#ifndef TNN_SOURCE_TNN_DEVICE_METAL_METAL_PRELU_LAYER_ACC_H_
#define TNN_SOURCE_TNN_DEVICE_METAL_METAL_PRELU_LAYER_ACC_H_

#include "tnn/device/metal/acc/metal_layer_acc.h"

namespace TNN_NS {

// @brief prelu layer metal acc
class MetalPReluLayerAcc : public MetalLayerAcc {
public:
    // @brief virtual destructor
    virtual ~MetalPReluLayerAcc();

    Status AllocateBufferParam(const std::vector<Blob *> &inputs, const std::vector<Blob *> &outputs);

    Status Forward(const std::vector<Blob *> &inputs, const std::vector<Blob *> &outputs);

protected:
    std::shared_ptr<MetalLayerAcc> activate_impl_ = nullptr;
    id<MTLBuffer> buffer_slope_ = nil;
};

} // namespace TNN_NS

#endif // TNN_SOURCE_TNN_DEVICE_METAL_METAL_PRELU_LAYER_ACC_H_
{ "pile_set_name": "Github" }
OpenSSL Cookbook ================ [![Build Status](https://travis-ci.org/opscode-cookbooks/openssl.svg?branch=master)](https://travis-ci.org/opscode-cookbooks/openssl) This cookbook provides tools for working with the Ruby OpenSSL library. It includes: - A library method to generate secure random passwords in recipes, using the Ruby SecureRandom library. - An LWRP for generating RSA private keys. - An LWRP for generating x509 certificates. - An LWRP for generating dhparam.pem files. - An attribute-driven recipe for upgrading OpenSSL packages. Requirements ------------ The `random_password` mixin works on any platform with the Ruby SecureRandom module. This module is already included with Chef. The `openssl_x509`, `openssl_rsa_key` and `openssl_dhparam` LWRPs work on any platform with the OpenSSL Ruby bindings installed. These bindings are already included with Chef. The `upgrade` recipe has been tested on the following platforms: * Ubuntu 12.04, 14.04 * Debian 7.4 * CentOS 6.5 The recipe may work on other platforms or different versions of the above platforms, but this has not been tested. Dependencies ------------ This cookbook depends on the [Chef Sugar](http://supermarket.chef.io/cookbooks/chef-sugar/) cookbook. [Chef Sugar](http://supermarket.chef.io/cookbooks/chef-sugar/) is used to make the default attribute settings easier to reason about. (See [Attributes](#attributes)) Attributes ---------- * `node['openssl']['packages']` - An array of packages required to use openssl. The default attributes attempt to be smart about which packages are the default, but this may need to be changed by users of the `openssl::upgrade` recipe. * `node['openssl']['restart_services']` - An array of service resources that depend on the packages listed in the `node['openssl']['packages']` attribute. This array is empty by default, as Chef has no reasonable way to detect which applications or services are compiled against these packages. *Note* Each service listed in this array should represent a "`service`" resource specified in the recipes of the node's run list. Recipes ------- ### default An empty placeholder recipe. Takes no action. ### upgrade The upgrade recipe iterates over the list of packages in the `node['openssl']['packages']` attribute, and manages them with the `:upgrade` action. Each package will send a `:restart` notification to service resources named in the `node['openssl']['restart_services']` attribute. #### Example Usage In this example, assume the node is running the `stats_collector` daemon, which depends on the openssl library. Imagine that a new openssl vulnerability has been disclosed, and the operating system vendor has released an update to openssl to address this vulnerability. In order to protect the node, an administrator crafts this recipe: ```ruby node.default['openssl']['restart_services'] = ['stats_collector'] # other recipe code here... service 'stats_collector' do action [:enable, :start] end include_recipe 'openssl::upgrade' ``` When executed, this recipe will ensure that openssl is upgraded to the latest version, and that the `stats_collector` service is restarted to pick up the latest security fixes released in the openssl package. Libraries & LWRPs ----------------- There are two mixins packaged with this cookbook. ### random_password (`OpenSSLCookbook::RandomPassword`) The `RandomPassword` mixin can be used to generate secure random passwords in Chef cookbooks, usually for assignment to a variable or an attribute. 
`random_password` uses Ruby's SecureRandom library and is customizable. #### Example Usage ```ruby Chef::Recipe.send(:include, OpenSSLCookbook::RandomPassword) node.set['my_secure_attribute'] = random_password node.set_unless['my_secure_attribute'] = random_password node.set['my_secure_attribute'] = random_password(length: 50) node.set['my_secure_attribute'] = random_password(length: 50, mode: :base64) node.set['my_secure_attribute'] = random_password(length: 50, mode: :base64, encoding: 'ASCII') ``` Note that node attributes are widely accessible. Storing unencrypted passwords in node attributes, as in this example, carries risk. ### ~~secure_password (`Opscode::OpenSSL::Password`)~~ This library should be considered deprecated and will be removed in a future version. Please use `OpenSSLCookbook::RandomPassword` instead. The documentation is kept here for historical reasons. #### ~~Example Usage~~ ```ruby ::Chef::Recipe.send(:include, Opscode::OpenSSL::Password) node.set_unless['my_password'] = secure_password ``` ~~Note that node attributes are widely accessible. Storing unencrypted passwords in node attributes, as in this example, carries risk.~~ ### openssl_x509 This LWRP generates self-signed, PEM-formatted x509 certificates. If no existing key is specified, the LWRP will automatically generate a passwordless key with the certificate. #### Attributes | Name | Type | Description | | ----- | ---- | ------------ | | `common_name` | String (Required) | Value for the `CN` certificate field. | | `org` | String (Required) | Value for the `O` certificate field. | | `org_unit` | String (Required) | Value for the `OU` certificate field. | | `country` | String (Required) | Value for the `C` ssl field. | | `expire` | Fixnum (Optional) | Value representing the number of days from _now_ through which the issued certificate cert will remain valid. The certificate will expire after this period. | | `key_file` | String (Optional) | The path to a certificate key file on the filesystem. If the `key_file` attribute is specified, the LWRP will attempt to source a key from this location. If no key file is found, the LWRP will generate a new key file at this location. If the `key_file` attribute is not specified, the LWRP will generate a key file in the same directory as the generated certificate, with the same name as the generated certificate. | `key_pass` | String (Optional) | The passphrase for an existing key's passphrase | `key_length` | Fixnum (Optional) | The desired Bit Length of the generated key. _Default: 2048_ | | `owner` | String (optional) | The owner of all files created by the LWRP. _Default: "root"_ | | `group` | String (optional) | The group of all files created by the LWRP. _Default: "root"_ | | `mode` | String or Fixnum (Optional) | The permission mode of all files created by the LWRP. _Default: "0400"_ | #### Example Usage In this example, an administrator wishes to create a self-signed x509 certificate for use with a web server. In order to create the certificate, the administrator crafts this recipe: ```ruby openssl_x509 '/etc/httpd/ssl/mycert.pem' do common_name 'www.f00bar.com' org 'Foo Bar' org_unit 'Lab' country 'US' end ``` When executed, this recipe will generate a key certificate at `/etc/httpd/ssl/mycert.key`. It will then use that key to generate a new certificate file at `/etc/httpd/ssl/mycert.pem`. ### openssl_dhparam This LWRP generates dhparam.pem files. If a valid dhparam.pem file is found at the specified location, no new file will be created. 
If a file is found at the specified location but it is not a valid dhparam file, it will be overwritten. #### Attributes | Name | Type | Description | | ----- | ---- | ------------ | | `key_length` | Fixnum (Optional) | The desired Bit Length of the generated key. _Default: 2048_ | | `generator` | Fixnum (Optional) | The desired Diffie-Hellmann generator. Can be _2_ or _5_. | | `owner` | String (optional) | The owner of all files created by the LWRP. _Default: "root"_ | | `group` | String (optional) | The group of all files created by the LWRP. _Default: "root"_ | | `mode` | String or Fixnum (Optional) | The permission mode of all files created by the LWRP. _Default: "0644"_ | #### Example Usage In this example, an administrator wishes to create a dhparam.pem file for use with a web server. In order to create the .pem file, the administrator crafts this recipe: ```ruby openssl_dhparam '/etc/httpd/ssl/dhparam.pem' do key_length 2048 generator 2 end ``` When executed, this recipe will generate a dhparam file at `/etc/httpd/ssl/dhparam.pem`. ### openssl_rsa_key This LWRP generates rsa key files. If a valid rsa key file can be opened at the specified location, no new file will be created. If the RSA key file cannot be opened, either because it does not exist or because the password to the RSA key file does not match the password in the recipe, it will be overwritten. #### Attributes | Name | Type | Description | | ----- | ---- | ------------ | | `key_length` | Fixnum (Optional) | The desired Bit Length of the generated key. _Default: 2048_ | | `key_pass` | String (Optional) | The desired passphrase for the key. | | `owner` | String (optional) | The owner of all files created by the LWRP. _Default: "root"_ | | `group` | String (optional) | The group of all files created by the LWRP. _Default: "root"_ | | `mode` | String or Fixnum (Optional) | The permission mode of all files created by the LWRP. _Default: "0644"_ | #### Example Usage In this example, an administrator wishes to create a new RSA private key file in order to generate other certificates and public keys. In order to create the key file, the administrator crafts this recipe: ```ruby openssl_rsa_key '/etc/httpd/ssl/server.key' do key_length 2048 end ``` When executed, this recipe will generate a passwordless RSA key file at `/etc/httpd/ssl/server.key`. License and Author ------------------ Author:: Jesse Nelson (<[email protected]>) Author:: Seth Vargo (<[email protected]>) Author:: Charles Johnson (<[email protected]>) Author:: Joshua Timberman (<[email protected]>) ======= ```text Copyright:: 2009-2015, Chef Software, Inc <[email protected]> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ```
{ "pile_set_name": "Github" }
################################################################################
#
# fbtft
#
################################################################################

FBTFT_VERSION = 274035404701245e7491c0c6471c5b72ade4d491
FBTFT_SITE = $(call github,notro,fbtft,$(FBTFT_VERSION))
FBTFT_LICENSE = GPL-2.0

$(eval $(generic-package))
{ "pile_set_name": "Github" }
<!DOCTYPE html> <!--[if lt IE 7]> <html class="no-js lt-ie9 lt-ie8 lt-ie7" lang="en"> <![endif]--> <!--[if IE 7]> <html class="no-js lt-ie9 lt-ie8" lang="en"> <![endif]--> <!--[if IE 8]> <html class="no-js lt-ie9" lang="en"> <![endif]--> <!--[if gt IE 8]><!--> <html class="no-js" lang="en"> <!--<![endif]--> <head> <meta charset="utf-8"> <title>KSS: Living Styleguide</title> <meta name="description" content=""> <meta name="generator" content="kss-node" /> <meta name="viewport" content="width=device-width"> <link href='http://fonts.googleapis.com/css?family=Droid+Serif' rel='stylesheet' type='text/css'> <link rel="stylesheet" href="public/kss.css"> <link rel="stylesheet" href="public/style.css"> </head> <body><div id="kss-wrapper"> <div id="kss-nav"> <header id="kss-header"> <hgroup><h1>kss-node Styleguide</h1></hgroup> </header> <aside> <nav><ul> <li><a href="index.html">0.0: Overview</a></li> <li><a href="section-1.html">1.0: Button</a></li> <li><a href="section-2.html">2.0: Paragraph</a></li> </ul></nav> </aside> </div> <div role="main" id="kss-main"> <article id="kss-content"> <h1>Hello styleguide</h1> <p>Some information about the styleguide </p> <p>Created from KSS-Node! </p> </article> </div> <footer> </footer> <!-- SCRIPTS --> <script src="public/kss.js"></script> <script src="public/prettify.js"></script> <script src="http://ajax.googleapis.com/ajax/libs/jquery/1.7.1/jquery.min.js"></script> <script> $(document).ready(function(){ var sidebarAdjust = function() { // Match footer/body height var height = Math.max($(window).height(), $('#kss-main').height()); if ($(window).width() <= 768) { $('#kss-main, #kss-nav').height('auto'); } else { $('#kss-main, #kss-nav').height(height); } }; $(window).resize(sidebarAdjust); $(window).scroll(sidebarAdjust); sidebarAdjust(); // Ensure code blocks are highlighted properly... $('pre>code').addClass('prettyprint'); prettyPrint(); }); </script> </div></body> </html>
{ "pile_set_name": "Github" }
{ "Share Your Stream": "Share Your Stream", "Share to Facebook": "Share to Facebook", "Grow your viewership using the free Streamlabs Deck App": "Grow your viewership using the free Streamlabs Deck App", "Connected": "Connected", "Shared": "Shared", "Build a small following before you start streaming of like 10-20 viewers, whether they are friends, family, teammates, whatever": "Build a small following before you start streaming of like 10-20 viewers, whether they are friends, family, teammates, whatever", "As a small streamer one of the best ways to grow is growing with your friends or other streamers. Sharing communities and such can help growth": "As a small streamer one of the best ways to grow is growing with your friends or other streamers. Sharing communities and such can help growth", "Try to focus on each social media equally. People forget that you can build a social media presence and bring them over to your stream": "Try to focus on each social media equally. People forget that you can build a social media presence and bring them over to your stream", "I got my first group of viewers by messaging ALL my friends and family": "I got my first group of viewers by messaging ALL my friends and family", "Don't show this code on stream. Click to reveal": "Don't show this code on stream. Click to reveal" }
{ "pile_set_name": "Github" }
unityRebuildLibraryVersion: 11
unityForwardCompatibleVersion: 40
{ "pile_set_name": "Github" }
using System;
using System.Linq;

namespace Dotmim.Sync.Tools
{
    class Program
    {
        static void Main(string[] args)
        {
            try
            {
                // From dotnet command line or debug mode
                if (args.Length == 0)
                    //args = new string[] { "table", "northwind", "-a", "Customers" };
                    args = new string[] { "-s", "adv" };

                Runner.Execute(args);
            }
            catch (Exception ex)
            {
                var d = Console.ForegroundColor;
                Console.ForegroundColor = ConsoleColor.Red;
                Console.WriteLine(ex.Message);
                Console.ForegroundColor = d;
            }
        }
    }
}
{ "pile_set_name": "Github" }
/* ###
 * IP: GHIDRA
 * REVIEWED: YES
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package ghidra.app.plugin.core.datamgr;

/**
 * Interface to define a method that is called when the selected data type changes in
 * the data type sync table.
 */
interface DataTypeSyncListener {

	/**
	 * Notification that the given data type was selected.
	 */
	void dataTypeSelected(DataTypeSyncInfo syncInfo);
}
{ "pile_set_name": "Github" }
<?php /** * Zend Framework (http://framework.zend.com/) * * @link http://github.com/zendframework/zf2 for the canonical source repository * @copyright Copyright (c) 2005-2015 Zend Technologies USA Inc. (http://www.zend.com) * @license http://framework.zend.com/license/new-bsd New BSD License */ namespace Zend\Barcode\Renderer; use Traversable; use Zend\Barcode\Barcode; use Zend\Barcode\Exception as BarcodeException; use Zend\Barcode\Object; use Zend\Stdlib\ArrayUtils; /** * Class for rendering the barcode */ abstract class AbstractRenderer implements RendererInterface { /** * Namespace of the renderer for autoloading * @var string */ protected $rendererNamespace = 'Zend\Barcode\Renderer'; /** * Renderer type * @var string */ protected $type = null; /** * Activate/Deactivate the automatic rendering of exception * @var bool */ protected $automaticRenderError = false; /** * Offset of the barcode from the top of the rendering resource * @var int */ protected $topOffset = 0; /** * Offset of the barcode from the left of the rendering resource * @var int */ protected $leftOffset = 0; /** * Horizontal position of the barcode in the rendering resource * @var int */ protected $horizontalPosition = 'left'; /** * Vertical position of the barcode in the rendering resource * @var int */ protected $verticalPosition = 'top'; /** * Module size rendering * @var float */ protected $moduleSize = 1; /** * Barcode object * @var Object\ObjectInterface */ protected $barcode; /** * Drawing resource */ protected $resource; /** * Show a transparent background * @var Boolean */ protected $transparentBackground = false; /** * Constructor * @param array|Traversable $options */ public function __construct($options = null) { if ($options instanceof Traversable) { $options = ArrayUtils::iteratorToArray($options); } if (is_array($options)) { $this->setOptions($options); } $this->type = strtolower(substr( get_class($this), strlen($this->rendererNamespace) + 1 )); } /** * Set renderer state from options array * @param array $options * @return AbstractRenderer */ public function setOptions($options) { foreach ($options as $key => $value) { $method = 'set' . 
$key; if (method_exists($this, $method)) { $this->$method($value); } } return $this; } /** * Set renderer namespace for autoloading * * @param string $namespace * @return AbstractRenderer */ public function setRendererNamespace($namespace) { $this->rendererNamespace = $namespace; return $this; } /** * Retrieve renderer namespace * * @return string */ public function getRendererNamespace() { return $this->rendererNamespace; } /** * Set whether background should be transparent * Will work for SVG and Image (png and gif only) * * @param $bool * @return $this */ public function setTransparentBackground($bool) { $this->transparentBackground = $bool; return $this; } /** * @return bool */ public function getTransparentBackground() { return $this->transparentBackground; } /** * Retrieve renderer type * @return string */ public function getType() { return $this->type; } /** * Manually adjust top position * @param int $value * @return AbstractRenderer * @throws Exception\OutOfRangeException */ public function setTopOffset($value) { if (!is_numeric($value) || intval($value) < 0) { throw new Exception\OutOfRangeException( 'Vertical position must be greater than or equals 0' ); } $this->topOffset = intval($value); return $this; } /** * Retrieve vertical adjustment * @return int */ public function getTopOffset() { return $this->topOffset; } /** * Manually adjust left position * @param int $value * @return AbstractRenderer * @throws Exception\OutOfRangeException */ public function setLeftOffset($value) { if (!is_numeric($value) || intval($value) < 0) { throw new Exception\OutOfRangeException( 'Horizontal position must be greater than or equals 0' ); } $this->leftOffset = intval($value); return $this; } /** * Retrieve vertical adjustment * @return int */ public function getLeftOffset() { return $this->leftOffset; } /** * Activate/Deactivate the automatic rendering of exception * @param bool $value * @return AbstractRenderer */ public function setAutomaticRenderError($value) { $this->automaticRenderError = (bool) $value; return $this; } /** * Horizontal position of the barcode in the rendering resource * @param string $value * @return AbstractRenderer * @throws Exception\UnexpectedValueException */ public function setHorizontalPosition($value) { if (!in_array($value, array('left', 'center', 'right'))) { throw new Exception\UnexpectedValueException( "Invalid barcode position provided must be 'left', 'center' or 'right'" ); } $this->horizontalPosition = $value; return $this; } /** * Horizontal position of the barcode in the rendering resource * @return string */ public function getHorizontalPosition() { return $this->horizontalPosition; } /** * Vertical position of the barcode in the rendering resource * @param string $value * @return AbstractRenderer * @throws Exception\UnexpectedValueException */ public function setVerticalPosition($value) { if (!in_array($value, array('top', 'middle', 'bottom'))) { throw new Exception\UnexpectedValueException( "Invalid barcode position provided must be 'top', 'middle' or 'bottom'" ); } $this->verticalPosition = $value; return $this; } /** * Vertical position of the barcode in the rendering resource * @return string */ public function getVerticalPosition() { return $this->verticalPosition; } /** * Set the size of a module * @param float $value * @return AbstractRenderer * @throws Exception\OutOfRangeException */ public function setModuleSize($value) { if (!is_numeric($value) || floatval($value) <= 0) { throw new Exception\OutOfRangeException( 'Float size must be greater 
than 0' ); } $this->moduleSize = floatval($value); return $this; } /** * Set the size of a module * @return float */ public function getModuleSize() { return $this->moduleSize; } /** * Retrieve the automatic rendering of exception * @return bool */ public function getAutomaticRenderError() { return $this->automaticRenderError; } /** * Set the barcode object * @param Object\ObjectInterface $barcode * @return AbstractRenderer */ public function setBarcode(Object\ObjectInterface $barcode) { $this->barcode = $barcode; return $this; } /** * Retrieve the barcode object * @return Object\ObjectInterface */ public function getBarcode() { return $this->barcode; } /** * Checking of parameters after all settings * @return bool */ public function checkParams() { $this->checkBarcodeObject(); $this->checkSpecificParams(); return true; } /** * Check if a barcode object is correctly provided * @return void * @throws Exception\RuntimeException */ protected function checkBarcodeObject() { if ($this->barcode === null) { throw new Exception\RuntimeException( 'No barcode object provided' ); } } /** * Calculate the left and top offset of the barcode in the * rendering support * * @param float $supportHeight * @param float $supportWidth * @return void */ protected function adjustPosition($supportHeight, $supportWidth) { $barcodeHeight = $this->barcode->getHeight(true) * $this->moduleSize; if ($barcodeHeight != $supportHeight && $this->topOffset == 0) { switch ($this->verticalPosition) { case 'middle': $this->topOffset = floor(($supportHeight - $barcodeHeight) / 2); break; case 'bottom': $this->topOffset = $supportHeight - $barcodeHeight; break; case 'top': default: $this->topOffset = 0; break; } } $barcodeWidth = $this->barcode->getWidth(true) * $this->moduleSize; if ($barcodeWidth != $supportWidth && $this->leftOffset == 0) { switch ($this->horizontalPosition) { case 'center': $this->leftOffset = floor(($supportWidth - $barcodeWidth) / 2); break; case 'right': $this->leftOffset = $supportWidth - $barcodeWidth; break; case 'left': default: $this->leftOffset = 0; break; } } } /** * Draw the barcode in the rendering resource * * @throws BarcodeException\ExceptionInterface * @return mixed */ public function draw() { try { $this->checkParams(); $this->initRenderer(); $this->drawInstructionList(); } catch (BarcodeException\ExceptionInterface $e) { if ($this->automaticRenderError && !($e instanceof BarcodeException\RendererCreationException)) { $barcode = Barcode::makeBarcode( 'error', array('text' => $e->getMessage()) ); $this->setBarcode($barcode); $this->resource = null; $this->initRenderer(); $this->drawInstructionList(); } else { throw $e; } } return $this->resource; } /** * Sub process to draw the barcode instructions * Needed by the automatic error rendering */ private function drawInstructionList() { $instructionList = $this->barcode->draw(); foreach ($instructionList as $instruction) { switch ($instruction['type']) { case 'polygon': $this->drawPolygon( $instruction['points'], $instruction['color'], $instruction['filled'] ); break; case 'text': //$text, $size, $position, $font, $color, $alignment = 'center', $orientation = 0) $this->drawText( $instruction['text'], $instruction['size'], $instruction['position'], $instruction['font'], $instruction['color'], $instruction['alignment'], $instruction['orientation'] ); break; default: throw new Exception\UnexpectedValueException( 'Unkown drawing command' ); } } } /** * Checking of parameters after all settings * @return void */ abstract protected function 
checkSpecificParams(); /** * Initialize the rendering resource * @return void */ abstract protected function initRenderer(); /** * Draw a polygon in the rendering resource * @param array $points * @param int $color * @param bool $filled */ abstract protected function drawPolygon($points, $color, $filled = true); /** * Draw a polygon in the rendering resource * @param string $text * @param float $size * @param array $position * @param string $font * @param int $color * @param string $alignment * @param float $orientation */ abstract protected function drawText( $text, $size, $position, $font, $color, $alignment = 'center', $orientation = 0 ); }
{ "pile_set_name": "Github" }
// Copyright 2009 The Go Authors. All rights reserved. // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file. package scanner import ( "container/vector" "fmt" "go/token" "io" "os" "sort" ) // An implementation of an ErrorHandler may be provided to the Scanner. // If a syntax error is encountered and a handler was installed, Error // is called with a position and an error message. The position points // to the beginning of the offending token. // type ErrorHandler interface { Error(pos token.Position, msg string) } // ErrorVector implements the ErrorHandler interface. It maintains a list // of errors which can be retrieved with GetErrorList and GetError. The // zero value for an ErrorVector is an empty ErrorVector ready to use. // // A common usage pattern is to embed an ErrorVector alongside a // scanner in a data structure that uses the scanner. By passing a // reference to an ErrorVector to the scanner's Init call, default // error handling is obtained. // type ErrorVector struct { errors vector.Vector } // Reset resets an ErrorVector to no errors. func (h *ErrorVector) Reset() { h.errors.Resize(0, 0) } // ErrorCount returns the number of errors collected. func (h *ErrorVector) ErrorCount() int { return h.errors.Len() } // Within ErrorVector, an error is represented by an Error node. The // position Pos, if valid, points to the beginning of the offending // token, and the error condition is described by Msg. // type Error struct { Pos token.Position Msg string } func (e *Error) String() string { if e.Pos.Filename != "" || e.Pos.IsValid() { // don't print "<unknown position>" // TODO(gri) reconsider the semantics of Position.IsValid return e.Pos.String() + ": " + e.Msg } return e.Msg } // An ErrorList is a (possibly sorted) list of Errors. type ErrorList []*Error // ErrorList implements the sort Interface. func (p ErrorList) Len() int { return len(p) } func (p ErrorList) Swap(i, j int) { p[i], p[j] = p[j], p[i] } func (p ErrorList) Less(i, j int) bool { e := &p[i].Pos f := &p[j].Pos // Note that it is not sufficient to simply compare file offsets because // the offsets do not reflect modified line information (through //line // comments). if e.Filename < f.Filename { return true } if e.Filename == f.Filename { if e.Line < f.Line { return true } if e.Line == f.Line { return e.Column < f.Column } } return false } func (p ErrorList) String() string { switch len(p) { case 0: return "unspecified error" case 1: return p[0].String() } return fmt.Sprintf("%s (and %d more errors)", p[0].String(), len(p)-1) } // These constants control the construction of the ErrorList // returned by GetErrors. // const ( Raw = iota // leave error list unchanged Sorted // sort error list by file, line, and column number NoMultiples // sort error list and leave only the first error per line ) // GetErrorList returns the list of errors collected by an ErrorVector. // The construction of the ErrorList returned is controlled by the mode // parameter. If there are no errors, the result is nil. 
// func (h *ErrorVector) GetErrorList(mode int) ErrorList { if h.errors.Len() == 0 { return nil } list := make(ErrorList, h.errors.Len()) for i := 0; i < h.errors.Len(); i++ { list[i] = h.errors.At(i).(*Error) } if mode >= Sorted { sort.Sort(list) } if mode >= NoMultiples { var last token.Position // initial last.Line is != any legal error line i := 0 for _, e := range list { if e.Pos.Filename != last.Filename || e.Pos.Line != last.Line { last = e.Pos list[i] = e i++ } } list = list[0:i] } return list } // GetError is like GetErrorList, but it returns an os.Error instead // so that a nil result can be assigned to an os.Error variable and // remains nil. // func (h *ErrorVector) GetError(mode int) os.Error { if h.errors.Len() == 0 { return nil } return h.GetErrorList(mode) } // ErrorVector implements the ErrorHandler interface. func (h *ErrorVector) Error(pos token.Position, msg string) { h.errors.Push(&Error{pos, msg}) } // PrintError is a utility function that prints a list of errors to w, // one error per line, if the err parameter is an ErrorList. Otherwise // it prints the err string. // func PrintError(w io.Writer, err os.Error) { if list, ok := err.(ErrorList); ok { for _, e := range list { fmt.Fprintf(w, "%s\n", e) } } else { fmt.Fprintf(w, "%s\n", err) } }
{ "pile_set_name": "Github" }
/* * This file is generated by Sencha Cmd and should NOT be edited. It will be replaced * during an upgrade. */ // This flag is checked by many Components to avoid compatibility warnings when // the code is running under the slicer Ext.slicer = true; Ext.require([ 'Ext.layout.Context' ]); Ext.theme = Ext.apply(Ext.theme || {}, { /** * The array of all component manifests. These objects have the following set of * properties recognized by the slicer: * @private */ _manifest: [], /** * The collection of shortcuts for a given alias (e.g., 'widget.panel'). This is an * object keyed by alias whose values are arrays of shortcut definitions. * @private */ _shortcuts: {}, doRequire : function(xtype) { if(xtype.indexOf("widget.") != 0) { xtype = "widget." + xtype; } Ext.require([xtype]); }, /** * Adds one ore more component entries to the theme manifest. These entries will be * instantiated by the `Ext.theme.render` method when the page is ready. * * Usage: * * Ext.theme.addManifest({ * xtype: 'widget.menu', * folder: 'menu', * delegate: '.x-menu-item-link', * filename: 'menu-item-active', * config: { * floating: false, * width: 200, * items: [{ * text: 'test', * cls: 'x-menu-item-active' * }] * } * },{ * //... * }); * * @param manifest {Object} An object with type of component, slicing information and * component configuration. If this parameter is an array, each element is treated as * a manifest entry. Otherwise, each argument passed is treated as a manifest entry. * * @param manifest.xtype {String} The xtype ('grid') or alias ('widget.grid'). This * is used to specify the type of component to create as well as a potential key to * any `shortcuts` defined for the xtype. * * @param manifest.config {Object} The component configuration object. The properties * of this depend on the `xtype` of the component. * * @param [manifest.delegate] {String} The DOM query to use to select the element to * slice. The default is to slice the primary element of the component. * * @param [manifest.parentCls] An optional CSS class to add to the parent of the * component. * * @param [manifest.setup] {Function} An optional function to be called to initialize * the component. * @param manifest.setup.component {Ext.Component} The component instance * @param manifest.setup.container {Element} The component's container. * * @param [manifest.folder] {String} The folder in to which to produce image slices. * Only applies to Ext JS 4.1 (removed in 4.2). * * @param [manifest.filename] {String} The base filename for slices. * Only applies to Ext JS 4.1 (removed in 4.2). * * @param [manifest.reverse] {Boolean} True to position the slices for linear gradient * background at then opposite "end" (right or bottom) and apply the stretch to the * area before it (left or top). Only applies to Ext JS 4.1 (removed in 4.2). */ addManifest: function (manifest) { var all = Ext.theme._manifest; var add = Ext.isArray(manifest) ? manifest : arguments; if(manifest.xtype) { Ext.theme.doRequire(manifest.xtype); } for (var i = 0, n = add.length; i < n; ++i) { if(add[i].xtype) { Ext.theme.doRequire(add[i].xtype); } all.push(add[i]); } }, /** * Adds one or more shortcuts to the rendering process. A `shortcut` is an object that * looks the same as a `manifest` entry. These are combined by copying the properties * from the shortcut over those of the manifest entry. 
In basic terms: * * var config = Ext.apply(Ext.apply({}, manfiest.config), shortcut.config); * var entry = Ext.apply(Ext.apply({}, manfiest), shortcut); * entry.config = config; * * This is not exactly the process, but the idea is the same. The difference is that * the `ui` of the manifest entry is used to replace any `"{ui}"` substrings found in * any string properties of the shortcut or its `config` object. * * Usage: * * Ext.theme.addShortcuts({ * 'widget.foo': [{ * config: { * } * },{ * config: { * } * }], * * 'widget.bar': [ ... ] * }); */ addShortcuts: function (shortcuts) { var all = Ext.theme._shortcuts; for (var key in shortcuts) { var add = shortcuts[key]; var xtype = Ext.theme.addWidget(key); var existing = all[xtype]; Ext.theme.doRequire(xtype); for(var i=0; i < add.length; i++) { var config = add[i]; if(config.xtype) { Ext.theme.doRequire(config.xtype); } } if (!existing) { all[xtype] = existing = []; } existing.push.apply(existing, add); } }, /** * This method ensures that a given string has the specified prefix (e.g., "widget."). * @private */ addPrefix: function (prefix, s) { if (!s || (s.length > prefix.length && s.substring(0,prefix.length) === prefix)) { return s; } return prefix + s; }, /** * This method returns the given string with "widget." added to the front if that is * not already present. * @private */ addWidget: function (str) { return Ext.theme.addPrefix('widget.', str); }, /** * This method accepts an manifest entry and a shortcut entry and returns the merged * version. * @private */ applyShortcut: function (manifestEntry, shortcut) { var ui = manifestEntry.ui; var config = Ext.theme.copyProps({}, manifestEntry.config); var entry = Ext.theme.copyProps({}, manifestEntry); if (ui && !config.ui) { config.ui = ui; } if (shortcut) { var tpl = { ui: ui }; Ext.theme.copyProps(entry, shortcut, tpl); Ext.theme.copyProps(config, shortcut.config, tpl); } entry.xtype = Ext.theme.addWidget(entry.xtype); entry.config = config; // both guys have "config" so smash merged one on now... return entry; }, /** * This method copies property from a `src` object to a `dest` object and reaplces * `"{foo}"` fragments of any string properties as defined in the `tpl` object. * * var obj = Ext.theme.copyProps({}, { * foo: 'Hello-{ui}' * }, { * ui: 'World' * }); * * console.log('obj.foo: ' + obj.foo); // logs "Hello-World" * * @return {Object} The `dest` object or a new object (if `dest` was null). * @private */ copyProps: function (dest, src, tpl) { var out = dest || {}; var replacements = []; var token; if (src) { if (tpl) { for (token in tpl) { replacements.push({ re: new RegExp('\\{' + token + '\\}', 'g'), value: tpl[token] }); } } for (var key in src) { var val = src[key]; if (tpl && typeof val === 'string') { for (var i = 0; i < replacements.length; ++ i) { val = val.replace(replacements[i].re, replacements[i].value); } } out[key] = val; } } return out; }, /** * Renders a component given its manifest and shortcut entries. 
* @private */ renderWidget: function (manifestEntry, shortcut) { var entry = Ext.theme.applyShortcut(manifestEntry, shortcut); var config = entry.config; var widget = Ext.create(entry.xtype, config); var ct = Ext.fly(document.body).createChild({ cls: 'widget-container' }); Ext.theme.currentWidget = widget; if (widget.floating === true) { widget.floating = { shadow: false }; } if (widget.floating) { widget.focusOnToFront = false; } if (entry.setup) { entry.setup.call(widget, widget, ct); } else { widget.render(ct); if (widget.floating) { widget.showAt(0, 0); ct.setHeight(widget.getHeight()); } } var el = widget.el; if (entry.delegate) { el = el.down(entry.delegate); } el.addCls('x-slicer-target'); // this is what generateSlicerManifest looks for if (entry.over) { widget.addOverCls(); } if (config.parentCls) { el.parent().addCls(config.parentCls); } if (Ext.theme.legacy) { // The 4.1 approach has some interesting extra pieces // var data = {}; if (entry.reverse) { data.reverse = true; } if (entry.filename) { data.filename = entry.filename; } if (entry.folder) { data.folder = entry.folder; } if (entry.offsets) { data.offsets = entry.offsets; } Ext.theme.setData(el.dom, data); } Ext.theme.currentWidget = null; }, /** * Renders all of the components that have been added to the manifest. * @private */ render: function () { console.log("rendering widgets...") var manifest = Ext.theme._manifest; var shortcuts = Ext.theme._shortcuts; for (var k = 0, n = manifest ? manifest.length : 0; k < n; ++k) { var manifestEntry = manifest[k]; var xtype = Ext.theme.addWidget(manifestEntry.xtype); var widgetShortcuts = xtype ? shortcuts[xtype] : null; if (xtype && manifestEntry.ui && widgetShortcuts) { for (var i = 0; i < widgetShortcuts.length; i++) { Ext.theme.renderWidget(manifestEntry, widgetShortcuts[i]); } } else { Ext.theme.renderWidget(manifestEntry); } } }, /** * Renders all components (see `render`) and notifies the Slicer that things are ready. * @private */ run: function () { var extjsVer = Ext.versions.extjs; var globalData = {}; if (Ext.layout.Context) { Ext.override(Ext.layout.Context, { run: function () { var ok = this.callParent(), widget = Ext.theme.currentWidget; if (!ok && widget) { Ext.Error.raise("Layout run failed: " + widget.id); } return ok; } }); } console.log("loading widget definitions..."); // Previous to Ext JS 4.2, themes and their manifests where defined differently. // So pass this along if we are hosting a pre-4.2 theme. 
// if (extjsVer && extjsVer.isLessThan(new Ext.Version("4.2"))) { globalData.format = "1.0"; // tell the Slicer tool Ext.theme.legacy = true; // not for our own data collection // Check for the Cmd3.0/ExtJS4.1 variables: // if (Ext.manifest && Ext.manifest.widgets) { Ext.theme.addManifest(Ext.manifest.widgets); } if (Ext.shortcuts) { Ext.theme.addShortcuts(Ext.shortcuts); } if (Ext.userManifest && Ext.userManifest.widgets) { Ext.theme.addManifest(Ext.userManifest.widgets); } } Ext.theme.setData(document.body, globalData); Ext.theme.render(); Ext.theme.generateSlicerManifest(); }, generateSlicerManifest: function() { var now = new Date().getTime(), me = Ext.theme, // This function is defined by slicer.js (the framework-independent piece) gsm = window && window['generateSlicerManifest'], delta; me.generateStart = me.generateStart || now; delta = now - me.generateStart; if(gsm) { gsm(); } else if(delta < (10 * 1000)){ // allow the outer script wrapper a chance to inject the capture function // but stop trying after 10 seconds Ext.defer(Ext.theme.generateSlicerManifest, 100); } }, /** * Sets the `data-slicer` attribute to the JSON-encoded value of the provided data. * @private */ setData: function (el, data) { if (data) { var json = Ext.encode(data); if (json !== '{}') { el.setAttribute('data-slicer', json); } } }, /** * This used to be `loadExtStylesheet`. * @private */ loadCss: function (src, callback) { var xhr = new XMLHttpRequest(); xhr.open('GET', src); xhr.onload = function() { var css = xhr.responseText, head = document.getElementsByTagName('head')[0], style = document.createElement('style'); // There's bugginess in the next gradient syntax in WebKit r84622 // This might be fixed in a later WebKit, but for now we're going to // strip it out here since compass generates it. // // TODO: Upgrade to later WebKit revision css = css.replace(/background(-image)?: ?-webkit-linear-gradient(?:.*?);/g, ''); style.type = 'text/css'; style.innerText = css; head.appendChild(style); callback(); }; xhr.send(null); } }); console.log("registering ready listener..."); Ext.onReady(Ext.theme.run, Ext.theme);
{ "pile_set_name": "Github" }
import collections Process = collections.namedtuple('Process', 'args pid ppid')
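
# A minimal usage sketch (not part of the original module): it shows how the
# Process record defined above might be constructed and read. The example
# values and the parent/child relationship below are illustrative assumptions,
# not data taken from any real system.
parent = Process(args=['/sbin/init'], pid=1, ppid=0)
child = Process(args=['/usr/bin/python3', 'worker.py'], pid=4242, ppid=parent.pid)

# namedtuple instances support both attribute and positional access.
assert child.ppid == 1
assert child[1] == 4242

# _replace returns a new record with the selected fields changed.
reparented = child._replace(ppid=0)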
{ "pile_set_name": "Github" }
fileFormatVersion: 2 guid: 83273a08f84aa9a48b37fe0b1bae0958 timeCreated: 1461708051 licenseType: Pro MonoImporter: serializedVersion: 2 defaultReferences: [] executionOrder: 0 icon: {instanceID: 0} userData: assetBundleName: assetBundleVariant:
{ "pile_set_name": "Github" }
This page lets you add content to a TwoPaneView and automatically rearranges that content so it best fits the window. The TwoPaneView control is designed to take full advantage of dual-screen devices without any special coding. On a dual-screen device, the two-pane view ensures that the user interface (UI) is split cleanly when it spans the gap between screens, so your content is presented on both sides of the gap.

[Learn more about Two Pane View.](https://docs.microsoft.com/ru-ru/windows/uwp/design/controls-and-patterns/two-pane-view)
{ "pile_set_name": "Github" }
//=-- ExprEngineC.cpp - ExprEngine support for C expressions ----*- C++ -*-===// // // The LLVM Compiler Infrastructure // // This file is distributed under the University of Illinois Open Source // License. See LICENSE.TXT for details. // //===----------------------------------------------------------------------===// // // This file defines ExprEngine's support for C expressions. // //===----------------------------------------------------------------------===// #include "clang/AST/ExprCXX.h" #include "clang/AST/DeclCXX.h" #include "clang/StaticAnalyzer/Core/CheckerManager.h" #include "clang/StaticAnalyzer/Core/PathSensitive/ExprEngine.h" using namespace clang; using namespace ento; using llvm::APSInt; /// Optionally conjure and return a symbol for offset when processing /// an expression \p Expression. /// If \p Other is a location, conjure a symbol for \p Symbol /// (offset) if it is unknown so that memory arithmetic always /// results in an ElementRegion. /// \p Count The number of times the current basic block was visited. static SVal conjureOffsetSymbolOnLocation( SVal Symbol, SVal Other, Expr* Expression, SValBuilder &svalBuilder, unsigned Count, const LocationContext *LCtx) { QualType Ty = Expression->getType(); if (Other.getAs<Loc>() && Ty->isIntegralOrEnumerationType() && Symbol.isUnknown()) { return svalBuilder.conjureSymbolVal(Expression, LCtx, Ty, Count); } return Symbol; } void ExprEngine::VisitBinaryOperator(const BinaryOperator* B, ExplodedNode *Pred, ExplodedNodeSet &Dst) { Expr *LHS = B->getLHS()->IgnoreParens(); Expr *RHS = B->getRHS()->IgnoreParens(); // FIXME: Prechecks eventually go in ::Visit(). ExplodedNodeSet CheckedSet; ExplodedNodeSet Tmp2; getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, B, *this); // With both the LHS and RHS evaluated, process the operation itself. for (ExplodedNodeSet::iterator it=CheckedSet.begin(), ei=CheckedSet.end(); it != ei; ++it) { ProgramStateRef state = (*it)->getState(); const LocationContext *LCtx = (*it)->getLocationContext(); SVal LeftV = state->getSVal(LHS, LCtx); SVal RightV = state->getSVal(RHS, LCtx); BinaryOperator::Opcode Op = B->getOpcode(); if (Op == BO_Assign) { // EXPERIMENTAL: "Conjured" symbols. // FIXME: Handle structs. if (RightV.isUnknown()) { unsigned Count = currBldrCtx->blockCount(); RightV = svalBuilder.conjureSymbolVal(nullptr, B->getRHS(), LCtx, Count); } // Simulate the effects of a "store": bind the value of the RHS // to the L-Value represented by the LHS. SVal ExprVal = B->isGLValue() ? LeftV : RightV; evalStore(Tmp2, B, LHS, *it, state->BindExpr(B, LCtx, ExprVal), LeftV, RightV); continue; } if (!B->isAssignmentOp()) { StmtNodeBuilder Bldr(*it, Tmp2, *currBldrCtx); if (B->isAdditiveOp()) { // TODO: This can be removed after we enable history tracking with // SymSymExpr. unsigned Count = currBldrCtx->blockCount(); RightV = conjureOffsetSymbolOnLocation( RightV, LeftV, RHS, svalBuilder, Count, LCtx); LeftV = conjureOffsetSymbolOnLocation( LeftV, RightV, LHS, svalBuilder, Count, LCtx); } // Although we don't yet model pointers-to-members, we do need to make // sure that the members of temporaries have a valid 'this' pointer for // other checks. if (B->getOpcode() == BO_PtrMemD) state = createTemporaryRegionIfNeeded(state, LCtx, LHS); // Process non-assignments except commas or short-circuited // logical expressions (LAnd and LOr). 
SVal Result = evalBinOp(state, Op, LeftV, RightV, B->getType()); if (!Result.isUnknown()) { state = state->BindExpr(B, LCtx, Result); } Bldr.generateNode(B, *it, state); continue; } assert (B->isCompoundAssignmentOp()); switch (Op) { default: llvm_unreachable("Invalid opcode for compound assignment."); case BO_MulAssign: Op = BO_Mul; break; case BO_DivAssign: Op = BO_Div; break; case BO_RemAssign: Op = BO_Rem; break; case BO_AddAssign: Op = BO_Add; break; case BO_SubAssign: Op = BO_Sub; break; case BO_ShlAssign: Op = BO_Shl; break; case BO_ShrAssign: Op = BO_Shr; break; case BO_AndAssign: Op = BO_And; break; case BO_XorAssign: Op = BO_Xor; break; case BO_OrAssign: Op = BO_Or; break; } // Perform a load (the LHS). This performs the checks for // null dereferences, and so on. ExplodedNodeSet Tmp; SVal location = LeftV; evalLoad(Tmp, B, LHS, *it, state, location); for (ExplodedNodeSet::iterator I = Tmp.begin(), E = Tmp.end(); I != E; ++I) { state = (*I)->getState(); const LocationContext *LCtx = (*I)->getLocationContext(); SVal V = state->getSVal(LHS, LCtx); // Get the computation type. QualType CTy = cast<CompoundAssignOperator>(B)->getComputationResultType(); CTy = getContext().getCanonicalType(CTy); QualType CLHSTy = cast<CompoundAssignOperator>(B)->getComputationLHSType(); CLHSTy = getContext().getCanonicalType(CLHSTy); QualType LTy = getContext().getCanonicalType(LHS->getType()); // Promote LHS. V = svalBuilder.evalCast(V, CLHSTy, LTy); // Compute the result of the operation. SVal Result = svalBuilder.evalCast(evalBinOp(state, Op, V, RightV, CTy), B->getType(), CTy); // EXPERIMENTAL: "Conjured" symbols. // FIXME: Handle structs. SVal LHSVal; if (Result.isUnknown()) { // The symbolic value is actually for the type of the left-hand side // expression, not the computation type, as this is the value the // LValue on the LHS will bind to. LHSVal = svalBuilder.conjureSymbolVal(nullptr, B->getRHS(), LCtx, LTy, currBldrCtx->blockCount()); // However, we need to convert the symbol to the computation type. Result = svalBuilder.evalCast(LHSVal, CTy, LTy); } else { // The left-hand side may bind to a different value then the // computation type. LHSVal = svalBuilder.evalCast(Result, LTy, CTy); } // In C++, assignment and compound assignment operators return an // lvalue. if (B->isGLValue()) state = state->BindExpr(B, LCtx, location); else state = state->BindExpr(B, LCtx, Result); evalStore(Tmp2, B, LHS, *I, state, location, LHSVal); } } // FIXME: postvisits eventually go in ::Visit() getCheckerManager().runCheckersForPostStmt(Dst, Tmp2, B, *this); } void ExprEngine::VisitBlockExpr(const BlockExpr *BE, ExplodedNode *Pred, ExplodedNodeSet &Dst) { CanQualType T = getContext().getCanonicalType(BE->getType()); const BlockDecl *BD = BE->getBlockDecl(); // Get the value of the block itself. SVal V = svalBuilder.getBlockPointer(BD, T, Pred->getLocationContext(), currBldrCtx->blockCount()); ProgramStateRef State = Pred->getState(); // If we created a new MemRegion for the block, we should explicitly bind // the captured variables. 
if (const BlockDataRegion *BDR = dyn_cast_or_null<BlockDataRegion>(V.getAsRegion())) { BlockDataRegion::referenced_vars_iterator I = BDR->referenced_vars_begin(), E = BDR->referenced_vars_end(); auto CI = BD->capture_begin(); auto CE = BD->capture_end(); for (; I != E; ++I) { const VarRegion *capturedR = I.getCapturedRegion(); const VarRegion *originalR = I.getOriginalRegion(); // If the capture had a copy expression, use the result of evaluating // that expression, otherwise use the original value. // We rely on the invariant that the block declaration's capture variables // are a prefix of the BlockDataRegion's referenced vars (which may include // referenced globals, etc.) to enable fast lookup of the capture for a // given referenced var. const Expr *copyExpr = nullptr; if (CI != CE) { assert(CI->getVariable() == capturedR->getDecl()); copyExpr = CI->getCopyExpr(); CI++; } if (capturedR != originalR) { SVal originalV; const LocationContext *LCtx = Pred->getLocationContext(); if (copyExpr) { originalV = State->getSVal(copyExpr, LCtx); } else { originalV = State->getSVal(loc::MemRegionVal(originalR)); } State = State->bindLoc(loc::MemRegionVal(capturedR), originalV, LCtx); } } } ExplodedNodeSet Tmp; StmtNodeBuilder Bldr(Pred, Tmp, *currBldrCtx); Bldr.generateNode(BE, Pred, State->BindExpr(BE, Pred->getLocationContext(), V), nullptr, ProgramPoint::PostLValueKind); // FIXME: Move all post/pre visits to ::Visit(). getCheckerManager().runCheckersForPostStmt(Dst, Tmp, BE, *this); } ProgramStateRef ExprEngine::handleLValueBitCast( ProgramStateRef state, const Expr* Ex, const LocationContext* LCtx, QualType T, QualType ExTy, const CastExpr* CastE, StmtNodeBuilder& Bldr, ExplodedNode* Pred) { if (T->isLValueReferenceType()) { assert(!CastE->getType()->isLValueReferenceType()); ExTy = getContext().getLValueReferenceType(ExTy); } else if (T->isRValueReferenceType()) { assert(!CastE->getType()->isRValueReferenceType()); ExTy = getContext().getRValueReferenceType(ExTy); } // Delegate to SValBuilder to process. SVal OrigV = state->getSVal(Ex, LCtx); SVal V = svalBuilder.evalCast(OrigV, T, ExTy); // Negate the result if we're treating the boolean as a signed i1 if (CastE->getCastKind() == CK_BooleanToSignedIntegral) V = evalMinus(V); state = state->BindExpr(CastE, LCtx, V); if (V.isUnknown() && !OrigV.isUnknown()) { state = escapeValue(state, OrigV, PSK_EscapeOther); } Bldr.generateNode(CastE, Pred, state); return state; } ProgramStateRef ExprEngine::handleLVectorSplat( ProgramStateRef state, const LocationContext* LCtx, const CastExpr* CastE, StmtNodeBuilder &Bldr, ExplodedNode* Pred) { // Recover some path sensitivity by conjuring a new value. 
QualType resultType = CastE->getType(); if (CastE->isGLValue()) resultType = getContext().getPointerType(resultType); SVal result = svalBuilder.conjureSymbolVal(nullptr, CastE, LCtx, resultType, currBldrCtx->blockCount()); state = state->BindExpr(CastE, LCtx, result); Bldr.generateNode(CastE, Pred, state); return state; } void ExprEngine::VisitCast(const CastExpr *CastE, const Expr *Ex, ExplodedNode *Pred, ExplodedNodeSet &Dst) { ExplodedNodeSet dstPreStmt; getCheckerManager().runCheckersForPreStmt(dstPreStmt, Pred, CastE, *this); if (CastE->getCastKind() == CK_LValueToRValue) { for (ExplodedNodeSet::iterator I = dstPreStmt.begin(), E = dstPreStmt.end(); I!=E; ++I) { ExplodedNode *subExprNode = *I; ProgramStateRef state = subExprNode->getState(); const LocationContext *LCtx = subExprNode->getLocationContext(); evalLoad(Dst, CastE, CastE, subExprNode, state, state->getSVal(Ex, LCtx)); } return; } // All other casts. QualType T = CastE->getType(); QualType ExTy = Ex->getType(); if (const ExplicitCastExpr *ExCast=dyn_cast_or_null<ExplicitCastExpr>(CastE)) T = ExCast->getTypeAsWritten(); StmtNodeBuilder Bldr(dstPreStmt, Dst, *currBldrCtx); for (ExplodedNodeSet::iterator I = dstPreStmt.begin(), E = dstPreStmt.end(); I != E; ++I) { Pred = *I; ProgramStateRef state = Pred->getState(); const LocationContext *LCtx = Pred->getLocationContext(); switch (CastE->getCastKind()) { case CK_LValueToRValue: llvm_unreachable("LValueToRValue casts handled earlier."); case CK_ToVoid: continue; // The analyzer doesn't do anything special with these casts, // since it understands retain/release semantics already. case CK_ARCProduceObject: case CK_ARCConsumeObject: case CK_ARCReclaimReturnedObject: case CK_ARCExtendBlockObject: // Fall-through. case CK_CopyAndAutoreleaseBlockObject: // The analyser can ignore atomic casts for now, although some future // checkers may want to make certain that you're not modifying the same // value through atomic and nonatomic pointers. case CK_AtomicToNonAtomic: case CK_NonAtomicToAtomic: // True no-ops. case CK_NoOp: case CK_ConstructorConversion: case CK_UserDefinedConversion: case CK_FunctionToPointerDecay: case CK_BuiltinFnToFnPtr: { // Copy the SVal of Ex to CastE. ProgramStateRef state = Pred->getState(); const LocationContext *LCtx = Pred->getLocationContext(); SVal V = state->getSVal(Ex, LCtx); state = state->BindExpr(CastE, LCtx, V); Bldr.generateNode(CastE, Pred, state); continue; } case CK_MemberPointerToBoolean: case CK_PointerToBoolean: { SVal V = state->getSVal(Ex, LCtx); auto PTMSV = V.getAs<nonloc::PointerToMember>(); if (PTMSV) V = svalBuilder.makeTruthVal(!PTMSV->isNullMemberPointer(), ExTy); if (V.isUndef() || PTMSV) { state = state->BindExpr(CastE, LCtx, V); Bldr.generateNode(CastE, Pred, state); continue; } // Explicitly proceed with default handler for this case cascade. state = handleLValueBitCast(state, Ex, LCtx, T, ExTy, CastE, Bldr, Pred); continue; } case CK_Dependent: case CK_ArrayToPointerDecay: case CK_BitCast: case CK_AddressSpaceConversion: case CK_BooleanToSignedIntegral: case CK_NullToPointer: case CK_IntegralToPointer: case CK_PointerToIntegral: { SVal V = state->getSVal(Ex, LCtx); if (V.getAs<nonloc::PointerToMember>()) { state = state->BindExpr(CastE, LCtx, UnknownVal()); Bldr.generateNode(CastE, Pred, state); continue; } // Explicitly proceed with default handler for this case cascade. 
state = handleLValueBitCast(state, Ex, LCtx, T, ExTy, CastE, Bldr, Pred); continue; } case CK_IntegralToBoolean: case CK_IntegralToFloating: case CK_FloatingToIntegral: case CK_FloatingToBoolean: case CK_FloatingCast: case CK_FloatingRealToComplex: case CK_FloatingComplexToReal: case CK_FloatingComplexToBoolean: case CK_FloatingComplexCast: case CK_FloatingComplexToIntegralComplex: case CK_IntegralRealToComplex: case CK_IntegralComplexToReal: case CK_IntegralComplexToBoolean: case CK_IntegralComplexCast: case CK_IntegralComplexToFloatingComplex: case CK_CPointerToObjCPointerCast: case CK_BlockPointerToObjCPointerCast: case CK_AnyPointerToBlockPointerCast: case CK_ObjCObjectLValueCast: case CK_ZeroToOCLEvent: case CK_ZeroToOCLQueue: case CK_IntToOCLSampler: case CK_LValueBitCast: { state = handleLValueBitCast(state, Ex, LCtx, T, ExTy, CastE, Bldr, Pred); continue; } case CK_IntegralCast: { // Delegate to SValBuilder to process. SVal V = state->getSVal(Ex, LCtx); V = svalBuilder.evalIntegralCast(state, V, T, ExTy); state = state->BindExpr(CastE, LCtx, V); Bldr.generateNode(CastE, Pred, state); continue; } case CK_DerivedToBase: case CK_UncheckedDerivedToBase: { // For DerivedToBase cast, delegate to the store manager. SVal val = state->getSVal(Ex, LCtx); val = getStoreManager().evalDerivedToBase(val, CastE); state = state->BindExpr(CastE, LCtx, val); Bldr.generateNode(CastE, Pred, state); continue; } // Handle C++ dyn_cast. case CK_Dynamic: { SVal val = state->getSVal(Ex, LCtx); // Compute the type of the result. QualType resultType = CastE->getType(); if (CastE->isGLValue()) resultType = getContext().getPointerType(resultType); bool Failed = false; // Check if the value being cast evaluates to 0. if (val.isZeroConstant()) Failed = true; // Else, evaluate the cast. else val = getStoreManager().attemptDownCast(val, T, Failed); if (Failed) { if (T->isReferenceType()) { // A bad_cast exception is thrown if input value is a reference. // Currently, we model this, by generating a sink. Bldr.generateSink(CastE, Pred, state); continue; } else { // If the cast fails on a pointer, bind to 0. state = state->BindExpr(CastE, LCtx, svalBuilder.makeNull()); } } else { // If we don't know if the cast succeeded, conjure a new symbol. if (val.isUnknown()) { DefinedOrUnknownSVal NewSym = svalBuilder.conjureSymbolVal(nullptr, CastE, LCtx, resultType, currBldrCtx->blockCount()); state = state->BindExpr(CastE, LCtx, NewSym); } else // Else, bind to the derived region value. state = state->BindExpr(CastE, LCtx, val); } Bldr.generateNode(CastE, Pred, state); continue; } case CK_BaseToDerived: { SVal val = state->getSVal(Ex, LCtx); QualType resultType = CastE->getType(); if (CastE->isGLValue()) resultType = getContext().getPointerType(resultType); bool Failed = false; if (!val.isConstant()) { val = getStoreManager().attemptDownCast(val, T, Failed); } // Failed to cast or the result is unknown, fall back to conservative. 
if (Failed || val.isUnknown()) { val = svalBuilder.conjureSymbolVal(nullptr, CastE, LCtx, resultType, currBldrCtx->blockCount()); } state = state->BindExpr(CastE, LCtx, val); Bldr.generateNode(CastE, Pred, state); continue; } case CK_NullToMemberPointer: { SVal V = svalBuilder.getMemberPointer(nullptr); state = state->BindExpr(CastE, LCtx, V); Bldr.generateNode(CastE, Pred, state); continue; } case CK_DerivedToBaseMemberPointer: case CK_BaseToDerivedMemberPointer: case CK_ReinterpretMemberPointer: { SVal V = state->getSVal(Ex, LCtx); if (auto PTMSV = V.getAs<nonloc::PointerToMember>()) { SVal CastedPTMSV = svalBuilder.makePointerToMember( getBasicVals().accumCXXBase( llvm::make_range<CastExpr::path_const_iterator>( CastE->path_begin(), CastE->path_end()), *PTMSV)); state = state->BindExpr(CastE, LCtx, CastedPTMSV); Bldr.generateNode(CastE, Pred, state); continue; } // Explicitly proceed with default handler for this case cascade. state = handleLVectorSplat(state, LCtx, CastE, Bldr, Pred); continue; } // Various C++ casts that are not handled yet. case CK_ToUnion: case CK_VectorSplat: { state = handleLVectorSplat(state, LCtx, CastE, Bldr, Pred); continue; } } } } void ExprEngine::VisitCompoundLiteralExpr(const CompoundLiteralExpr *CL, ExplodedNode *Pred, ExplodedNodeSet &Dst) { StmtNodeBuilder B(Pred, Dst, *currBldrCtx); ProgramStateRef State = Pred->getState(); const LocationContext *LCtx = Pred->getLocationContext(); const Expr *Init = CL->getInitializer(); SVal V = State->getSVal(CL->getInitializer(), LCtx); if (isa<CXXConstructExpr>(Init) || isa<CXXStdInitializerListExpr>(Init)) { // No work needed. Just pass the value up to this expression. } else { assert(isa<InitListExpr>(Init)); Loc CLLoc = State->getLValue(CL, LCtx); State = State->bindLoc(CLLoc, V, LCtx); if (CL->isGLValue()) V = CLLoc; } B.generateNode(CL, Pred, State->BindExpr(CL, LCtx, V)); } void ExprEngine::VisitDeclStmt(const DeclStmt *DS, ExplodedNode *Pred, ExplodedNodeSet &Dst) { // Assumption: The CFG has one DeclStmt per Decl. const VarDecl *VD = dyn_cast_or_null<VarDecl>(*DS->decl_begin()); if (!VD) { //TODO:AZ: remove explicit insertion after refactoring is done. Dst.insert(Pred); return; } // FIXME: all pre/post visits should eventually be handled by ::Visit(). ExplodedNodeSet dstPreVisit; getCheckerManager().runCheckersForPreStmt(dstPreVisit, Pred, DS, *this); ExplodedNodeSet dstEvaluated; StmtNodeBuilder B(dstPreVisit, dstEvaluated, *currBldrCtx); for (ExplodedNodeSet::iterator I = dstPreVisit.begin(), E = dstPreVisit.end(); I!=E; ++I) { ExplodedNode *N = *I; ProgramStateRef state = N->getState(); const LocationContext *LC = N->getLocationContext(); // Decls without InitExpr are not initialized explicitly. if (const Expr *InitEx = VD->getInit()) { // Note in the state that the initialization has occurred. ExplodedNode *UpdatedN = N; SVal InitVal = state->getSVal(InitEx, LC); assert(DS->isSingleDecl()); if (getObjectUnderConstruction(state, DS, LC)) { state = finishObjectConstruction(state, DS, LC); // We constructed the object directly in the variable. // No need to bind anything. B.generateNode(DS, UpdatedN, state); } else { // Recover some path-sensitivity if a scalar value evaluated to // UnknownVal. 
if (InitVal.isUnknown()) { QualType Ty = InitEx->getType(); if (InitEx->isGLValue()) { Ty = getContext().getPointerType(Ty); } InitVal = svalBuilder.conjureSymbolVal(nullptr, InitEx, LC, Ty, currBldrCtx->blockCount()); } B.takeNodes(UpdatedN); ExplodedNodeSet Dst2; evalBind(Dst2, DS, UpdatedN, state->getLValue(VD, LC), InitVal, true); B.addNodes(Dst2); } } else { B.generateNode(DS, N, state); } } getCheckerManager().runCheckersForPostStmt(Dst, B.getResults(), DS, *this); } void ExprEngine::VisitLogicalExpr(const BinaryOperator* B, ExplodedNode *Pred, ExplodedNodeSet &Dst) { assert(B->getOpcode() == BO_LAnd || B->getOpcode() == BO_LOr); StmtNodeBuilder Bldr(Pred, Dst, *currBldrCtx); ProgramStateRef state = Pred->getState(); if (B->getType()->isVectorType()) { // FIXME: We do not model vector arithmetic yet. When adding support for // that, note that the CFG-based reasoning below does not apply, because // logical operators on vectors are not short-circuit. Currently they are // modeled as short-circuit in Clang CFG but this is incorrect. // Do not set the value for the expression. It'd be UnknownVal by default. Bldr.generateNode(B, Pred, state); return; } ExplodedNode *N = Pred; while (!N->getLocation().getAs<BlockEntrance>()) { ProgramPoint P = N->getLocation(); assert(P.getAs<PreStmt>()|| P.getAs<PreStmtPurgeDeadSymbols>()); (void) P; assert(N->pred_size() == 1); N = *N->pred_begin(); } assert(N->pred_size() == 1); N = *N->pred_begin(); BlockEdge BE = N->getLocation().castAs<BlockEdge>(); SVal X; // Determine the value of the expression by introspecting how we // got this location in the CFG. This requires looking at the previous // block we were in and what kind of control-flow transfer was involved. const CFGBlock *SrcBlock = BE.getSrc(); // The only terminator (if there is one) that makes sense is a logical op. CFGTerminator T = SrcBlock->getTerminator(); if (const BinaryOperator *Term = cast_or_null<BinaryOperator>(T.getStmt())) { (void) Term; assert(Term->isLogicalOp()); assert(SrcBlock->succ_size() == 2); // Did we take the true or false branch? unsigned constant = (*SrcBlock->succ_begin() == BE.getDst()) ? 1 : 0; X = svalBuilder.makeIntVal(constant, B->getType()); } else { // If there is no terminator, by construction the last statement // in SrcBlock is the value of the enclosing expression. // However, we still need to constrain that value to be 0 or 1. assert(!SrcBlock->empty()); CFGStmt Elem = SrcBlock->rbegin()->castAs<CFGStmt>(); const Expr *RHS = cast<Expr>(Elem.getStmt()); SVal RHSVal = N->getState()->getSVal(RHS, Pred->getLocationContext()); if (RHSVal.isUndef()) { X = RHSVal; } else { // We evaluate "RHSVal != 0" expression which result in 0 if the value is // known to be false, 1 if the value is known to be true and a new symbol // when the assumption is unknown. 
nonloc::ConcreteInt Zero(getBasicVals().getValue(0, B->getType())); X = evalBinOp(N->getState(), BO_NE, svalBuilder.evalCast(RHSVal, B->getType(), RHS->getType()), Zero, B->getType()); } } Bldr.generateNode(B, Pred, state->BindExpr(B, Pred->getLocationContext(), X)); } void ExprEngine::VisitInitListExpr(const InitListExpr *IE, ExplodedNode *Pred, ExplodedNodeSet &Dst) { StmtNodeBuilder B(Pred, Dst, *currBldrCtx); ProgramStateRef state = Pred->getState(); const LocationContext *LCtx = Pred->getLocationContext(); QualType T = getContext().getCanonicalType(IE->getType()); unsigned NumInitElements = IE->getNumInits(); if (!IE->isGLValue() && (T->isArrayType() || T->isRecordType() || T->isVectorType() || T->isAnyComplexType())) { llvm::ImmutableList<SVal> vals = getBasicVals().getEmptySValList(); // Handle base case where the initializer has no elements. // e.g: static int* myArray[] = {}; if (NumInitElements == 0) { SVal V = svalBuilder.makeCompoundVal(T, vals); B.generateNode(IE, Pred, state->BindExpr(IE, LCtx, V)); return; } for (InitListExpr::const_reverse_iterator it = IE->rbegin(), ei = IE->rend(); it != ei; ++it) { SVal V = state->getSVal(cast<Expr>(*it), LCtx); vals = getBasicVals().prependSVal(V, vals); } B.generateNode(IE, Pred, state->BindExpr(IE, LCtx, svalBuilder.makeCompoundVal(T, vals))); return; } // Handle scalars: int{5} and int{} and GLvalues. // Note, if the InitListExpr is a GLvalue, it means that there is an address // representing it, so it must have a single init element. assert(NumInitElements <= 1); SVal V; if (NumInitElements == 0) V = getSValBuilder().makeZeroVal(T); else V = state->getSVal(IE->getInit(0), LCtx); B.generateNode(IE, Pred, state->BindExpr(IE, LCtx, V)); } void ExprEngine::VisitGuardedExpr(const Expr *Ex, const Expr *L, const Expr *R, ExplodedNode *Pred, ExplodedNodeSet &Dst) { assert(L && R); StmtNodeBuilder B(Pred, Dst, *currBldrCtx); ProgramStateRef state = Pred->getState(); const LocationContext *LCtx = Pred->getLocationContext(); const CFGBlock *SrcBlock = nullptr; // Find the predecessor block. ProgramStateRef SrcState = state; for (const ExplodedNode *N = Pred ; N ; N = *N->pred_begin()) { ProgramPoint PP = N->getLocation(); if (PP.getAs<PreStmtPurgeDeadSymbols>() || PP.getAs<BlockEntrance>()) { // If the state N has multiple predecessors P, it means that successors // of P are all equivalent. // In turn, that means that all nodes at P are equivalent in terms // of observable behavior at N, and we can follow any of them. // FIXME: a more robust solution which does not walk up the tree. continue; } SrcBlock = PP.castAs<BlockEdge>().getSrc(); SrcState = N->getState(); break; } assert(SrcBlock && "missing function entry"); // Find the last expression in the predecessor block. That is the // expression that is used for the value of the ternary expression. bool hasValue = false; SVal V; for (CFGElement CE : llvm::reverse(*SrcBlock)) { if (Optional<CFGStmt> CS = CE.getAs<CFGStmt>()) { const Expr *ValEx = cast<Expr>(CS->getStmt()); ValEx = ValEx->IgnoreParens(); // For GNU extension '?:' operator, the left hand side will be an // OpaqueValueExpr, so get the underlying expression. if (const OpaqueValueExpr *OpaqueEx = dyn_cast<OpaqueValueExpr>(L)) L = OpaqueEx->getSourceExpr(); // If the last expression in the predecessor block matches true or false // subexpression, get its the value. 
if (ValEx == L->IgnoreParens() || ValEx == R->IgnoreParens()) { hasValue = true; V = SrcState->getSVal(ValEx, LCtx); } break; } } if (!hasValue) V = svalBuilder.conjureSymbolVal(nullptr, Ex, LCtx, currBldrCtx->blockCount()); // Generate a new node with the binding from the appropriate path. B.generateNode(Ex, Pred, state->BindExpr(Ex, LCtx, V, true)); } void ExprEngine:: VisitOffsetOfExpr(const OffsetOfExpr *OOE, ExplodedNode *Pred, ExplodedNodeSet &Dst) { StmtNodeBuilder B(Pred, Dst, *currBldrCtx); APSInt IV; if (OOE->EvaluateAsInt(IV, getContext())) { assert(IV.getBitWidth() == getContext().getTypeSize(OOE->getType())); assert(OOE->getType()->isBuiltinType()); assert(OOE->getType()->getAs<BuiltinType>()->isInteger()); assert(IV.isSigned() == OOE->getType()->isSignedIntegerType()); SVal X = svalBuilder.makeIntVal(IV); B.generateNode(OOE, Pred, Pred->getState()->BindExpr(OOE, Pred->getLocationContext(), X)); } // FIXME: Handle the case where __builtin_offsetof is not a constant. } void ExprEngine:: VisitUnaryExprOrTypeTraitExpr(const UnaryExprOrTypeTraitExpr *Ex, ExplodedNode *Pred, ExplodedNodeSet &Dst) { // FIXME: Prechecks eventually go in ::Visit(). ExplodedNodeSet CheckedSet; getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, Ex, *this); ExplodedNodeSet EvalSet; StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx); QualType T = Ex->getTypeOfArgument(); for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) { if (Ex->getKind() == UETT_SizeOf) { if (!T->isIncompleteType() && !T->isConstantSizeType()) { assert(T->isVariableArrayType() && "Unknown non-constant-sized type."); // FIXME: Add support for VLA type arguments and VLA expressions. // When that happens, we should probably refactor VLASizeChecker's code. continue; } else if (T->getAs<ObjCObjectType>()) { // Some code tries to take the sizeof an ObjCObjectType, relying that // the compiler has laid out its representation. Just report Unknown // for these. continue; } } APSInt Value = Ex->EvaluateKnownConstInt(getContext()); CharUnits amt = CharUnits::fromQuantity(Value.getZExtValue()); ProgramStateRef state = (*I)->getState(); state = state->BindExpr(Ex, (*I)->getLocationContext(), svalBuilder.makeIntVal(amt.getQuantity(), Ex->getType())); Bldr.generateNode(Ex, *I, state); } getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, Ex, *this); } void ExprEngine::handleUOExtension(ExplodedNodeSet::iterator I, const UnaryOperator *U, StmtNodeBuilder &Bldr) { // FIXME: We can probably just have some magic in Environment::getSVal() // that propagates values, instead of creating a new node here. // // Unary "+" is a no-op, similar to a parentheses. We still have places // where it may be a block-level expression, so we need to // generate an extra node that just propagates the value of the // subexpression. const Expr *Ex = U->getSubExpr()->IgnoreParens(); ProgramStateRef state = (*I)->getState(); const LocationContext *LCtx = (*I)->getLocationContext(); Bldr.generateNode(U, *I, state->BindExpr(U, LCtx, state->getSVal(Ex, LCtx))); } void ExprEngine::VisitUnaryOperator(const UnaryOperator* U, ExplodedNode *Pred, ExplodedNodeSet &Dst) { // FIXME: Prechecks eventually go in ::Visit(). 
ExplodedNodeSet CheckedSet; getCheckerManager().runCheckersForPreStmt(CheckedSet, Pred, U, *this); ExplodedNodeSet EvalSet; StmtNodeBuilder Bldr(CheckedSet, EvalSet, *currBldrCtx); for (ExplodedNodeSet::iterator I = CheckedSet.begin(), E = CheckedSet.end(); I != E; ++I) { switch (U->getOpcode()) { default: { Bldr.takeNodes(*I); ExplodedNodeSet Tmp; VisitIncrementDecrementOperator(U, *I, Tmp); Bldr.addNodes(Tmp); break; } case UO_Real: { const Expr *Ex = U->getSubExpr()->IgnoreParens(); // FIXME: We don't have complex SValues yet. if (Ex->getType()->isAnyComplexType()) { // Just report "Unknown." break; } // For all other types, UO_Real is an identity operation. assert (U->getType() == Ex->getType()); ProgramStateRef state = (*I)->getState(); const LocationContext *LCtx = (*I)->getLocationContext(); Bldr.generateNode(U, *I, state->BindExpr(U, LCtx, state->getSVal(Ex, LCtx))); break; } case UO_Imag: { const Expr *Ex = U->getSubExpr()->IgnoreParens(); // FIXME: We don't have complex SValues yet. if (Ex->getType()->isAnyComplexType()) { // Just report "Unknown." break; } // For all other types, UO_Imag returns 0. ProgramStateRef state = (*I)->getState(); const LocationContext *LCtx = (*I)->getLocationContext(); SVal X = svalBuilder.makeZeroVal(Ex->getType()); Bldr.generateNode(U, *I, state->BindExpr(U, LCtx, X)); break; } case UO_AddrOf: { // Process pointer-to-member address operation. const Expr *Ex = U->getSubExpr()->IgnoreParens(); if (const DeclRefExpr *DRE = dyn_cast<DeclRefExpr>(Ex)) { const ValueDecl *VD = DRE->getDecl(); if (isa<CXXMethodDecl>(VD) || isa<FieldDecl>(VD)) { ProgramStateRef State = (*I)->getState(); const LocationContext *LCtx = (*I)->getLocationContext(); SVal SV = svalBuilder.getMemberPointer(cast<DeclaratorDecl>(VD)); Bldr.generateNode(U, *I, State->BindExpr(U, LCtx, SV)); break; } } // Explicitly proceed with default handler for this case cascade. handleUOExtension(I, U, Bldr); break; } case UO_Plus: assert(!U->isGLValue()); // FALL-THROUGH. case UO_Deref: case UO_Extension: { handleUOExtension(I, U, Bldr); break; } case UO_LNot: case UO_Minus: case UO_Not: { assert (!U->isGLValue()); const Expr *Ex = U->getSubExpr()->IgnoreParens(); ProgramStateRef state = (*I)->getState(); const LocationContext *LCtx = (*I)->getLocationContext(); // Get the value of the subexpression. SVal V = state->getSVal(Ex, LCtx); if (V.isUnknownOrUndef()) { Bldr.generateNode(U, *I, state->BindExpr(U, LCtx, V)); break; } switch (U->getOpcode()) { default: llvm_unreachable("Invalid Opcode."); case UO_Not: // FIXME: Do we need to handle promotions? state = state->BindExpr(U, LCtx, evalComplement(V.castAs<NonLoc>())); break; case UO_Minus: // FIXME: Do we need to handle promotions? state = state->BindExpr(U, LCtx, evalMinus(V.castAs<NonLoc>())); break; case UO_LNot: // C99 6.5.3.3: "The expression !E is equivalent to (0==E)." // // Note: technically we do "E == 0", but this is the same in the // transfer functions as "0 == E". SVal Result; if (Optional<Loc> LV = V.getAs<Loc>()) { Loc X = svalBuilder.makeNullWithType(Ex->getType()); Result = evalBinOp(state, BO_EQ, *LV, X, U->getType()); } else if (Ex->getType()->isFloatingType()) { // FIXME: handle floating point types. 
Result = UnknownVal(); } else { nonloc::ConcreteInt X(getBasicVals().getValue(0, Ex->getType())); Result = evalBinOp(state, BO_EQ, V.castAs<NonLoc>(), X, U->getType()); } state = state->BindExpr(U, LCtx, Result); break; } Bldr.generateNode(U, *I, state); break; } } } getCheckerManager().runCheckersForPostStmt(Dst, EvalSet, U, *this); } void ExprEngine::VisitIncrementDecrementOperator(const UnaryOperator* U, ExplodedNode *Pred, ExplodedNodeSet &Dst) { // Handle ++ and -- (both pre- and post-increment). assert (U->isIncrementDecrementOp()); const Expr *Ex = U->getSubExpr()->IgnoreParens(); const LocationContext *LCtx = Pred->getLocationContext(); ProgramStateRef state = Pred->getState(); SVal loc = state->getSVal(Ex, LCtx); // Perform a load. ExplodedNodeSet Tmp; evalLoad(Tmp, U, Ex, Pred, state, loc); ExplodedNodeSet Dst2; StmtNodeBuilder Bldr(Tmp, Dst2, *currBldrCtx); for (ExplodedNodeSet::iterator I=Tmp.begin(), E=Tmp.end();I!=E;++I) { state = (*I)->getState(); assert(LCtx == (*I)->getLocationContext()); SVal V2_untested = state->getSVal(Ex, LCtx); // Propagate unknown and undefined values. if (V2_untested.isUnknownOrUndef()) { state = state->BindExpr(U, LCtx, V2_untested); // Perform the store, so that the uninitialized value detection happens. Bldr.takeNodes(*I); ExplodedNodeSet Dst3; evalStore(Dst3, U, U, *I, state, loc, V2_untested); Bldr.addNodes(Dst3); continue; } DefinedSVal V2 = V2_untested.castAs<DefinedSVal>(); // Handle all other values. BinaryOperator::Opcode Op = U->isIncrementOp() ? BO_Add : BO_Sub; // If the UnaryOperator has non-location type, use its type to create the // constant value. If the UnaryOperator has location type, create the // constant with int type and pointer width. SVal RHS; SVal Result; if (U->getType()->isAnyPointerType()) RHS = svalBuilder.makeArrayIndex(1); else if (U->getType()->isIntegralOrEnumerationType()) RHS = svalBuilder.makeIntVal(1, U->getType()); else RHS = UnknownVal(); // The use of an operand of type bool with the ++ operators is deprecated // but valid until C++17. And if the operand of the ++ operator is of type // bool, it is set to true until C++17. Note that for '_Bool', it is also // set to true when it encounters ++ operator. if (U->getType()->isBooleanType() && U->isIncrementOp()) Result = svalBuilder.makeTruthVal(true, U->getType()); else Result = evalBinOp(state, Op, V2, RHS, U->getType()); // Conjure a new symbol if necessary to recover precision. if (Result.isUnknown()){ DefinedOrUnknownSVal SymVal = svalBuilder.conjureSymbolVal(nullptr, U, LCtx, currBldrCtx->blockCount()); Result = SymVal; // If the value is a location, ++/-- should always preserve // non-nullness. Check if the original value was non-null, and if so // propagate that constraint. if (Loc::isLocType(U->getType())) { DefinedOrUnknownSVal Constraint = svalBuilder.evalEQ(state, V2,svalBuilder.makeZeroVal(U->getType())); if (!state->assume(Constraint, true)) { // It isn't feasible for the original value to be null. // Propagate this constraint. Constraint = svalBuilder.evalEQ(state, SymVal, svalBuilder.makeZeroVal(U->getType())); state = state->assume(Constraint, false); assert(state); } } } // Since the lvalue-to-rvalue conversion is explicit in the AST, // we bind an l-value if the operator is prefix and an lvalue (in C++). if (U->isGLValue()) state = state->BindExpr(U, LCtx, loc); else state = state->BindExpr(U, LCtx, U->isPostfix() ? V2 : Result); // Perform the store. 
Bldr.takeNodes(*I); ExplodedNodeSet Dst3; evalStore(Dst3, U, U, *I, state, loc, Result); Bldr.addNodes(Dst3); } Dst.insert(Dst2); }
{ "pile_set_name": "Github" }
<?php declare(strict_types=1); /** * Copyright (c) 2013-2020 OpenCFP * * For the full copyright and license information, please view * the LICENSE file that was distributed with this source code. * * @see https://github.com/opencfp/opencfp */ namespace OpenCFP\Infrastructure\Auth; use Cartalyst\Sentinel\Sessions\SessionInterface as SentinelSessionInterface; use Symfony\Component\HttpFoundation\Session\SessionInterface as SymfonySessionInterface; final class SymfonySentinelSession implements SentinelSessionInterface { /** * @var SymfonySessionInterface */ private $session; /** * @var string */ private $key; public function __construct(SymfonySessionInterface $session, $key = null) { $this->session = $session; $this->key = $key ?: 'cartalyst_sentinel'; } /** * Put a value in the Sentinel session. * * @param mixed $value */ public function put($value) { $this->session->set($this->key, $value); } /** * Returns the Sentinel session value. * * @return mixed */ public function get() { return $this->session->get($this->key); } /** * Removes the Sentinel session. */ public function forget() { $this->session->remove($this->key); } }
{ "pile_set_name": "Github" }
/* * Copyright (C) 2008 Apple Inc. All Rights Reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions * are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY APPLE INC. ``AS IS'' AND ANY * EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR * CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, * PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ #ifndef NavigatorBase_h #define NavigatorBase_h #include <wtf/Forward.h> namespace WebCore { class NavigatorBase { public: static String appName(); String appVersion() const; virtual String userAgent() const = 0; static String platform(); static String appCodeName(); static String product(); static String productSub(); static String vendor(); static String vendorSub(); static bool onLine(); static String language(); #if ENABLE(NAVIGATOR_HWCONCURRENCY) static int hardwareConcurrency(); #endif protected: virtual ~NavigatorBase(); }; } // namespace WebCore #endif // NavigatorBase_h
{ "pile_set_name": "Github" }
# Event 13 - task_0 ###### Version: 0 ## Description None ## Data Dictionary |Standard Name|Field Name|Type|Description|Sample Value| |---|---|---|---|---| |TBD|WString1|UnicodeString|None|`None`| ## Tags * etw_level_Informational * etw_task_task_0
{ "pile_set_name": "Github" }
name: Build on: pull_request: release: types: - published jobs: build_wheels: name: Build wheels on ${{ matrix.os }} runs-on: ${{ matrix.os }} strategy: matrix: os: [ubuntu-18.04, windows-latest, macos-latest] steps: - uses: actions/checkout@v2 # Include all history and tags with: fetch-depth: 0 - uses: actions/setup-python@v2 name: Install Python with: python-version: '3.8' - name: Install cibuildwheel run: | python -m pip install cibuildwheel==1.5.5 - name: Build wheels run: | python -m cibuildwheel --output-dir wheelhouse env: CIBW_SKIP: pp* cp27-win* - uses: actions/upload-artifact@v2 with: path: ./wheelhouse/*.whl build_sdist: name: Build source distribution runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 # Include all history and tags with: fetch-depth: 0 - uses: actions/setup-python@v2 name: Install Python with: python-version: '3.7' - name: Build sdist run: | pip install cython python setup.py sdist - uses: actions/upload-artifact@v2 with: path: dist/*.tar.gz upload_pypi: needs: [build_wheels, build_sdist] runs-on: ubuntu-latest if: github.event_name == 'release' && github.event.action == 'published' steps: - uses: actions/download-artifact@v2 with: name: artifact path: dist - uses: pypa/gh-action-pypi-publish@master with: user: __token__ password: ${{ secrets.PYPI_TOKEN }} # To test: repository_url: https://test.pypi.org/legacy/
{ "pile_set_name": "Github" }
<?php /** * @package Joomla.UnitTest * * @copyright Copyright (C) 2005 - 2013 Open Source Matters, Inc. All rights reserved. * @license GNU General Public License version 2 or later; see LICENSE */ /** * Test class for JSessionStorageXcache. * Generated by PHPUnit on 2011-10-26 at 19:36:09. * * @package Joomla.UnitTest * @subpackage Session * @since 11.1 */ class JSessionStorageXcacheTest extends PHPUnit_Framework_TestCase { /** * @var JSessionStorageXcache */ protected $object; /** * Sets up the fixture, for example, opens a network connection. * This method is called before a test is executed. * * @return void */ protected function setUp() { parent::setUp(); // Skip these tests if XCache isn't available. if (!JSessionStorageXcache::isSupported()) { $this->markTestSkipped('XCache storage is not enabled on this system.'); } $this->object = JSessionStorage::getInstance('Xcache'); } /** * Tears down the fixture, for example, closes a network connection. * This method is called after a test is executed. * * @return void */ protected function tearDown() { } /** * Test... * * @todo Implement testRead(). * * @return void */ public function testRead() { // Remove the following lines when you implement this test. $this->markTestIncomplete('This test has not been implemented yet.'); } /** * Test... * * @todo Implement testWrite(). * * @return void */ public function testWrite() { // Remove the following lines when you implement this test. $this->markTestIncomplete('This test has not been implemented yet.'); } /** * Test... * * @todo Implement testDestroy(). * * @return void */ public function testDestroy() { // Remove the following lines when you implement this test. $this->markTestIncomplete('This test has not been implemented yet.'); } /** * Test... * * @todo Implement testIsSupported(). * * @return void */ public function testIsSupported() { // Remove the following lines when you implement this test. $this->markTestIncomplete('This test has not been implemented yet.'); } }
{ "pile_set_name": "Github" }
// SPDX-License-Identifier: GPL-2.0+ /* * Driver for USB Mass Storage compliant devices * * Current development and maintenance by: * (c) 1999-2002 Matthew Dharm ([email protected]) * * Developed with the assistance of: * (c) 2000 David L. Brown, Jr. ([email protected]) * (c) 2002 Alan Stern ([email protected]) * * Initial work by: * (c) 1999 Michael Gee ([email protected]) * * This driver is based on the 'USB Mass Storage Class' document. This * describes in detail the protocol used to communicate with such * devices. Clearly, the designers had SCSI and ATAPI commands in * mind when they created this document. The commands are all very * similar to commands in the SCSI-II and ATAPI specifications. * * It is important to note that in a number of cases this class * exhibits class-specific exemptions from the USB specification. * Notably the usage of NAK, STALL and ACK differs from the norm, in * that they are used to communicate wait, failed and OK on commands. * * Also, for certain devices, the interrupt endpoint is used to convey * status of a command. */ #include <linux/highmem.h> #include <linux/export.h> #include <scsi/scsi.h> #include <scsi/scsi_cmnd.h> #include "usb.h" #include "protocol.h" #include "debug.h" #include "scsiglue.h" #include "transport.h" /*********************************************************************** * Protocol routines ***********************************************************************/ void usb_stor_pad12_command(struct scsi_cmnd *srb, struct us_data *us) { /* * Pad the SCSI command with zeros out to 12 bytes. If the * command already is 12 bytes or longer, leave it alone. * * NOTE: This only works because a scsi_cmnd struct field contains * a unsigned char cmnd[16], so we know we have storage available */ for (; srb->cmd_len < 12; srb->cmd_len++) srb->cmnd[srb->cmd_len] = 0; /* send the command to the transport layer */ usb_stor_invoke_transport(srb, us); } void usb_stor_ufi_command(struct scsi_cmnd *srb, struct us_data *us) { /* * fix some commands -- this is a form of mode translation * UFI devices only accept 12 byte long commands * * NOTE: This only works because a scsi_cmnd struct field contains * a unsigned char cmnd[16], so we know we have storage available */ /* Pad the ATAPI command with zeros */ for (; srb->cmd_len < 12; srb->cmd_len++) srb->cmnd[srb->cmd_len] = 0; /* set command length to 12 bytes (this affects the transport layer) */ srb->cmd_len = 12; /* XXX We should be constantly re-evaluating the need for these */ /* determine the correct data length for these commands */ switch (srb->cmnd[0]) { /* for INQUIRY, UFI devices only ever return 36 bytes */ case INQUIRY: srb->cmnd[4] = 36; break; /* again, for MODE_SENSE_10, we get the minimum (8) */ case MODE_SENSE_10: srb->cmnd[7] = 0; srb->cmnd[8] = 8; break; /* for REQUEST_SENSE, UFI devices only ever return 18 bytes */ case REQUEST_SENSE: srb->cmnd[4] = 18; break; } /* end switch on cmnd[0] */ /* send the command to the transport layer */ usb_stor_invoke_transport(srb, us); } void usb_stor_transparent_scsi_command(struct scsi_cmnd *srb, struct us_data *us) { /* send the command to the transport layer */ usb_stor_invoke_transport(srb, us); } EXPORT_SYMBOL_GPL(usb_stor_transparent_scsi_command); /*********************************************************************** * Scatter-gather transfer buffer access routines ***********************************************************************/ /* * Copy a buffer of length buflen to/from the srb's transfer buffer. 
* Update the **sgptr and *offset variables so that the next copy will * pick up from where this one left off. */ unsigned int usb_stor_access_xfer_buf(unsigned char *buffer, unsigned int buflen, struct scsi_cmnd *srb, struct scatterlist **sgptr, unsigned int *offset, enum xfer_buf_dir dir) { unsigned int cnt = 0; struct scatterlist *sg = *sgptr; struct sg_mapping_iter miter; unsigned int nents = scsi_sg_count(srb); if (sg) nents = sg_nents(sg); else sg = scsi_sglist(srb); sg_miter_start(&miter, sg, nents, dir == FROM_XFER_BUF ? SG_MITER_FROM_SG: SG_MITER_TO_SG); if (!sg_miter_skip(&miter, *offset)) return cnt; while (sg_miter_next(&miter) && cnt < buflen) { unsigned int len = min_t(unsigned int, miter.length, buflen - cnt); if (dir == FROM_XFER_BUF) memcpy(buffer + cnt, miter.addr, len); else memcpy(miter.addr, buffer + cnt, len); if (*offset + len < miter.piter.sg->length) { *offset += len; *sgptr = miter.piter.sg; } else { *offset = 0; *sgptr = sg_next(miter.piter.sg); } cnt += len; } sg_miter_stop(&miter); return cnt; } EXPORT_SYMBOL_GPL(usb_stor_access_xfer_buf); /* * Store the contents of buffer into srb's transfer buffer and set the * SCSI residue. */ void usb_stor_set_xfer_buf(unsigned char *buffer, unsigned int buflen, struct scsi_cmnd *srb) { unsigned int offset = 0; struct scatterlist *sg = NULL; buflen = min(buflen, scsi_bufflen(srb)); buflen = usb_stor_access_xfer_buf(buffer, buflen, srb, &sg, &offset, TO_XFER_BUF); if (buflen < scsi_bufflen(srb)) scsi_set_resid(srb, scsi_bufflen(srb) - buflen); } EXPORT_SYMBOL_GPL(usb_stor_set_xfer_buf);
{ "pile_set_name": "Github" }
steps: - name: 'gcr.io/cloud-builders/docker' args: - 'build' - '--tag=gcr.io/$PROJECT_ID/android:base' - '--file=Dockerfile-base' - '.' - name: 'gcr.io/cloud-builders/docker' args: - 'build' - '--tag=gcr.io/$PROJECT_ID/android:${_ANDROID_VERSION}' - '--build-arg' - 'android_version=${_ANDROID_VERSION}' - '--build-arg' - 'project_id=$PROJECT_ID' - '--file=Dockerfile-sdk' - '.' - '--cache-from' - 'gcr.io/$PROJECT_ID/android:base' - name: 'gcr.io/cloud-builders/docker' args: - 'build' - '--tag=gcr.io/$PROJECT_ID/android:${_ANDROID_VERSION}-ndk-r17b' - '--build-arg' - 'android_version=${_ANDROID_VERSION}' - '--build-arg' - 'project_id=$PROJECT_ID' - '--file=Dockerfile-ndk' - '.' - '--cache-from' - 'gcr.io/$PROJECT_ID/android:${_ANDROID_VERSION}' images: - 'gcr.io/$PROJECT_ID/android:base' - 'gcr.io/$PROJECT_ID/android:${_ANDROID_VERSION}' - 'gcr.io/$PROJECT_ID/android:${_ANDROID_VERSION}-ndk-r17b' timeout: 2000s
{ "pile_set_name": "Github" }
package org.apache.archiva.rss.processor; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ public abstract class AbstractArtifactsRssFeedProcessor implements RssFeedProcessor { protected abstract String getTitle(); protected abstract String getDescription(); }
{ "pile_set_name": "Github" }
-- LuaJIT implementation Limited memory BFGS (L-BFGS), based on libLBFGS -- Copyright (c) 1990, Jorge Nocedal -- Copyright (c) 2007-2010 Naoaki Okazaki -- Copyright (c) 2014 Stefano peluchetti -- All rights reserved. -- TODO: -- + method proposed by More and Thuente -- + stop criteria (delta and past parameters) local alg = require "sci.alg" local vec = alg.vec local sqrt, max = math.sqrt, math.max -- Set to functions below: local linesearches = { morethuente = true, armijo = true, wolfe = true, strongwolfe = true, } local function lbfgs_param(opt) opt = opt or { } local default = { ---------------------------------------------------------------------------- -- The number of corrections to approximate the inverse Hessian matrix. -- The L-BFGS routine stores the computation results of previous \ref m -- iterations to approximate the inverse Hessian matrix of the current -- iteration. This parameter controls the size of the limited memories -- (corrections). The default value is \c 6. Values less than \c 3 are -- not recommended. Large values will result in excessive computing time. m = 6, ---------------------------------------------------------------------------- -- Epsilon for convergence test. -- This parameter determines the accuracy with which the solution is to -- be found. A minimization terminates when -- ||g|| < \ref epsilon * max(1, ||x||), -- where ||.|| denotes the Euclidean (L2) norm. The default value is -- \c 1e-5. epsilon = 1e-6, -- TODO: Pass stopping criteria. -- The maximum number of iterations. -- Setting this parameter to zero continues an -- optimization process until a convergence or error. The default value -- is \c 0. max_iterations = 0, -- TODO: Pass stopping criteria. ---------------------------------------------------------------------------- -- The line search algorithm. -- This parameter specifies a line search algorithm to be used by the -- L-BFGS routine. linesearch = "strongwolfe", -- TODO: Change default! -- The maximum number of trials for the line search. -- This parameter controls the number of function and gradients evaluations -- per iteration for the line search routine. The default value is \c 40. max_linesearch = 40, -- The minimum step of the line search routine. -- The default value is \c 1e-20. This value need not be modified unless -- the exponents are too large for the machine being used, or unless the -- problem is extremely badly scaled (in which case the exponents should -- be increased). min_step = 1e-20, -- The maximum step of the line search. -- The default value is \c 1e+20. This value need not be modified unless -- the exponents are too large for the machine being used, or unless the -- problem is extremely badly scaled (in which case the exponents should -- be increased). max_step = 1e20, -- A parameter to control the accuracy of the line search routine. -- The default value is \c 1e-4. This parameter should be greater -- than zero and smaller than \c 0.5. ftol = 1e-4, -- TODO: Add test! -- A coefficient for the Wolfe condition. -- This parameter is valid only when the backtracking line-search -- algorithm is used with the Wolfe condition, -- ::LBFGS_LINESEARCH_BACKTRACKING_STRONG_WOLFE or -- ::LBFGS_LINESEARCH_BACKTRACKING_WOLFE . -- The default value is \c 0.9. This parameter should be greater -- the \ref ftol parameter and smaller than \c 1.0. wolfe = 0.9, -- TODO: Add test! -- A parameter to control the accuracy of the line search routine. -- The default value is \c 0.9. 
If the function and gradient -- evaluations are inexpensive with respect to the cost of the -- iteration (which is sometimes the case when solving very large -- problems) it may be advantageous to set this parameter to a small -- value. A typical small value is \c 0.1. This parameter shuold be -- greater than the \ref ftol parameter (\c 1e-4) and smaller than -- \c 1.0. gtol = 0.9, -- TODO: Add test! -- The machine precision for floating-point values. -- This parameter must be a positive value set by a client program to -- estimate the machine precision. The line search routine will terminate -- with the status code (::LBFGSERR_ROUNDING_ERROR) if the relative width -- of the interval of uncertainty is less than this parameter. xtol = 1e-16, } local o = { } for k,v in pairs(default) do o[k] = opt[k] or v end assert(o.m >= 1, "opt.m must be strictly positive integer") assert(o.epsilon >= 0, "opt.epsilon must be positive") -- assert(o.past >= 0, "opt.past must be positive integer") -- assert(o.delta >= 0, "opt.delta must be positive") assert(o.min_step >= 0, "opt.min_step must be positive") assert(o.max_step >= 0, "opt.max_step must be positive") assert(o.min_step <= o.max_step, "opt.min_step <= opt.max_step required") assert(o.ftol >= 0, "opt.ftol must be positive") if o.linesearch == "wolfe" or o.linesearch == "strongwolfe" then assert(o.wolfe > o.ftol, "opt.wolfe > opt.ftol required") assert(o.wolfe < 1, "opt.wolfe < 1 required") end assert(o.gtol >= 0, "opt.gtol must be positive") assert(o.xtol >= 0, "opt.xtol must be positive") assert(o.max_linesearch > 0, "opt.max_linesearch must be strictly positive") assert(linesearches[o.linesearch], "invalid line search string") return o end -- Algebra --------------------------------------------------------------------- local function vecadd(y, x, c) for i=1,#x do y[i] = y[i] + c*x[i] end end local function vecdiff(z, x, y) for i=1,#x do z[i] = x[i] - y[i] end end local function vecmul(y, x) for i=1,#x do y[i] = y[i]*x[i] end end --------------------------------------- local function vecset(x, c) for i=1,#x do x[i] = c end end local function veccpy(y, x) for i=1,#x do y[i] = x[i] end end local function vecdot(x, y) local s = 0 for i=1,#x do s = s + x[i]*y[i] end return s end local function vecscale(x, scale) for i=1,#x do x[i] = x[i]*scale end end -- Line searches --------------------------------------------------------------- -- [Backtracking method with the Armijo condition] -- The backtracking method finds the step length such that it satisfies -- the sufficient decrease (Armijo) condition: -- - f(x + a * d) <= f(x) + opt.ftol * a * g(x)^T d -- where x is the current point, d is the current search direction, and -- a is the step length. -- -- [Backtracking method with regular Wolfe condition] -- The backtracking method finds the step length such that it satisfies -- both the Armijo condition and the curvature condition: -- - g(x + a * d)^T d >= opt.wolfe * g(x)^T d -- where x is the current point, d is the current search direction, and -- a is the step length. -- -- [Backtracking method with strong Wolfe condition] -- The backtracking method finds the step length such that it satisfies -- both the Armijo condition and the following condition: -- - |g(x + a * d)^T d| <= opt.wolfe * |g(x)^T d| -- where x is the current point, d is the current search direction, and -- a is the step length. 
-- -- All these cases are covered in: local function backtracking(x, finit, g, s, step, xp, gp, wp, gradf, param, scale) local dec, inc = 0.5, 2.1 if step <= 0 then return nil, "step size must be positive" end -- Compute the initial gradient in the search direction: local dginit = vecdot(g, s) -- Make sure that s points to a descent direction: if 0 < dginit then return nil, "s is not pointing to a descent direction" end local dgtest = param.ftol*dginit for count=1,1/0 do veccpy(x, xp) vecadd(x, s, step) -- Evaluate the function and gradient values: local f = scale*gradf(x, g) vecscale(g, scale) local width if f > finit + step*dgtest then width = dec else if param.linesearch == "armijo" then -- OK Armijo condition. return count, f end local dg = vecdot(g, s) if dg < param.wolfe*dginit then width = inc else if param.linesearch == "wolfe" then -- OK Wolfe condition. return count, f end if dg > -param.wolfe*dginit then width = dec else -- OK strong Wolfe condition. return count, f end end end if step < param.min_step then return nil, "step smaller than opt.min_step" end if step > param.max_step then return nil, "step larger than opt.max_step" end if param.max_linesearch <= count then return nil, "opt.max_linesearch line search iterations exceeded" end step = step*width end end linesearches.armijo = backtracking linesearches.wolfe = backtracking linesearches.strongwolfe = backtracking -- Method proposed by More and Thuente: -- linesearches.morethuente (TODO: make default) -- TODO: Implement. local function stop(x, g, epsilon) -- Compute x and g norms. local xnorm = sqrt(vecdot(x, x)) local gnorm = sqrt(vecdot(g, g)) -- Converged if: -- |g(x)| / \max(1, |x|) < \epsilon if gnorm/max(1, xnorm) <= epsilon then return true end end -- LBFGS ----------------------------------------------------------------------- local function lbfgs(scale, gradf, param) local x = assert(param.x0, "x0 is required") param = lbfgs_param(param) local epsilon = param.epsilon local n = #x assert(n > 0, "problem size must be positive") local m = param.m local linesearch = linesearches[param.linesearch] local xp, g, gp, d, w = vec(n), vec(n), vec(n), vec(n), vec(n) local lm = { } for i=0,m-1 do lm[i] = { alpha = 0, ys = 0, y = vec(n), s = vec(n) } end local fx = scale*gradf(x, g) -- Initial function value and gradient. vecscale(g, scale) -- Initial direction, assume initial Hessian as identity matrix: for i=1,n do d[i] = -g[i] end if stop(x, g, epsilon) then return x, fx/scale end local step = 1/sqrt(vecdot(d, d)) -- Initial step. local iend = 0 for k=1,1/0 do -- Store the current position and gradient vectors: --print(x:width()) veccpy(xp, x) veccpy(gp, g) -- Search for an optimal step: local ls, fx_or_err = linesearch(x, fx, g, d, step, xp, gp, w, gradf, param, scale) if not ls then return nil, fx_or_err end if stop(x, g, epsilon) then return x, fx_or_err/scale end -- TODO: Use stop(). if param.max_iterations ~= 0 and param.max_iterations < k+1 then return nil, "opt.max_iterations LBFGS iterations exceeded" end --[[Update vectors s and y: s_{k+1} = x_{k+1} - x_{k} = \step * d_{k}. y_{k+1} = g_{k+1} - g_{k}.]]-- local it = lm[iend] vecdiff(it.s, x, xp) vecdiff(it.y, g, gp) --[[Compute scalars ys and yy: ys = y^t \cdot s = 1 / \rho. yy = y^t \cdot y. Notice that yy is used for scaling the Hessian matrix H_0 (Cholesky factor).]] local ys = vecdot(it.y, it.s) local yy = vecdot(it.y, it.y) it.ys = ys --[[Recursive formula to compute dir = -(H \cdot g). This is described in page 779 of: Jorge Nocedal. 
Updating Quasi-Newton Matrices with Limited Storage. Mathematics of Computation, Vol. 35, No. 151, pp. 773--782, 1980.]] local bound = (m <= k) and m or k iend = (iend + 1) % m -- Compute the steepest direction: for i=1,n do d[i] = -g[i] end local j = iend for i=0,bound-1 do j = (j + m - 1) % m it = lm[j] -- \alpha_{j} = \rho_{j} s^{t}_{j} \cdot q_{k+1}. it.alpha = vecdot(it.s, d) it.alpha = it.alpha/it.ys -- q_{i} = q_{i+1} - \alpha_{i} y_{i}. vecadd(d, it.y, -it.alpha) end for i=1,n do d[i] = d[i]*(ys/yy) end for i=0,bound-1 do it = lm[j] -- \beta_{j} = \rho_{j} y^t_{j} \cdot \gamma_{i}. local beta = vecdot(it.y, d, n); beta = beta/it.ys -- \gamma_{i+1} = \gamma_{i} + (\alpha_{j} - \beta_{j}) s_{j}. vecadd(d, it.s, it.alpha - beta) j = (j + 1) % m end -- Now the search direction d is ready. We try step = 1 first: step = 1 end end return { optim = lbfgs }
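-- Minimal usage sketch, kept as a comment so the module still ends with its
-- return statement. The require paths are assumptions about how this file is
-- installed (adjust to the actual package layout). As the calls above expect,
-- gradf(x, g) must return the objective value and write the gradient into g;
-- scale is 1 for plain minimization.
--
--   local alg   = require "sci.alg"
--   local lbfgs = require "sci.optim.lbfgs"   -- assumed module name
--
--   local x0 = alg.vec(2)
--   x0[1], x0[2] = -1.2, 1                    -- classic Rosenbrock start
--
--   local function gradf(x, g)
--     local a, b = 1 - x[1], x[2] - x[1]^2
--     g[1] = -2*a - 400*x[1]*b
--     g[2] = 200*b
--     return a^2 + 100*b^2
--   end
--
--   local xmin, fmin = lbfgs.optim(1, gradf, { x0 = x0 })
--   if xmin then print(xmin[1], xmin[2], fmin) else print("error: "..fmin) end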
{ "pile_set_name": "Github" }
#StoryGraph World Generator

The StoryGraph world generator is a program that translates a plain English description of a StoryGraph world into a working StoryGraph program. StoryGraph objects, especially rules, can be cumbersome to write out by hand, so this program allows you to easily define the basic types, rules and actors of your world and generate the code automatically. The program generated by the World Generator will be somewhat dull, but since the boilerplate is all there it will be easy to go in and make modifications and additions to add color to your StoryGraph world.

##How to Use

First, write a description of your world using the grammar below and save it to disk. Go to the root directory of StoryGraph in your console and use the following command:

```shell
node generateWorld path/to/description.txt myWorld.js
```

The second parameter is the output file name and it is important that it has a .js extension. Now you can modify your world as you see fit. Generated worlds automatically console.log a four step story so you can immediately test your world like this:

```shell
node myWorld.js
```

##Grammar

Here is the grammar of the world generator. Note that the formats provided here are not flexible. Only the parts inside curly braces may be replaced with your custom text.

###Basic Types

FORMAT: There is a type called {typename}.

EXAMPLE:
```code
There is a type called person.
There is a type called ghost.
```

###Type Extensions

FORMAT: A {new type} is a {base type}.

EXAMPLE:
```code
A woman is a person.
A cat is an animal.
A skeleton is a ghost.
```

###Type Decorators

FORMAT: Some actors are {typename}.
OPTIONAL FORMAT: Some actors are {type one} and some are {type two}.

EXAMPLE:
```code
Some actors are smart and some are stupid.
Some actors are scary.
```

###Actors

Note that the placeholder {type} here may be a basic type or extended type preceded by any number of type decorators. See the example for clarification.

FORMAT: There is a {type} called {name}.

EXAMPLE:
```code
There is a ghost called Slimer.
There is a smart kind man named Joe.
There is a beautiful woman named Angelina.
```

###Rules

Again, the placeholder {type} may be preceded by any number of decorators.

FORMAT: If a {type one} <{encounter text}> a {type two} then the {type one||two} <{result text}>.
OPTIONAL FORMAT: If a {type one} <{encounter text}> a {type two} then the {type one||two} <{result text}> the {type one||two}

EXAMPLE:
```code
If a boy <is startled by> a ghost then the boy <starts to cry>.
If a man <sees> a ghost then the man <stares in disbelief at> the ghost.
```

###Full Example

Here is a full working example:

```code
There is a type called entity.
A vapor is an entity.
A life is an entity.
An animal is a life.
A solid is an entity.
Some actors are light and some are heavy.
Some actors are wet and some are dry.
Some actors are slow and some are fast.
Some actors are bright and some are dark.
Some actors are expansive and some are small.
A rock is a dry heavy solid.
A bird is a light fast animal.
There is a dark bird called crow.
There is a bright bird called gull.
There is a slow light animal called crab.
There is a light wet vapor called waves.
There is a wet bright slow vapor called clouds.
There is a wet dark slow vapor called fog.
There is a bright solid called seashore.
There is an expansive rock called cliff.
There is a small rock called Boulder.
If a slow wet entity <emerges> a dry entity then the slow wet entity <drifts across> the dry entity.
If a vapor <billows up onto> an expansive solid then the vapor <lightly covers> the expansive solid.
If a fast animal <comes upon> a slow entity then the fast animal <scurries over> the slow entity.
If an expansive entity <bears down on> a small entity then the expansive entity <envelops> the small entity.
If a bird <flies into> an expansive entity then the bird <soars high above> the expansive entity.
If a bird <discovers> a small solid then the bird <settles upon> the small solid.
```

Notice that in the above example the first rule would need some editing. The dry entity must be removed from the value section of the rule's cause in order for the rendering of the rule to make sense. This is because the first part of the sentence is used to render both the type and value of the cause section of the rule.
{ "pile_set_name": "Github" }
import { warn } from '../../util/index' export default { bind () { process.env.NODE_ENV !== 'production' && warn( 'v-ref:' + this.arg + ' must be used on a child ' + 'component. Found on <' + this.el.tagName.toLowerCase() + '>.', this.vm ) } }
{ "pile_set_name": "Github" }
/*
 * Copyright (c) 2016 咖枯 <[email protected] | [email protected]>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package com.kaku.weac.activities;

import android.support.v4.app.Fragment;
import android.view.MotionEvent;

import com.kaku.weac.R;
import com.kaku.weac.fragment.LifeIndexDetailFragment;

/**
 * Life index detail activity
 *
 * @author 咖枯
 * @version 1.0 2015/10/10
 */
public class LifeIndexDetailActivity extends SingleFragmentDialogActivity {

    @Override
    protected Fragment createFragment() {
        return new LifeIndexDetailFragment();
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if (MotionEvent.ACTION_OUTSIDE == event.getAction()
                || MotionEvent.ACTION_DOWN == event.getAction()) {
            onFinish();
            return true;
        }
        return super.onTouchEvent(event);
    }

    @Override
    public void onBackPressed() {
        super.onBackPressed();
        onFinish();
    }

    /**
     * Finish and exit
     */
    private void onFinish() {
        finish();
        overridePendingTransition(0, R.anim.zoomout);
    }
}
{ "pile_set_name": "Github" }
/* * SoapUI, Copyright (C) 2004-2019 SmartBear Software * * Licensed under the EUPL, Version 1.1 or - as soon as they will be approved by the European Commission - subsequent * versions of the EUPL (the "Licence"); * You may not use this work except in compliance with the Licence. * You may obtain a copy of the Licence at: * * http://ec.europa.eu/idabc/eupl * * Unless required by applicable law or agreed to in writing, software distributed under the Licence is * distributed on an "AS IS" basis, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the Licence for the specific language governing permissions and limitations * under the Licence. */ package com.eviware.soapui.impl.wsdl.panels.request; import com.eviware.soapui.support.types.StringToStringMap; import javax.swing.table.AbstractTableModel; import javax.swing.table.TableModel; import java.util.ArrayList; import java.util.List; /** * TableModel for StringToString Maps * * @author ole.matzura */ public class StringToStringMapTableModel extends AbstractTableModel implements TableModel { private StringToStringMap data; private final String keyCaption; private final String valueCaption; private List<String> keyList; private final boolean editable; public StringToStringMapTableModel(StringToStringMap data, String keyCaption, String valueCaption, boolean editable) { this.data = data; this.keyCaption = keyCaption; this.valueCaption = valueCaption; this.editable = editable; keyList = data == null ? new ArrayList<String>() : new ArrayList<String>(data.keySet()); } public int getColumnCount() { return 2; } public String getColumnName(int arg0) { return arg0 == 0 ? keyCaption : valueCaption; } public boolean isCellEditable(int arg0, int arg1) { return editable; } public Class<?> getColumnClass(int arg0) { return String.class; } public void setValueAt(Object arg0, int arg1, int arg2) { String oldKey = keyList.get(arg1); if (arg2 == 0) { String value = data.get(oldKey); data.remove(oldKey); data.put(arg0.toString(), value); keyList.set(arg1, arg0.toString()); } else { data.put(oldKey, arg0.toString()); } fireTableCellUpdated(arg1, arg2); } public int getRowCount() { return data == null ? 0 : data.size(); } public Object getValueAt(int arg0, int arg1) { String str = keyList.get(arg0); return arg1 == 0 ? str : data.get(str); } public void add(String key, String value) { if (keyList.contains(key)) { data.put(key, value); fireTableCellUpdated(keyList.indexOf(key), 1); } else { data.put(key, value); keyList.add(key); fireTableRowsInserted(keyList.size() - 1, keyList.size() - 1); } } public void remove(int row) { String key = keyList.get(row); keyList.remove(row); data.remove(key); fireTableRowsDeleted(row, row); } public StringToStringMap getData() { return new StringToStringMap(this.data == null ? new StringToStringMap() : this.data); } public void setData(StringToStringMap data) { this.data = data == null ? new StringToStringMap() : data; keyList = new ArrayList<String>(this.data.keySet()); fireTableDataChanged(); } }
{ "pile_set_name": "Github" }
#!/usr/bin/env node // Standalone semver comparison program. // Exits successfully and prints matching version(s) if // any supplied version is valid and passes all tests. var argv = process.argv.slice(2) var versions = [] var range = [] var inc = null var version = require('../package.json').version var loose = false var includePrerelease = false var coerce = false var identifier var semver = require('../semver') var reverse = false var options = {} main() function main () { if (!argv.length) return help() while (argv.length) { var a = argv.shift() var indexOfEqualSign = a.indexOf('=') if (indexOfEqualSign !== -1) { a = a.slice(0, indexOfEqualSign) argv.unshift(a.slice(indexOfEqualSign + 1)) } switch (a) { case '-rv': case '-rev': case '--rev': case '--reverse': reverse = true break case '-l': case '--loose': loose = true break case '-p': case '--include-prerelease': includePrerelease = true break case '-v': case '--version': versions.push(argv.shift()) break case '-i': case '--inc': case '--increment': switch (argv[0]) { case 'major': case 'minor': case 'patch': case 'prerelease': case 'premajor': case 'preminor': case 'prepatch': inc = argv.shift() break default: inc = 'patch' break } break case '--preid': identifier = argv.shift() break case '-r': case '--range': range.push(argv.shift()) break case '-c': case '--coerce': coerce = true break case '-h': case '--help': case '-?': return help() default: versions.push(a) break } } var options = { loose: loose, includePrerelease: includePrerelease } versions = versions.map(function (v) { return coerce ? (semver.coerce(v) || { version: v }).version : v }).filter(function (v) { return semver.valid(v) }) if (!versions.length) return fail() if (inc && (versions.length !== 1 || range.length)) { return failInc() } for (var i = 0, l = range.length; i < l; i++) { versions = versions.filter(function (v) { return semver.satisfies(v, range[i], options) }) if (!versions.length) return fail() } return success(versions) } function failInc () { console.error('--inc can only be used on a single version with no range') fail() } function fail () { process.exit(1) } function success () { var compare = reverse ? 'rcompare' : 'compare' versions.sort(function (a, b) { return semver[compare](a, b, options) }).map(function (v) { return semver.clean(v, options) }).map(function (v) { return inc ? semver.inc(v, inc, options, identifier) : v }).forEach(function (v, i, _) { console.log(v) }) } function help () { console.log(['SemVer ' + version, '', 'A JavaScript implementation of the https://semver.org/ specification', 'Copyright Isaac Z. Schlueter', '', 'Usage: semver [options] <version> [<version> [...]]', 'Prints valid versions sorted by SemVer precedence', '', 'Options:', '-r --range <range>', ' Print versions that match the specified range.', '', '-i --increment [<level>]', ' Increment a version by the specified level. Level can', ' be one of: major, minor, patch, premajor, preminor,', " prepatch, or prerelease. 
Default level is 'patch'.", ' Only one version may be specified.', '', '--preid <identifier>', ' Identifier to be used to prefix premajor, preminor,', ' prepatch or prerelease version increments.', '', '-l --loose', ' Interpret versions and ranges loosely', '', '-p --include-prerelease', ' Always include prerelease versions in range matching', '', '-c --coerce', ' Coerce a string into SemVer if possible', ' (does not imply --loose)', '', 'Program exits successfully if any valid version satisfies', 'all supplied ranges, and prints all satisfying versions.', '', 'If no satisfying versions are found, then exits failure.', '', 'Versions are printed in ascending order, so supplying', 'multiple versions to the utility will just sort them.' ].join('\n')) }
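
// Illustrative invocations (a sketch of expected behaviour, assuming this
// script is installed on PATH as `semver`; output is one version per line):
//
//   $ semver 1.2.3 1.1.0 -r ">=1.2.0"   # only 1.2.3 satisfies the range
//   1.2.3
//
//   $ semver -i minor 1.2.3             # increment a single version
//   1.3.0
//
//   $ semver -c v1.2                    # coerce a loose string into SemVer
//   1.2.0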
{ "pile_set_name": "Github" }
// // More tests for N-dimensional polygon querying // // Create a polygon of some shape (no holes) // using turtle graphics. Basically, will look like a very contorted octopus (quad-pus?) shape. // There are no holes, but some edges will probably touch. var numTests = 4; for (var test = 0; test < numTests; test++) { Random.srand(1337 + test); var numTurtles = 4; var gridSize = [20, 20]; var turtleSteps = 500; var bounds = [Random.rand() * -1000000 + 0.00001, Random.rand() * 1000000 + 0.00001]; var rotation = Math.PI * Random.rand(); var bits = Math.floor(Random.rand() * 32); printjson({test: test, rotation: rotation, bits: bits}); var rotatePoint = function(x, y) { if (y == undefined) { y = x[1]; x = x[0]; } xp = x * Math.cos(rotation) - y * Math.sin(rotation); yp = y * Math.cos(rotation) + x * Math.sin(rotation); var scaleX = (bounds[1] - bounds[0]) / 360; var scaleY = (bounds[1] - bounds[0]) / 360; x *= scaleX; y *= scaleY; return [xp, yp]; }; var grid = []; for (var i = 0; i < gridSize[0]; i++) { grid.push(new Array(gridSize[1])); } grid.toString = function() { var gridStr = ""; for (var j = grid[0].length - 1; j >= -1; j--) { for (var i = 0; i < grid.length; i++) { if (i == 0) gridStr += (j == -1 ? " " : (j % 10)) + ": "; if (j != -1) gridStr += "[" + (grid[i][j] != undefined ? grid[i][j] : " ") + "]"; else gridStr += " " + (i % 10) + " "; } gridStr += "\n"; } return gridStr; }; var turtles = []; for (var i = 0; i < numTurtles; i++) { var up = (i % 2 == 0) ? i - 1 : 0; var left = (i % 2 == 1) ? (i - 1) - 1 : 0; turtles[i] = [ [Math.floor(gridSize[0] / 2), Math.floor(gridSize[1] / 2)], [Math.floor(gridSize[0] / 2) + left, Math.floor(gridSize[1] / 2) + up] ]; grid[turtles[i][1][0]][turtles[i][1][1]] = i; } grid[Math.floor(gridSize[0] / 2)][Math.floor(gridSize[1] / 2)] = "S"; // print( grid.toString() ) var pickDirections = function() { var up = Math.floor(Random.rand() * 3); if (up == 2) up = -1; if (up == 0) { var left = Math.floor(Random.rand() * 3); if (left == 2) left = -1; } else left = 0; if (Random.rand() < 0.5) { var swap = left; left = up; up = swap; } return [left, up]; }; for (var s = 0; s < turtleSteps; s++) { for (var t = 0; t < numTurtles; t++) { var dirs = pickDirections(); var up = dirs[0]; var left = dirs[1]; var lastTurtle = turtles[t][turtles[t].length - 1]; var nextTurtle = [lastTurtle[0] + left, lastTurtle[1] + up]; if (nextTurtle[0] >= gridSize[0] || nextTurtle[1] >= gridSize[1] || nextTurtle[0] < 0 || nextTurtle[1] < 0) continue; if (grid[nextTurtle[0]][nextTurtle[1]] == undefined) { turtles[t].push(nextTurtle); grid[nextTurtle[0]][nextTurtle[1]] = t; } } } turtlePaths = []; for (var t = 0; t < numTurtles; t++) { turtlePath = []; var nextSeg = function(currTurtle, prevTurtle) { var pathX = currTurtle[0]; if (currTurtle[1] < prevTurtle[1]) { pathX = currTurtle[0] + 1; pathY = prevTurtle[1]; } else if (currTurtle[1] > prevTurtle[1]) { pathX = currTurtle[0]; pathY = currTurtle[1]; } else if (currTurtle[0] < prevTurtle[0]) { pathX = prevTurtle[0]; pathY = currTurtle[1]; } else if (currTurtle[0] > prevTurtle[0]) { pathX = currTurtle[0]; pathY = currTurtle[1] + 1; } // print( " Prev : " + prevTurtle + " Curr : " + currTurtle + " path // : " // + [pathX, pathY]); return [pathX, pathY]; }; for (var s = 1; s < turtles[t].length; s++) { currTurtle = turtles[t][s]; prevTurtle = turtles[t][s - 1]; turtlePath.push(nextSeg(currTurtle, prevTurtle)); } for (var s = turtles[t].length - 2; s >= 0; s--) { currTurtle = turtles[t][s]; prevTurtle = turtles[t][s + 1]; 
turtlePath.push(nextSeg(currTurtle, prevTurtle)); } // printjson( turtlePath ) // End of the line is not inside our polygon. var lastTurtle = turtles[t][turtles[t].length - 1]; grid[lastTurtle[0]][lastTurtle[1]] = undefined; fixedTurtlePath = []; for (var s = 1; s < turtlePath.length; s++) { if (turtlePath[s - 1][0] == turtlePath[s][0] && turtlePath[s - 1][1] == turtlePath[s][1]) { continue; } var up = turtlePath[s][1] - turtlePath[s - 1][1]; var right = turtlePath[s][0] - turtlePath[s - 1][0]; var addPoint = (up != 0 && right != 0); if (addPoint && up != right) { fixedTurtlePath.push([turtlePath[s][0], turtlePath[s - 1][1]]); } else if (addPoint) { fixedTurtlePath.push([turtlePath[s - 1][0], turtlePath[s][1]]); } fixedTurtlePath.push(turtlePath[s]); } // printjson( fixedTurtlePath ) turtlePaths.push(fixedTurtlePath); } // Uncomment to print polygon shape // print( grid.toString() ) var polygon = []; for (var t = 0; t < turtlePaths.length; t++) { for (var s = 0; s < turtlePaths[t].length; s++) { polygon.push(rotatePoint(turtlePaths[t][s])); } } // Uncomment to print out polygon // printjson( polygon ) t = db.polytest2; t.drop(); // Test single and multi-location documents var pointsIn = 0; var pointsOut = 0; var allPointsIn = []; var allPointsOut = []; for (var j = grid[0].length - 1; j >= 0; j--) { for (var i = 0; i < grid.length; i++) { var point = rotatePoint([i + 0.5, j + 0.5]); t.insert({loc: point}); if (grid[i][j] != undefined) { allPointsIn.push(point); pointsIn++; } else { allPointsOut.push(point); pointsOut++; } } } var res = t.ensureIndex({loc: "2d"}, {bits: 1 + bits, max: bounds[1], min: bounds[0]}); assert.commandWorked(res); t.insert({loc: allPointsIn}); t.insert({loc: allPointsOut}); allPoints = allPointsIn.concat(allPointsOut); t.insert({loc: allPoints}); print("Points : "); printjson({pointsIn: pointsIn, pointsOut: pointsOut}); // print( t.find( { loc : { "$within" : { "$polygon" : polygon } } } ).count() ) assert.eq(gridSize[0] * gridSize[1] + 3, t.find().count()); assert.eq(2 + pointsIn, t.find({loc: {"$within": {"$polygon": polygon}}}).count()); }
{ "pile_set_name": "Github" }
# -*- coding: utf-8 -*- # # King Phisher documentation build configuration file, created by # sphinx-quickstart on Fri Jun 13 09:54:27 2014. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. GITHUB_BRANCH = 'dev' GITHUB_REPO = 'securestate/king-phisher' import copy import os import re import ssl import sys ssl.HAS_SNI = False # patch this to work around an issue with RTDs _prj_root = os.path.dirname(__file__) _prj_root = os.path.relpath(os.path.join('..', '..'), _prj_root) _prj_root = os.path.abspath(_prj_root) sys.path.insert(1, _prj_root) _pkg = os.path.join(_prj_root, 'king_phisher', 'third_party') sys.path.insert(2, _pkg) del _prj_root, _pkg import king_phisher.its import king_phisher.utilities import king_phisher.version import sphinx import sphinx.domains.python import sphinx.util.docfields # -- General configuration ------------------------------------------------ needs_sphinx = '1.7' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'king_phisher.sphinxext.database', 'king_phisher.sphinxext.graphql', 'king_phisher.sphinxext.rpc', 'sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.extlinks', 'sphinx.ext.graphviz', 'sphinx.ext.intersphinx', 'sphinx.ext.linkcode', 'sphinxcontrib.httpdomain' ] tab_width = 4 extlinks = { 'issue': ("https://github.com/{0}/issues/%s".format(GITHUB_REPO), '#'), 'release': ("https://github.com/{0}/releases/tag/v%s".format(GITHUB_REPO), 'v'), 'wiki': ("https://github.com/{0}/wiki/%s".format(GITHUB_REPO), ''), } def linkcode_resolve(domain, info): if domain != 'py': return None if not info['module']: return None file_name = info['module'].replace('.', '/') + '.py' return "https://github.com/{0}/blob/{1}/{2}".format(GITHUB_REPO, GITHUB_BRANCH, file_name) def gobject_signal_parse(env, sig, signode): match = re.match(r'([a-z\-]+)\((([a-zA-Z_]+, *)*[a-zA-Z_]+)?\)', sig) signode += sphinx.addnodes.desc_name(sig, sig) return match.group(1) def setup(app): app.add_stylesheet('theme_overrides.css') doc_field_types = list(copy.copy(sphinx.domains.python.PyObject.doc_field_types)) doc_field_types.append(sphinx.util.docfields.Field('flags', label='Signal flags', names=['flag', 'flags'], has_arg=False)) app.add_object_type( 'gobject-signal', 'gsig', doc_field_types=doc_field_types, parse_node=gobject_signal_parse ) intersphinx_mapping = { 'advancedhttpserver': ('https://advancedhttpserver.readthedocs.io/en/latest/', None), 'blinker': ('https://pythonhosted.org/blinker/', None), 'glib': ('http://lazka.github.io/pgi-docs/GLib-2.0/', None), 'gobject': ('http://lazka.github.io/pgi-docs/GObject-2.0/', None), 'gtksource': ('https://lazka.github.io/pgi-docs/GtkSource-3.0/', None), 'gtk': ('http://lazka.github.io/pgi-docs/Gtk-3.0/', None), 'jinja2': ('http://jinja.pocoo.org/docs/latest/', None), 'jsonschema': ('https://python-jsonschema.readthedocs.io/en/latest/', None), 'paramiko': ('http://docs.paramiko.org/en/latest/', None), 'py-gfm': ('https://pythonhosted.org/py-gfm/', None), 'python': ('https://docs.python.org/3/', None), 'smokezephyr': ('https://smoke-zephyr.readthedocs.io/en/latest/', None), 'sqlalchemy': ('http://docs.sqlalchemy.org/en/latest/', None), 'webkit2': ('http://lazka.github.io/pgi-docs/WebKit2-4.0/', None) } # 
Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'King Phisher' copyright = '2013-2019, SecureState LLC' # The short X.Y version. version = king_phisher.version.version.split('-')[0] # The full version, including alpha/beta/rc tags. release = king_phisher.version.distutils_version language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = [] # HTTP domain specifc settings http://pythonhosted.org/sphinxcontrib-httpdomain/#additional-configuration http_index_shortname = 'rest-api' http_index_localname = "{0} REST API".format(project) # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). add_module_names = False # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for Graphviz ------------------------------------------------- graphviz_dot = 'dot' # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. if not king_phisher.its.on_rtd: html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. 
#html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'king_phisher_doc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'KingPhisher.tex', u'King Phisher Documentation', u'Spencer McIntyre', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'kingphisher', u'King Phisher Documentation', [u'Spencer McIntyre'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'KingPhisher', u'King Phisher Documentation', u'Spencer McIntyre', 'KingPhisher', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
#texinfo_no_detailmenu = False if king_phisher.its.on_rtd: king_phisher.its.mocked = True # mock specific external packages MOCK_MODULES = [ 'gi', 'gi.repository', 'matplotlib', 'matplotlib.backends', 'matplotlib.backends.backend_gtk3', 'matplotlib.backends.backend_gtk3agg', 'matplotlib.figure' ] sys.modules.update((mod_name, king_phisher.utilities.Mock()) for mod_name in MOCK_MODULES) class PatchedDocsCache(object): def __init__(self, *args, **kwargs): pass def __call__(self, function): return function # monkey-patch this in so the docs build correct for cached functions import smoke_zephyr.utilities smoke_zephyr.utilities.Cache = PatchedDocsCache
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="utf-8"?> <Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003"> <Import Project="..\..\..\..\packages\Xamarin.Build.Download.0.10.0\build\Xamarin.Build.Download.props" Condition="Exists('..\..\..\..\packages\Xamarin.Build.Download.0.10.0\build\Xamarin.Build.Download.props')" /> <PropertyGroup> <Configuration Condition=" '$(Configuration)' == '' ">Debug</Configuration> <Platform Condition=" '$(Platform)' == '' ">iPhoneSimulator</Platform> <ProductVersion>8.0.30703</ProductVersion> <SchemaVersion>2.0</SchemaVersion> <ProjectGuid>{0FFC84D3-DC23-433C-8675-500FD96CCD73}</ProjectGuid> <ProjectTypeGuids>{FEACFBD2-3405-455C-9665-78FE426C6842};{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}</ProjectTypeGuids> <OutputType>Exe</OutputType> <RootNamespace>MobileAdsExample</RootNamespace> <IPhoneResourcePrefix>Resources</IPhoneResourcePrefix> <AssemblyName>MobileAdsExample</AssemblyName> <IntermediateOutputPath>obj\unified\$(Platform)\$(Configuration)</IntermediateOutputPath> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|iPhoneSimulator' "> <DebugSymbols>True</DebugSymbols> <DebugType>full</DebugType> <Optimize>False</Optimize> <OutputPath>bin\unified\iPhoneSimulator\Debug</OutputPath> <DefineConstants>DEBUG;</DefineConstants> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> <ConsolePause>False</ConsolePause> <MtouchDebug>True</MtouchDebug> <MtouchLink>None</MtouchLink> <MtouchI18n> </MtouchI18n> <MtouchArch>i386, x86_64</MtouchArch> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|iPhoneSimulator' "> <DebugType>none</DebugType> <Optimize>True</Optimize> <OutputPath>bin\unified\iPhoneSimulator\Release</OutputPath> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> <ConsolePause>False</ConsolePause> <MtouchLink>None</MtouchLink> <MtouchI18n> </MtouchI18n> <MtouchArch>i386, x86_64</MtouchArch> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|iPhone' "> <DebugSymbols>True</DebugSymbols> <DebugType>full</DebugType> <Optimize>False</Optimize> <OutputPath>bin\unified\iPhone\Debug</OutputPath> <DefineConstants>DEBUG;</DefineConstants> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> <ConsolePause>False</ConsolePause> <CodesignKey>iPhone Developer</CodesignKey> <MtouchDebug>True</MtouchDebug> <MtouchI18n> </MtouchI18n> <MtouchArch>ARMv7, ARM64</MtouchArch> <MtouchProfiling>true</MtouchProfiling> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|iPhone' "> <DebugType>none</DebugType> <Optimize>True</Optimize> <OutputPath>bin\unified\iPhone\Release</OutputPath> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> <ConsolePause>False</ConsolePause> <CodesignKey>iPhone Developer</CodesignKey> <MtouchI18n> </MtouchI18n> <MtouchArch>ARMv7, ARM64</MtouchArch> <MtouchLink>Full</MtouchLink> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Ad-Hoc|iPhone' "> <DebugType>none</DebugType> <Optimize>True</Optimize> <OutputPath>bin\unified\iPhone\Ad-Hoc</OutputPath> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> <BuildIpa>True</BuildIpa> <ConsolePause>False</ConsolePause> <CodesignProvision>Automatic:AdHoc</CodesignProvision> <CodesignKey>iPhone Distribution</CodesignKey> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'AppStore|iPhone' "> <DebugType>none</DebugType> 
<Optimize>True</Optimize> <OutputPath>bin\unified\iPhone\AppStore</OutputPath> <ErrorReport>prompt</ErrorReport> <WarningLevel>4</WarningLevel> <ConsolePause>False</ConsolePause> <CodesignKey>iPhone Distribution</CodesignKey> <CodesignProvision>Automatic:AppStore</CodesignProvision> </PropertyGroup> <ItemGroup> <Reference Include="System" /> <Reference Include="System.Xml" /> <Reference Include="System.Core" /> <Reference Include="Xamarin.iOS" /> <Reference Include="MonoTouch.Dialog-1" /> </ItemGroup> <ItemGroup> <Folder Include="Resources\" /> </ItemGroup> <ItemGroup> <None Include="Info.plist" /> <None Include="packages.config" /> </ItemGroup> <ItemGroup> <Compile Include="Main.cs" /> <Compile Include="AppDelegate.cs" /> </ItemGroup> <Import Project="$(MSBuildExtensionsPath)\Xamarin\iOS\Xamarin.iOS.CSharp.targets" /> <Import Project="$(MSBuildThisFileDirectory)..\..\..\..\source\Firebase\Core\Core.targets" /> <Import Project="$(MSBuildThisFileDirectory)..\..\..\..\source\Google\MobileAds\MobileAds.targets" /> <Import Project="..\..\..\..\packages\Xamarin.Build.Download.0.10.0\build\Xamarin.Build.Download.targets" Condition="Exists('..\..\..\..\packages\Xamarin.Build.Download.0.10.0\build\Xamarin.Build.Download.targets')" /> <ItemGroup> <Content Include="Default-568h%402x.png" /> </ItemGroup> <ItemGroup> <ProjectReference Include="..\..\..\..\source\Google\MobileAds\MobileAds.csproj"> <Project>{1C9DFB3E-FDA5-478B-B063-CF646AA892DB}</Project> <Name>MobileAds</Name> </ProjectReference> <ProjectReference Include="..\..\..\..\source\Firebase\Core\Core.csproj"> <Project>{9335A59B-3B54-4ED5-8201-B68CFD05CAFB}</Project> <Name>Core</Name> </ProjectReference> </ItemGroup> </Project>
{ "pile_set_name": "Github" }
<?php /** * @category Magento * @package Magento_CatalogInventory * Copyright © Magento, Inc. All rights reserved. * See COPYING.txt for license details. */ namespace Magento\CatalogInventory\Model\Indexer\Stock\Action; /** * Class Rows reindex action for mass actions * * @package Magento\CatalogInventory\Model\Indexer\Stock\Action */ class Rows extends \Magento\CatalogInventory\Model\Indexer\Stock\AbstractAction { /** * Execute Rows reindex * * @param array $ids * @throws \Magento\Framework\Exception\LocalizedException * * @return void */ public function execute($ids) { if (empty($ids)) { throw new \Magento\Framework\Exception\LocalizedException( __('Could not rebuild index for empty products array') ); } try { $this->_reindexRows($ids); } catch (\Exception $e) { throw new \Magento\Framework\Exception\LocalizedException(__($e->getMessage()), $e); } } }
{ "pile_set_name": "Github" }
# Building and Testing Protractor This document describes building, testing, releasing Protractor and provides an overview of the repository layout. ## Prerequisite software The prerequisite software (Node.js, npm, git, jdk) are the same as for angular. See https://github.com/angular/angular/blob/master/docs/DEVELOPER.md#prerequisite-software ## Getting the sources Fork Protractor from github, then clone your fork with: ```shell git clone [email protected]:<github username>/protractor.git # Go to the Protractor directory: cd protractor/ # Add the main protractor repository as an upstream remote to your repository: git remote add upstream https://github.com/angular/protractor.git ``` ## Installing and Building All Protractor dependencies come from npm. Install with: ```shell npm install ``` This will also trigger our build step. The build step runs the TypeScript compiler and copies necessary files into the output `built` directory. To run the build step independently, run: ```shell npm run prepublish ``` You can see the other available npm scripts in `package.json`. Note that most of these scripts just call our `gulp` commands, which can be seen in `gulpfile.js`. ## Formatting Protractor uses clang-format to format the source code. If the source code is not properly formatted, the CI will fail and the PR can not be merged. You can automatically format your code by running: ```shell npm run format ``` You can check that you will pass lint tests with: ```shell gulp lint # or if you don't have gulp installed globally: ./node_modules/.bin/gulp lint ``` ## Code layout `docs/` contains markdown documentation files. `lib/` contains the actual Protractor code. `scripts/` contains scripts used for CI setup and running tests. `spec/` contains e2e and unit tests and configuration files for tests. `testapp/` contains the code for the Angular applications that e2e tests run against. `website/` contains code for generating Protractor API documentation and the website at protractortest.org. Most of the code is written in TypeScript, with the exception of a few js files. `lib/debugger` is for element explorer, `browser.pause` and `browser.explore`. `lib/driverProviders` controls how WebDriver instances are created. `lib/frameworks` contains adapters for test frameworks such as Jasmine and Mocha. `lib/selenium-webdriver` and `lib/webdriver-js-extender` are used ONLY for API documentation generation. ## Lightning Code Walkthrough TBD. ## Testing Run `npm test` to run the full test suite. This assumes that you have the testapp and a selenium server running. Start these as separate processes with: ```shell webdriver-manager update webdriver-manager start ``` and ```shell npm start ``` This suite is described in `scripts/test.js`. It uses some small helper functions to run commands as child processes and capture the results, so that we can run protractor commands which should result in failures and verify that we get the expected number and type of failures. The suite contains unit tests, end to end tests using the built binary, and interactive tests. Interactive tests are for testing `browser.pause` and element explorer. End to end tests all have configuration files which live in `spec/`. Many tests do not need an actual Selenium server backing them and use the `mockSelenium` configuration, which saves time by not connecting to a real selenium server. 
## Important dependencies

Protractor has very close dependencies with several other projects under the Angular umbrella:

`jasminewd2` is an extension of the Jasmine test framework that adds utilities for working with selenium-webdriver. [jasminewd](https://github.com/angular/jasminewd)

`blocking-proxy` is a separate binary, which handles traffic between a test script and webdriver. It can be turned on via a protractor configuration file, and in the future all logic to wait for Angular will be handled through the blocking proxy. [blocking-proxy](https://github.com/angular/blocking-proxy)

`webdriver-manager` is a separate binary which manages installing and starting up the various binaries necessary for running webdriver tests. These binaries include specific drivers for various browsers (e.g. chromedriver) and the selenium standalone server. [webdriver-manager](https://github.com/angular/webdriver-manager)

`webdriver-js-extender` extends selenium-webdriver to add Appium commands. [webdriver-js-extender](https://github.com/angular/webdriver-js-extender)

## Continuous Integration

PRs or changes submitted to master will automatically trigger continuous integration on two different services - Travis, and Circle CI. We use Travis for tests run with SauceLabs because we have more vm time on Travis and their integration with SauceLabs is smooth. CircleCI gives us greater control over the vm, which allows us to run tests against local browsers and get better logs.

Travis runs e2e tests via SauceLabs against a variety of browsers. The essential browsers run a more complete test suite, specified by `spec/ciFullConf.js`. We also run a set of smoke tests against a larger set of browsers, which is allowed to fail - this is configured in `spec/ciSmokeConf.js`. This is due to flakiness in IE, Safari and older browser versions.

We also run a small set of tests using BrowserStack to verify that our integration with their Selenium farm works.

Circle CI runs a slightly modified version of `npm test` in a single VM. It installs the browsers it needs locally. Circle CI runs unit tests and a set of e2e tests against Chrome.

## Releasing

See [release.md](https://github.com/angular/protractor/blob/master/release.md) for full instructions.
{ "pile_set_name": "Github" }
// // Generated by class-dump 3.5 (64 bit) (Debug version compiled Jun 9 2015 22:53:21). // // class-dump is Copyright (C) 1997-1998, 2000-2001, 2004-2014 by Steve Nygard. // #import "WebViewPlus.h" #import "WebUIDelegate-Protocol.h" @class FindBanner, NSString; @interface SearchableWebView : WebViewPlus <WebUIDelegate> { FindBanner *_findInPageBanner; } - (void).cxx_destruct; - (void)layout; - (void)_commonInitialization; - (void)_updateScroll:(id)arg1; - (void)_didScrollDocumentInFrameView:(id)arg1; - (void)_updateScrollForFrameView:(id)arg1; - (void)printOperationDidRun:(id)arg1 success:(BOOL)arg2 contextInfo:(void *)arg3; - (void)printHelpPage:(id)arg1; - (id)currentHelpPageURL; - (id)helpContentFrame; - (id)mainFrameScrollView; - (void)loadHTMLString:(id)arg1 baseURL:(id)arg2; - (void)loadURLInWebappFrame:(id)arg1; - (void)viewDidMoveToWindow; - (void)_uninstallFindBanners; - (void)awakeFromNib; - (void)close; - (id)initWithFrame:(struct CGRect)arg1 frameName:(id)arg2 groupName:(id)arg3; - (void)focusSearchField; - (void)hideFindInPageBannerAllowingAnimation:(BOOL)arg1; - (void)showFindInPageBannerAllowingAnimation:(BOOL)arg1; - (BOOL)currentPageIsSearchable; - (void)findInPageBannerNeedsUpdateInRect:(struct CGRect)arg1; - (BOOL)findBanner:(id)arg1 doSearchFieldCommandBySelector:(SEL)arg2; - (void)findPrevious; - (BOOL)canFindPrevious; - (void)findNext; - (BOOL)canFindNext; - (BOOL)canHideFindInPageBanner; - (BOOL)canShowFindInPageBanner; - (BOOL)canFocusSearchField; - (BOOL)isShowingFindInPageBanner; - (id)findInPageBanner; // Remaining properties @property(readonly, copy) NSString *debugDescription; @property(readonly, copy) NSString *description; @property(readonly) unsigned long long hash; @property(readonly) Class superclass; @end
{ "pile_set_name": "Github" }
- Start Date: 2014-08-27 - RFC PR: [rust-lang/rfcs#214](https://github.com/rust-lang/rfcs/pull/214) - Rust Issue: [rust-lang/rust#17687](https://github.com/rust-lang/rust/issues/17687) # Summary Introduce a new `while let PAT = EXPR { BODY }` construct. This allows for using a refutable pattern match (with optional variable binding) as the condition of a loop. # Motivation Just as `if let` was inspired by Swift, it turns out Swift supports `while let` as well. This was not discovered until much too late to include it in the `if let` RFC. It turns out that this sort of looping is actually useful on occasion. For example, the desugaring `for` loop is actually a variant on this; if `while let` existed it could have been implemented to map `for PAT in EXPR { BODY }` to ```rust // the match here is so `for` can accept an rvalue for the iterator, // and was used in the "real" desugaring version. match &mut EXPR { i => { while let Some(PAT) = i.next() { BODY } } } ``` (note that the non-desugared form of `for` is no longer equivalent). More generally, this construct can be used any time looping + pattern-matching is desired. This also makes the language a bit more consistent; right now, any condition that can be used with `if` can be used with `while`. The new `if let` adds a form of `if` that doesn't map to `while`. Supporting `while let` restores the equivalence of these two control-flow constructs. # Detailed design `while let` operates similarly to `if let`, in that it desugars to existing syntax. Specifically, the syntax ```rust ['ident:] while let PAT = EXPR { BODY } ``` desugars to ```rust ['ident:] loop { match EXPR { PAT => BODY, _ => break } } ``` Just as with `if let`, an irrefutable pattern given to `while let` is considered an error. This is largely an artifact of the fact that the desugared `match` ends up with an unreachable pattern, and is not actually a goal of this syntax. The error may be suppressed in the future, which would be a backwards-compatible change. Just as with `if let`, `while let` will be introduced under a feature gate (named `while_let`). # Drawbacks Yet another addition to the grammar. Unlike `if let`, it's not obvious how useful this syntax will be. # Alternatives As with `if let`, this could plausibly be done with a macro, but it would be ugly and produce bad error messages. `while let` could be extended to support alternative patterns, just as match arms do. This is not part of the main proposal for the same reason it was left out of `if let`, which is that a) it looks weird, and b) it's a bit of an odd coupling with the `let` keyword as alternatives like this aren't going to be introducing variable bindings. However, it would make `while let` more general and able to replace more instances of `loop { match { ... } }` than is possible with the main design. # Unresolved questions None.
{ "pile_set_name": "Github" }
package de.tum.`in`.tumcampusapp.component.tumui.person.adapteritems import android.content.Context import android.content.Intent import de.tum.`in`.tumcampusapp.R class OfficeHoursContactItem(text: String) : AbstractContactItem(R.string.office_hours, text, R.drawable.ic_outline_access_time_24px) { override fun getIntent(context: Context): Intent? = null }
{ "pile_set_name": "Github" }
/* * Copyright (c) 2003, 2004, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * @test * @bug 4901611 5009693 4920438 * @summary generic class method with vararg gets "java.lang.AssertionError: arraycode T" * @author gafter * * @compile BadSyntax2.java */ class Tclass<T> { T data; public Tclass(T... t){} } public class BadSyntax2 { String s = null; Tclass<String> tc = new Tclass<String>(s); //this gets Assertion public BadSyntax2() {} }
{ "pile_set_name": "Github" }
// Copyright © 2018 SkeletonView. All rights reserved. import Foundation extension Int { var whitespace: String { return whitespaces } var whitespaces: String { return String(repeating: " ", count: self) } }
{ "pile_set_name": "Github" }
[F] Fix Bug.
[S] Style Change.
[A] Add Feature.
[U] Update Config.
[Doc] Update DOC.

## 2020/7/10 (V1.2.1)

- [S] timeline page style changed.
- [F] ribbons did not work.

## 2020/7/10 (V1.2.0)

- [F] #36 add bv parsing

## 2020/5/18 (V1.1.4)

- pjax
- owo
- ribbons

**version tag in candy branch**

## 2019/11/23

[F] #27 - php5 does not support `<?`

## 2019/11/7

[A]: add instantclick
[DOC]: CN README

## 2019/11/4

[S] video style at mobile.
[F] Change the distance of scroll2post(-50px).

## 2019/11/4

[A] Add video Short Code Support.

## 2019/11/3

[A] You need to add the truncation marker `<!-- more -->` to control output (Home Article & ...).

## 2019/11/3 (V1.1.4)

- [S] replace Site Title animation; the Title animation is removed.
- [S] Banner height 550px -> 350px (PC)
- [F] fix topView post order.

## 2019/11/2

- [F] Fix ToC display error (layout) when an article has too many headings.
- [A] add jp & zh_tw lang.
- [F] Replace DB insert with typecho custom field (custom fields replace direct database operations and do not break the existing database structure).
- [F] fix site title position (mobile)

<!--  -->

## 2018/10/13 v1.1

- [U]: refactor JS

## 2018/5/2 v1.0.3

- [remove]: remove auto load article & unused code & DNS Prefetch.

## 2018/5/1

- fix [#17](https://github.com/shiyiya/typecho-theme-sagiri/issues/17)
- configurable article thumbnails.

## 2018/2/11

- [fix] image lazy loading

## 2018/1/28

- [fix] header configuration error.
- [feature] optional banner display.
- [bug] fix any bugs.

<!-- more -->

## 2018/10/30

- Fix thumbnail issue on the archive page
- Adjust the position of the leading `#` before headings
- Add background color for block-level code
- Change the friend-links style
- Other color consistency fixes
- Readme documentation update

## 2018/11/5

- Image lazy loading
- ToC tree now fetched a different way (wastes performance; nested ToC not supported yet)
- Related articles added to the article sidebar
- gulp config changed; path references use the min files
- Other

## 2018/11/18

- [feature] image lazy loading: background-image loading misalignment -> adaptive
- [feature] added popular posts and most-commented posts to the sidebar
- [feature] supported >= ie10
- [fix] fixed wrong distance (500 -> 200) between the header and the article content background when scrolling on mobile
- [fix] fixed article card content overflow -> ...
- [fix] fixed view count not showing on php7+; custom field -> insert Db // incompatible with the old method
- [fix] OwO emoticons are not loaded when comments are disabled

## 2018/12/09

- [feature] Fastclick: FastClick is a simple, easy-to-use library for eliminating the 300ms delay between a physical tap and the firing of a click event on mobile browsers.
- [feature] specify an image URL as the header image.
- [bug] fix any bugs.

## todo
{ "pile_set_name": "Github" }
/* * lib/route/route.c Routes * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation version 2.1 * of the License. * * Copyright (c) 2003-2006 Thomas Graf <[email protected]> */ /** * @ingroup rtnl * @defgroup route Routing * @brief * @{ */ #include <netlink-local.h> #include <netlink/netlink.h> #include <netlink/cache.h> #include <netlink/utils.h> #include <netlink/data.h> #include <netlink/route/rtnl.h> #include <netlink/route/route.h> #include <netlink/route/link.h> static struct nl_cache_ops rtnl_route_ops; static struct nla_policy route_policy[RTA_MAX+1] = { [RTA_IIF] = { .type = NLA_STRING, .maxlen = IFNAMSIZ, }, [RTA_OIF] = { .type = NLA_U32 }, [RTA_PRIORITY] = { .type = NLA_U32 }, [RTA_FLOW] = { .type = NLA_U32 }, [RTA_MP_ALGO] = { .type = NLA_U32 }, [RTA_CACHEINFO] = { .minlen = sizeof(struct rta_cacheinfo) }, [RTA_METRICS] = { .type = NLA_NESTED }, [RTA_MULTIPATH] = { .type = NLA_NESTED }, }; static void copy_cacheinfo_into_route(struct rta_cacheinfo *ci, struct rtnl_route *route) { struct rtnl_rtcacheinfo nci = { .rtci_clntref = ci->rta_clntref, .rtci_last_use = ci->rta_lastuse, .rtci_expires = ci->rta_expires, .rtci_error = ci->rta_error, .rtci_used = ci->rta_used, .rtci_id = ci->rta_id, .rtci_ts = ci->rta_ts, .rtci_tsage = ci->rta_tsage, }; rtnl_route_set_cacheinfo(route, &nci); } static int route_msg_parser(struct nl_cache_ops *ops, struct sockaddr_nl *who, struct nlmsghdr *nlh, struct nl_parser_param *pp) { struct rtmsg *rtm; struct rtnl_route *route; struct nlattr *tb[RTA_MAX + 1]; struct nl_addr *src = NULL, *dst = NULL, *addr; int err; route = rtnl_route_alloc(); if (!route) { err = nl_errno(ENOMEM); goto errout; } route->ce_msgtype = nlh->nlmsg_type; err = nlmsg_parse(nlh, sizeof(struct rtmsg), tb, RTA_MAX, route_policy); if (err < 0) goto errout; rtm = nlmsg_data(nlh); rtnl_route_set_family(route, rtm->rtm_family); rtnl_route_set_tos(route, rtm->rtm_tos); rtnl_route_set_table(route, rtm->rtm_table); rtnl_route_set_type(route, rtm->rtm_type); rtnl_route_set_scope(route, rtm->rtm_scope); rtnl_route_set_protocol(route, rtm->rtm_protocol); rtnl_route_set_flags(route, rtm->rtm_flags); if (tb[RTA_DST]) { dst = nla_get_addr(tb[RTA_DST], rtm->rtm_family); if (dst == NULL) goto errout_errno; } else { dst = nl_addr_alloc(0); nl_addr_set_family(dst, rtm->rtm_family); } nl_addr_set_prefixlen(dst, rtm->rtm_dst_len); err = rtnl_route_set_dst(route, dst); if (err < 0) goto errout; nl_addr_put(dst); if (tb[RTA_SRC]) { src = nla_get_addr(tb[RTA_SRC], rtm->rtm_family); if (src == NULL) goto errout_errno; } else if (rtm->rtm_src_len) src = nl_addr_alloc(0); if (src) { nl_addr_set_prefixlen(src, rtm->rtm_src_len); rtnl_route_set_src(route, src); nl_addr_put(src); } if (tb[RTA_IIF]) rtnl_route_set_iif(route, nla_get_string(tb[RTA_IIF])); if (tb[RTA_OIF]) rtnl_route_set_oif(route, nla_get_u32(tb[RTA_OIF])); if (tb[RTA_GATEWAY]) { addr = nla_get_addr(tb[RTA_GATEWAY], route->rt_family); if (addr == NULL) goto errout_errno; rtnl_route_set_gateway(route, addr); nl_addr_put(addr); } if (tb[RTA_PRIORITY]) rtnl_route_set_prio(route, nla_get_u32(tb[RTA_PRIORITY])); if (tb[RTA_PREFSRC]) { addr = nla_get_addr(tb[RTA_PREFSRC], route->rt_family); if (addr == NULL) goto errout_errno; rtnl_route_set_pref_src(route, addr); nl_addr_put(addr); } if (tb[RTA_METRICS]) { struct nlattr *mtb[RTAX_MAX + 1]; int i; err = nla_parse_nested(mtb, RTAX_MAX, tb[RTA_METRICS], NULL); if (err < 0) 
goto errout; for (i = 1; i <= RTAX_MAX; i++) { if (mtb[i] && nla_len(mtb[i]) >= sizeof(uint32_t)) { uint32_t m = nla_get_u32(mtb[i]); if (rtnl_route_set_metric(route, i, m) < 0) goto errout_errno; } } } if (tb[RTA_MULTIPATH]) { struct rtnl_nexthop *nh; struct rtnexthop *rtnh = nla_data(tb[RTA_MULTIPATH]); size_t tlen = nla_len(tb[RTA_MULTIPATH]); while (tlen >= sizeof(*rtnh) && tlen >= rtnh->rtnh_len) { nh = rtnl_route_nh_alloc(); if (!nh) goto errout; rtnl_route_nh_set_weight(nh, rtnh->rtnh_hops); rtnl_route_nh_set_ifindex(nh, rtnh->rtnh_ifindex); rtnl_route_nh_set_flags(nh, rtnh->rtnh_flags); if (rtnh->rtnh_len > sizeof(*rtnh)) { struct nlattr *ntb[RTA_MAX + 1]; nla_parse(ntb, RTA_MAX, (struct nlattr *) RTNH_DATA(rtnh), rtnh->rtnh_len - sizeof(*rtnh), route_policy); if (ntb[RTA_GATEWAY]) { nh->rtnh_gateway = nla_get_addr( ntb[RTA_GATEWAY], route->rt_family); nh->rtnh_mask = NEXTHOP_HAS_GATEWAY; } } rtnl_route_add_nexthop(route, nh); tlen -= RTNH_ALIGN(rtnh->rtnh_len); rtnh = RTNH_NEXT(rtnh); } } if (tb[RTA_FLOW]) rtnl_route_set_realms(route, nla_get_u32(tb[RTA_FLOW])); if (tb[RTA_CACHEINFO]) copy_cacheinfo_into_route(nla_data(tb[RTA_CACHEINFO]), route); if (tb[RTA_MP_ALGO]) rtnl_route_set_mp_algo(route, nla_get_u32(tb[RTA_MP_ALGO])); err = pp->pp_cb((struct nl_object *) route, pp); if (err < 0) goto errout; err = P_ACCEPT; errout: rtnl_route_put(route); return err; errout_errno: err = nl_get_errno(); goto errout; } static int route_request_update(struct nl_cache *c, struct nl_handle *h) { return nl_rtgen_request(h, RTM_GETROUTE, AF_UNSPEC, NLM_F_DUMP); } /** * @name Cache Management * @{ */ /** * Build a route cache holding all routes currently configured in the kernel * @arg handle netlink handle * * Allocates a new cache, initializes it properly and updates it to * contain all routes currently configured in the kernel. * * @note The caller is responsible for destroying and freeing the * cache after using it. * @return The cache or NULL if an error has occured. 
*/ struct nl_cache *rtnl_route_alloc_cache(struct nl_handle *handle) { struct nl_cache *cache; cache = nl_cache_alloc(&rtnl_route_ops); if (!cache) return NULL; if (handle && nl_cache_refill(handle, cache) < 0) { free(cache); return NULL; } return cache; } /** @} */ /** * @name Route Addition * @{ */ static struct nl_msg *build_route_msg(struct rtnl_route *tmpl, int cmd, int flags) { struct nl_msg *msg; struct nl_addr *addr; int scope, i, oif, nmetrics = 0; struct nlattr *metrics; struct rtmsg rtmsg = { .rtm_family = rtnl_route_get_family(tmpl), .rtm_dst_len = rtnl_route_get_dst_len(tmpl), .rtm_src_len = rtnl_route_get_src_len(tmpl), .rtm_tos = rtnl_route_get_tos(tmpl), .rtm_table = rtnl_route_get_table(tmpl), .rtm_type = rtnl_route_get_type(tmpl), .rtm_protocol = rtnl_route_get_protocol(tmpl), .rtm_flags = rtnl_route_get_flags(tmpl), }; if (rtmsg.rtm_family == AF_UNSPEC) { nl_error(EINVAL, "Cannot build route message, address " \ "family is unknown."); return NULL; } scope = rtnl_route_get_scope(tmpl); if (scope == RT_SCOPE_NOWHERE) { if (rtmsg.rtm_type == RTN_LOCAL) scope = RT_SCOPE_HOST; else { /* XXX Change to UNIVERSE if gw || nexthops */ scope = RT_SCOPE_LINK; } } rtmsg.rtm_scope = scope; msg = nlmsg_alloc_simple(cmd, flags); if (msg == NULL) return NULL; if (nlmsg_append(msg, &rtmsg, sizeof(rtmsg), NLMSG_ALIGNTO) < 0) goto nla_put_failure; addr = rtnl_route_get_dst(tmpl); if (addr) NLA_PUT_ADDR(msg, RTA_DST, addr); addr = rtnl_route_get_src(tmpl); if (addr) NLA_PUT_ADDR(msg, RTA_SRC, addr); addr = rtnl_route_get_gateway(tmpl); if (addr) NLA_PUT_ADDR(msg, RTA_GATEWAY, addr); addr = rtnl_route_get_pref_src(tmpl); if (addr) NLA_PUT_ADDR(msg, RTA_PREFSRC, addr); NLA_PUT_U32(msg, RTA_PRIORITY, rtnl_route_get_prio(tmpl)); oif = rtnl_route_get_oif(tmpl); if (oif != RTNL_LINK_NOT_FOUND) NLA_PUT_U32(msg, RTA_OIF, oif); for (i = 1; i <= RTAX_MAX; i++) if (rtnl_route_get_metric(tmpl, i) != UINT_MAX) nmetrics++; if (nmetrics > 0) { unsigned int val; metrics = nla_nest_start(msg, RTA_METRICS); if (metrics == NULL) goto nla_put_failure; for (i = 1; i <= RTAX_MAX; i++) { val = rtnl_route_get_metric(tmpl, i); if (val != UINT_MAX) NLA_PUT_U32(msg, i, val); } nla_nest_end(msg, metrics); } #if 0 RTA_IIF, RTA_MULTIPATH, RTA_PROTOINFO, RTA_FLOW, RTA_CACHEINFO, RTA_SESSION, RTA_MP_ALGO, #endif return msg; nla_put_failure: nlmsg_free(msg); return NULL; } struct nl_msg *rtnl_route_build_add_request(struct rtnl_route *tmpl, int flags) { return build_route_msg(tmpl, RTM_NEWROUTE, NLM_F_CREATE | flags); } int rtnl_route_add(struct nl_handle *handle, struct rtnl_route *route, int flags) { struct nl_msg *msg; int err; msg = rtnl_route_build_add_request(route, flags); if (!msg) return nl_get_errno(); err = nl_send_auto_complete(handle, msg); nlmsg_free(msg); if (err < 0) return err; return nl_wait_for_ack(handle); } struct nl_msg *rtnl_route_build_del_request(struct rtnl_route *tmpl, int flags) { return build_route_msg(tmpl, RTM_DELROUTE, flags); } int rtnl_route_del(struct nl_handle *handle, struct rtnl_route *route, int flags) { struct nl_msg *msg; int err; msg = rtnl_route_build_del_request(route, flags); if (!msg) return nl_get_errno(); err = nl_send_auto_complete(handle, msg); nlmsg_free(msg); if (err < 0) return err; return nl_wait_for_ack(handle); } /** @} */ static struct nl_af_group route_groups[] = { { AF_INET, RTNLGRP_IPV4_ROUTE }, { AF_INET6, RTNLGRP_IPV6_ROUTE }, { AF_DECnet, RTNLGRP_DECnet_ROUTE }, { END_OF_GROUP_LIST }, }; static struct nl_cache_ops rtnl_route_ops = { .co_name = "route/route", 
.co_hdrsize = sizeof(struct rtmsg), .co_msgtypes = { { RTM_NEWROUTE, NL_ACT_NEW, "new" }, { RTM_DELROUTE, NL_ACT_DEL, "del" }, { RTM_GETROUTE, NL_ACT_GET, "get" }, END_OF_MSGTYPES_LIST, }, .co_protocol = NETLINK_ROUTE, .co_groups = route_groups, .co_request_update = route_request_update, .co_msg_parser = route_msg_parser, .co_obj_ops = &route_obj_ops, }; static void __init route_init(void) { nl_cache_mngt_register(&rtnl_route_ops); } static void __exit route_exit(void) { nl_cache_mngt_unregister(&rtnl_route_ops); } /** @} */
{ "pile_set_name": "Github" }
%YAML 1.1 %TAG !u! tag:unity3d.com,2011: --- !u!11 &1 AudioManager: m_ObjectHideFlags: 0 m_Volume: 1 Rolloff Scale: 1 Doppler Factor: 1 Default Speaker Mode: 2 m_SampleRate: 0 m_DSPBufferSize: 0 m_VirtualVoiceCount: 512 m_RealVoiceCount: 32 m_SpatializerPlugin: m_DisableAudio: 0 m_VirtualizeEffects: 1
{ "pile_set_name": "Github" }
# Changelog All notable changes to this project will be documented in this file. ## [1.3.1] - 2019-10-14 ### Added - Allow custom binary paths to be specified in the config file - This also includes two new env variables: SNIPE_BINARY_MYSQL and SNIPE_BINARY_MYSQLDUMP ## [1.1.4] - 2019-03-14 ### Changed - Changed minimum required PHP version to 7.1 - Downgraded PHPUnit from 8.0 to 7.0 as a result of PHP version change ## [1.0.2] - 2019-02-23 ### Changed - merged PR to recursively scan package migration folders and app migration folders. - Added a trait that can be added to TestsCase.php to remove the need to add a block of code to TestCase.php - Refactored Snipe.php to only scan for migration changes one time to improve performance and clean up file. - Added feature to use Snipe migrations for DatabaseTransactions ***AND*** RefreshDatabase Traits. ## [1.0.2] - 2019-02-23 ### Changed - merged PR to recursively scan migrations folder for changes. ## [1.0.1] - 2019-02-21 ### Changed - merged PR to remove redundant code in setUpTraits() method. ## [1.0.0] - 2019-02-19 ### Added - initial release
{ "pile_set_name": "Github" }
/* * Arnaldo Carvalho de Melo <[email protected]>, 2005 * * Released under the terms of the GNU GPL v2.0 */ #include <stdlib.h> #include <string.h> #include "lkc.h" static char *escape(const char* text, char *bf, int len) { char *bfp = bf; int multiline = strchr(text, '\n') != NULL; int eol = 0; int textlen = strlen(text); if ((textlen > 0) && (text[textlen-1] == '\n')) eol = 1; *bfp++ = '"'; --len; if (multiline) { *bfp++ = '"'; *bfp++ = '\n'; *bfp++ = '"'; len -= 3; } while (*text != '\0' && len > 1) { if (*text == '"') *bfp++ = '\\'; else if (*text == '\n') { *bfp++ = '\\'; *bfp++ = 'n'; *bfp++ = '"'; *bfp++ = '\n'; *bfp++ = '"'; len -= 5; ++text; goto next; } else if (*text == '\\') { *bfp++ = '\\'; len--; } *bfp++ = *text++; next: --len; } if (multiline && eol) bfp -= 3; *bfp++ = '"'; *bfp = '\0'; return bf; } struct file_line { struct file_line *next; const char *file; int lineno; }; static struct file_line *file_line__new(const char *file, int lineno) { struct file_line *self = malloc(sizeof(*self)); if (self == NULL) goto out; self->file = file; self->lineno = lineno; self->next = NULL; out: return self; } struct message { const char *msg; const char *option; struct message *next; struct file_line *files; }; static struct message *message__list; static struct message *message__new(const char *msg, char *option, const char *file, int lineno) { struct message *self = malloc(sizeof(*self)); if (self == NULL) goto out; self->files = file_line__new(file, lineno); if (self->files == NULL) goto out_fail; self->msg = xstrdup(msg); if (self->msg == NULL) goto out_fail_msg; self->option = option; self->next = NULL; out: return self; out_fail_msg: free(self->files); out_fail: free(self); self = NULL; goto out; } static struct message *mesage__find(const char *msg) { struct message *m = message__list; while (m != NULL) { if (strcmp(m->msg, msg) == 0) break; m = m->next; } return m; } static int message__add_file_line(struct message *self, const char *file, int lineno) { int rc = -1; struct file_line *fl = file_line__new(file, lineno); if (fl == NULL) goto out; fl->next = self->files; self->files = fl; rc = 0; out: return rc; } static int message__add(const char *msg, char *option, const char *file, int lineno) { int rc = 0; char bf[16384]; char *escaped = escape(msg, bf, sizeof(bf)); struct message *m = mesage__find(escaped); if (m != NULL) rc = message__add_file_line(m, file, lineno); else { m = message__new(escaped, option, file, lineno); if (m != NULL) { m->next = message__list; message__list = m; } else rc = -1; } return rc; } static void menu_build_message_list(struct menu *menu) { struct menu *child; message__add(menu_get_prompt(menu), NULL, menu->file == NULL ? "Root Menu" : menu->file->name, menu->lineno); if (menu->sym != NULL && menu_has_help(menu)) message__add(menu_get_help(menu), menu->sym->name, menu->file == NULL ? 
"Root Menu" : menu->file->name, menu->lineno); for (child = menu->list; child != NULL; child = child->next) if (child->prompt != NULL) menu_build_message_list(child); } static void message__print_file_lineno(struct message *self) { struct file_line *fl = self->files; putchar('\n'); if (self->option != NULL) printf("# %s:00000\n", self->option); printf("#: %s:%d", fl->file, fl->lineno); fl = fl->next; while (fl != NULL) { printf(", %s:%d", fl->file, fl->lineno); fl = fl->next; } putchar('\n'); } static void message__print_gettext_msgid_msgstr(struct message *self) { message__print_file_lineno(self); printf("msgid %s\n" "msgstr \"\"\n", self->msg); } static void menu__xgettext(void) { struct message *m = message__list; while (m != NULL) { /* skip empty lines ("") */ if (strlen(m->msg) > sizeof("\"\"")) message__print_gettext_msgid_msgstr(m); m = m->next; } } int main(int ac, char **av) { conf_parse(av[1]); menu_build_message_list(menu_get_root_menu(NULL)); menu__xgettext(); return 0; }
{ "pile_set_name": "Github" }
package Solution func Solution(x bool) bool { return x }
{ "pile_set_name": "Github" }
<?xml version="1.0" encoding="UTF-8" standalone="yes"?> <?mso-application progid="Word.Document"?> <pkg:package xmlns:pkg="http://schemas.microsoft.com/office/2006/xmlPackage"> <pkg:part pkg:name="/_rels/.rels" pkg:contentType="application/vnd.openxmlformats-package.relationships+xml" pkg:padding="512"> <pkg:xmlData> <Relationships xmlns="http://schemas.openxmlformats.org/package/2006/relationships"> <Relationship Id="rId3" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/extended-properties" Target="docProps/app.xml"/> <Relationship Id="rId2" Type="http://schemas.openxmlformats.org/package/2006/relationships/metadata/core-properties" Target="docProps/core.xml"/> <Relationship Id="rId1" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/officeDocument" Target="word/document.xml"/> </Relationships> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/_rels/document.xml.rels" pkg:contentType="application/vnd.openxmlformats-package.relationships+xml" pkg:padding="256"> <pkg:xmlData> <Relationships xmlns="http://schemas.openxmlformats.org/package/2006/relationships"> <Relationship Id="rId8" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/image" Target="media/image3.png"/> <Relationship Id="rId3" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/webSettings" Target="webSettings.xml"/> <Relationship Id="rId7" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/image" Target="media/image2.png"/> <Relationship Id="rId12" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/theme" Target="theme/theme1.xml"/> <Relationship Id="rId2" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/settings" Target="settings.xml"/> <Relationship Id="rId1" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/styles" Target="styles.xml"/> <Relationship Id="rId6" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/image" Target="media/image1.png"/> <Relationship Id="rId11" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/fontTable" Target="fontTable.xml"/> <Relationship Id="rId5" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/endnotes" Target="endnotes.xml"/> <Relationship Id="rId10" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/footer" Target="footer1.xml"/> <Relationship Id="rId4" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/footnotes" Target="footnotes.xml"/> <Relationship Id="rId9" Type="http://schemas.openxmlformats.org/officeDocument/2006/relationships/header" Target="header1.xml"/> </Relationships> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/document.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml"> <pkg:xmlData> <w:document xmlns:ve="http://schemas.openxmlformats.org/markup-compatibility/2006" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns:m="http://schemas.openxmlformats.org/officeDocument/2006/math" xmlns:v="urn:schemas-microsoft-com:vml" xmlns:wp="http://schemas.openxmlformats.org/drawingml/2006/wordprocessingDrawing" xmlns:w10="urn:schemas-microsoft-com:office:word" xmlns:w="http://schemas.openxmlformats.org/wordprocessingml/2006/main" xmlns:wne="http://schemas.microsoft.com/office/word/2006/wordml"> <w:body> <w:p w:rsidR="006F733E" 
w:rsidRDefault="006F733E" w:rsidP="00B96DB7"/> <w:sectPr w:rsidR="006F733E" w:rsidSect="00395111"> <w:headerReference w:type="default" r:id="rId9"/> <w:footerReference w:type="default" r:id="rId10"/> <w:pgSz w:w="11906" w:h="16838"/> <w:pgMar w:top="1440" w:right="1440" w:bottom="1440" w:left="1440" w:header="624" w:footer="624" w:gutter="0"/> <w:cols w:space="708"/> <w:docGrid w:linePitch="360"/> </w:sectPr> </w:body> </w:document> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/footnotes.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml"> <pkg:xmlData> <w:footnotes xmlns:ve="http://schemas.openxmlformats.org/markup-compatibility/2006" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns:m="http://schemas.openxmlformats.org/officeDocument/2006/math" xmlns:v="urn:schemas-microsoft-com:vml" xmlns:wp="http://schemas.openxmlformats.org/drawingml/2006/wordprocessingDrawing" xmlns:w10="urn:schemas-microsoft-com:office:word" xmlns:w="http://schemas.openxmlformats.org/wordprocessingml/2006/main" xmlns:wne="http://schemas.microsoft.com/office/word/2006/wordml"> <w:footnote w:type="separator" w:id="-1"> <w:p w:rsidR="004651E5" w:rsidRDefault="004651E5" w:rsidP="0050335B"> <w:pPr> <w:spacing w:after="0" w:line="240" w:lineRule="auto"/> </w:pPr> <w:r> <w:separator/> </w:r> </w:p> </w:footnote> <w:footnote w:type="continuationSeparator" w:id="0"> <w:p w:rsidR="004651E5" w:rsidRDefault="004651E5" w:rsidP="0050335B"> <w:pPr> <w:spacing w:after="0" w:line="240" w:lineRule="auto"/> </w:pPr> <w:r> <w:continuationSeparator/> </w:r> </w:p> </w:footnote> </w:footnotes> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/header1.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.wordprocessingml.header+xml"> <pkg:xmlData> <w:hdr xmlns:wpc="http://schemas.microsoft.com/office/word/2010/wordprocessingCanvas" xmlns:mc="http://schemas.openxmlformats.org/markup-compatibility/2006" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns:m="http://schemas.openxmlformats.org/officeDocument/2006/math" xmlns:v="urn:schemas-microsoft-com:vml" xmlns:wp14="http://schemas.microsoft.com/office/word/2010/wordprocessingDrawing" xmlns:wp="http://schemas.openxmlformats.org/drawingml/2006/wordprocessingDrawing" xmlns:w10="urn:schemas-microsoft-com:office:word" xmlns:w="http://schemas.openxmlformats.org/wordprocessingml/2006/main" xmlns:w14="http://schemas.microsoft.com/office/word/2010/wordml" xmlns:wpg="http://schemas.microsoft.com/office/word/2010/wordprocessingGroup" xmlns:wpi="http://schemas.microsoft.com/office/word/2010/wordprocessingInk" xmlns:wne="http://schemas.microsoft.com/office/word/2006/wordml" xmlns:wps="http://schemas.microsoft.com/office/word/2010/wordprocessingShape" mc:Ignorable="w14 wp14"> <w:p w:rsidR="00EA10E8" w:rsidRDefault="00CE3F14"> <w:r> <w:rPr> <w:noProof/> </w:rPr> <mc:AlternateContent> <mc:Choice Requires="wps"> <w:drawing> <wp:anchor distT="0" distB="0" distL="114300" distR="114300" simplePos="0" relativeHeight="251659264" behindDoc="0" locked="0" layoutInCell="1" allowOverlap="1"> <wp:simplePos x="0" y="0"/> <wp:positionH relativeFrom="column"> <wp:posOffset>584200</wp:posOffset> </wp:positionH> <wp:positionV relativeFrom="paragraph"> <wp:posOffset>127000</wp:posOffset> </wp:positionV> <wp:extent cx="584200" cy="374650"/> <wp:effectExtent l="0" t="0" r="25400" 
b="25400"/> <wp:wrapNone/> <wp:docPr id="1" name="Oval 1"/> <wp:cNvGraphicFramePr/> <a:graphic xmlns:a="http://schemas.openxmlformats.org/drawingml/2006/main"> <a:graphicData uri="http://schemas.microsoft.com/office/word/2010/wordprocessingShape"> <wps:wsp> <wps:cNvSpPr/> <wps:spPr> <a:xfrm> <a:off x="0" y="0"/> <a:ext cx="584200" cy="374650"/> </a:xfrm> <a:prstGeom prst="ellipse"> <a:avLst/> </a:prstGeom> </wps:spPr> <wps:style> <a:lnRef idx="2"> <a:schemeClr val="accent1"> <a:shade val="50000"/> </a:schemeClr> </a:lnRef> <a:fillRef idx="1"> <a:schemeClr val="accent1"/> </a:fillRef> <a:effectRef idx="0"> <a:schemeClr val="accent1"/> </a:effectRef> <a:fontRef idx="minor"> <a:schemeClr val="lt1"/> </a:fontRef> </wps:style> <wps:bodyPr rot="0" spcFirstLastPara="0" vertOverflow="overflow" horzOverflow="overflow" vert="horz" wrap="square" lIns="91440" tIns="45720" rIns="91440" bIns="45720" numCol="1" spcCol="0" rtlCol="0" fromWordArt="0" anchor="ctr" anchorCtr="0" forceAA="0" compatLnSpc="1"> <a:prstTxWarp prst="textNoShape"> <a:avLst/> </a:prstTxWarp> <a:noAutofit/> </wps:bodyPr> </wps:wsp> </a:graphicData> </a:graphic> </wp:anchor> </w:drawing> </mc:Choice> <mc:Fallback> <w:pict> <v:oval id="Oval 1" o:spid="_x0000_s1026" style="position:absolute;margin-left:46pt;margin-top:10pt;width:46pt;height:29.5pt;z-index:251659264;visibility:visible;mso-wrap-style:square;mso-wrap-distance-left:9pt;mso-wrap-distance-top:0;mso-wrap-distance-right:9pt;mso-wrap-distance-bottom:0;mso-position-horizontal:absolute;mso-position-horizontal-relative:text;mso-position-vertical:absolute;mso-position-vertical-relative:text;v-text-anchor:middle" o:gfxdata="UEsDBBQABgAIAAAAIQC2gziS/gAAAOEBAAATAAAAW0NvbnRlbnRfVHlwZXNdLnhtbJSRQU7DMBBF&#xA;90jcwfIWJU67QAgl6YK0S0CoHGBkTxKLZGx5TGhvj5O2G0SRWNoz/78nu9wcxkFMGNg6quQqL6RA&#xA;0s5Y6ir5vt9lD1JwBDIwOMJKHpHlpr69KfdHjyxSmriSfYz+USnWPY7AufNIadK6MEJMx9ApD/oD&#xA;OlTrorhX2lFEilmcO2RdNtjC5xDF9pCuTyYBB5bi6bQ4syoJ3g9WQ0ymaiLzg5KdCXlKLjvcW893&#xA;SUOqXwnz5DrgnHtJTxOsQfEKIT7DmDSUCaxw7Rqn8787ZsmRM9e2VmPeBN4uqYvTtW7jvijg9N/y&#xA;JsXecLq0q+WD6m8AAAD//wMAUEsDBBQABgAIAAAAIQA4/SH/1gAAAJQBAAALAAAAX3JlbHMvLnJl&#xA;bHOkkMFqwzAMhu+DvYPRfXGawxijTi+j0GvpHsDYimMaW0Yy2fr2M4PBMnrbUb/Q94l/f/hMi1qR&#xA;JVI2sOt6UJgd+ZiDgffL8ekFlFSbvV0oo4EbChzGx4f9GRdb25HMsYhqlCwG5lrLq9biZkxWOiqY&#xA;22YiTra2kYMu1l1tQD30/bPm3wwYN0x18gb45AdQl1tp5j/sFB2T0FQ7R0nTNEV3j6o9feQzro1i&#xA;OWA14Fm+Q8a1a8+Bvu/d/dMb2JY5uiPbhG/ktn4cqGU/er3pcvwCAAD//wMAUEsDBBQABgAIAAAA&#xA;IQCkeOpucAIAADYFAAAOAAAAZHJzL2Uyb0RvYy54bWysVE1PGzEQvVfqf7B8L5ukCdCIDYpAVJUQ&#xA;oELF2Xht1pLtccdONumv79i7WVBBPVTNwZnxzLz58Js9O985y7YKowFf8+nRhDPlJTTGP9f8x8PV&#xA;p1POYhK+ERa8qvleRX6++vjhrAtLNYMWbKOQEYiPyy7UvE0pLKsqylY5EY8gKE9GDehEIhWfqwZF&#xA;R+jOVrPJ5LjqAJuAIFWMdHvZG/mq4GutZLrVOqrEbM2ptlROLOdTPqvVmVg+owitkUMZ4h+qcMJ4&#xA;SjpCXYok2AbNGyhnJEIEnY4kuAq0NlKVHqib6eSPbu5bEVTphYYTwzim+P9g5c32Dplp6O0488LR&#xA;E91uhWXTPJkuxCU53Ic7HLRIYm5zp9Hlf2qA7co09+M01S4xSZeL0zm9EGeSTJ9P5seLMu3qJThg&#xA;TF8VOJaFmitrTYi5X7EU2+uYKCd5H7xIyfX0FRQp7a3KztZ/V5p6oJyzEl3Yoy4sMmql5kJK5dO0&#xA;N7WiUf31YkK/3CYlGSOKVgAzsjbWjtgDQGbmW+weZvDPoaqQbwye/K2wPniMKJnBpzHYGQ/4HoCl&#xA;robMvf9hSP1o8pSeoNnTCyP01I9BXhka97WI6U4gcZ1eiPY33dKhLXQ1h0HirAX89d599icKkpWz&#xA;jnan5vHnRqDizH7zRM4v0/k8L1tR5ouTGSn42vL02uI37gLomYiAVF0Rs3+yB1EjuEda83XOSibh&#xA;JeWuuUx4UC5Sv9P0oZBqvS5utGBBpGt/H2QGz1PNXHrYPQoMA+cSkfUGDnv2hne9b470sN4k0KaQ&#xA;8mWuw7xpOQtxhg9J3v7XevF6+dytfgMAAP//AwBQSwMEFAAGAAgAAAAhAOtyLKXgAAAACAEAAA8A&#xA;AABkcnMvZG93bnJldi54bWxMj8FOwzAQRO9I/IO1SFxQ61Ch0IZsKopULnAoLQi4bWOTRMTrELtt&#xA;4OvZnuC2qxnNvMnng2vV3vah8YxwOU5AWS
69abhCeN4sR1NQIRIbaj1bhG8bYF6cnuSUGX/gJ7tf&#xA;x0pJCIeMEOoYu0zrUNbWURj7zrJoH753FOXtK216Oki4a/UkSVLtqGFpqKmzd7UtP9c7h/CeLhec&#xA;rh4u+LEL5eLlnn7eXr8Qz8+G2xtQ0Q7xzwxHfEGHQpi2fscmqBZhNpEpEUFaQB316ZUcW4TrWQK6&#xA;yPX/AcUvAAAA//8DAFBLAQItABQABgAIAAAAIQC2gziS/gAAAOEBAAATAAAAAAAAAAAAAAAAAAAA&#xA;AABbQ29udGVudF9UeXBlc10ueG1sUEsBAi0AFAAGAAgAAAAhADj9If/WAAAAlAEAAAsAAAAAAAAA&#xA;AAAAAAAALwEAAF9yZWxzLy5yZWxzUEsBAi0AFAAGAAgAAAAhAKR46m5wAgAANgUAAA4AAAAAAAAA&#xA;AAAAAAAALgIAAGRycy9lMm9Eb2MueG1sUEsBAi0AFAAGAAgAAAAhAOtyLKXgAAAACAEAAA8AAAAA&#xA;AAAAAAAAAAAAygQAAGRycy9kb3ducmV2LnhtbFBLBQYAAAAABAAEAPMAAADXBQAAAAA=&#xA;" fillcolor="#4f81bd [3204]" strokecolor="#243f60 [1604]" strokeweight="2pt"/> </w:pict> </mc:Fallback> </mc:AlternateContent> </w:r> </w:p> </w:hdr> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/footer1.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml"> <pkg:xmlData> <w:ftr xmlns:ve="http://schemas.openxmlformats.org/markup-compatibility/2006" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns:m="http://schemas.openxmlformats.org/officeDocument/2006/math" xmlns:v="urn:schemas-microsoft-com:vml" xmlns:wp="http://schemas.openxmlformats.org/drawingml/2006/wordprocessingDrawing" xmlns:w10="urn:schemas-microsoft-com:office:word" xmlns:w="http://schemas.openxmlformats.org/wordprocessingml/2006/main" xmlns:wne="http://schemas.microsoft.com/office/word/2006/wordml"> <w:p w:rsidR="00ED44B0" w:rsidRPr="00ED44B0" w:rsidRDefault="00330EDA" w:rsidP="00E20F78"> <w:pPr> <w:pStyle w:val="Style1"/> <w:jc w:val="left"/> </w:pPr> <w:r> <w:t xml:space="preserve">Transmittal: </w:t> </w:r> <w:sdt> <w:sdtPr> <w:id w:val="6571418"/> <w:text/> </w:sdtPr> <w:sdtContent> <w:r w:rsidR="0021671A"> <w:t>${</w:t> </w:r> <w:proofErr w:type="spellStart"/> <w:r w:rsidR="0021671A"> <w:t>tr.out.ecs_trans_number</w:t> </w:r> <w:r w:rsidR="003D412C"> <w:t>_GIBBA</w:t> </w:r> <w:proofErr w:type="spellEnd"/> <w:r w:rsidR="0021671A"> <w:t>}</w:t> </w:r> </w:sdtContent> </w:sdt> <w:r> <w:tab/> </w:r> <w:r> <w:tab/> </w:r> <w:r w:rsidRPr="00ED44B0"> <w:t xml:space="preserve">Page </w:t> </w:r> <w:r w:rsidR="00CB55CF" w:rsidRPr="00ED44B0"> <w:fldChar w:fldCharType="begin"/> </w:r> <w:r w:rsidRPr="00ED44B0"> <w:instrText xml:space="preserve"> PAGE </w:instrText> </w:r> <w:r w:rsidR="00CB55CF" w:rsidRPr="00ED44B0"> <w:fldChar w:fldCharType="separate"/> </w:r> <w:r w:rsidR="00FB4EEC"> <w:rPr> <w:noProof/> </w:rPr> <w:t>1</w:t> </w:r> <w:r w:rsidR="00CB55CF" w:rsidRPr="00ED44B0"> <w:fldChar w:fldCharType="end"/> </w:r> <w:r w:rsidRPr="00ED44B0"> <w:t xml:space="preserve"> of </w:t> </w:r> <w:r w:rsidR="00CB55CF" w:rsidRPr="00ED44B0"> <w:fldChar w:fldCharType="begin"/> </w:r> <w:r w:rsidRPr="00ED44B0"> <w:instrText xml:space="preserve"> NUMPAGES </w:instrText> </w:r> <w:r w:rsidR="00CB55CF" w:rsidRPr="00ED44B0"> <w:fldChar w:fldCharType="separate"/> </w:r> <w:r w:rsidR="00FB4EEC"> <w:rPr> <w:noProof/> </w:rPr> <w:t>1</w:t> </w:r> <w:r w:rsidR="00CB55CF" w:rsidRPr="00ED44B0"> <w:fldChar w:fldCharType="end"/> </w:r> </w:p> <w:p w:rsidR="00ED44B0" w:rsidRDefault="004651E5"> <w:pPr> <w:pStyle w:val="Footer"/> </w:pPr> </w:p> </w:ftr> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/endnotes.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml"> <pkg:xmlData> <w:endnotes xmlns:ve="http://schemas.openxmlformats.org/markup-compatibility/2006" 
xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns:m="http://schemas.openxmlformats.org/officeDocument/2006/math" xmlns:v="urn:schemas-microsoft-com:vml" xmlns:wp="http://schemas.openxmlformats.org/drawingml/2006/wordprocessingDrawing" xmlns:w10="urn:schemas-microsoft-com:office:word" xmlns:w="http://schemas.openxmlformats.org/wordprocessingml/2006/main" xmlns:wne="http://schemas.microsoft.com/office/word/2006/wordml"> <w:endnote w:type="separator" w:id="-1"> <w:p w:rsidR="004651E5" w:rsidRDefault="004651E5" w:rsidP="0050335B"> <w:pPr> <w:spacing w:after="0" w:line="240" w:lineRule="auto"/> </w:pPr> <w:r> <w:separator/> </w:r> </w:p> </w:endnote> <w:endnote w:type="continuationSeparator" w:id="0"> <w:p w:rsidR="004651E5" w:rsidRDefault="004651E5" w:rsidP="0050335B"> <w:pPr> <w:spacing w:after="0" w:line="240" w:lineRule="auto"/> </w:pPr> <w:r> <w:continuationSeparator/> </w:r> </w:p> </w:endnote> </w:endnotes> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/media/image2.png" pkg:contentType="image/png" pkg:compression="store"> <pkg:binaryData> iVBORw0KGgoAAAANSUhEUgAAAMgAAADICAIAAAAiOjnJAAAAAXNSR0IArs4c6QAAAAlwSFlzAAAO xAAADsQBlSsOGwAAA5dJREFUeF7t0rEVgkAAREG4BgjpvzvJsAA9DSiBHzFbwAb/zTrnPN7fxRS4 r8C+jfV1fu479KTAVWAooUBRAKyiqs8FLAiSAmAlWZ2CxUBSAKwkq1OwGEgKgJVkdQoWA0kBsJKs TsFiICkAVpLVKVgMJAXASrI6BYuBpABYSVanYDGQFAAryeoULAaSAmAlWZ2CxUBSAKwkq1OwGEgK gJVkdQoWA0kBsJKsTsFiICkAVpLVKVgMJAXASrI6BYuBpABYSVanYDGQFAAryeoULAaSAmAlWZ2C xUBSAKwkq1OwGEgKgJVkdQoWA0kBsJKsTsFiICkAVpLVKVgMJAXASrI6BYuBpABYSVanYDGQFAAr yeoULAaSAmAlWZ2CxUBSAKwkq1OwGEgKgJVkdQoWA0kBsJKsTsFiICkAVpLVKVgMJAXASrI6BYuB pABYSVanYDGQFAAryeoULAaSAmAlWZ2CxUBSAKwkq1OwGEgKgJVkdQoWA0kBsJKsTsFiICkAVpLV KVgMJAXASrI6BYuBpABYSVanYDGQFAAryeoULAaSAmAlWZ2CxUBSAKwkq1OwGEgKgJVkdQoWA0kB sJKsTsFiICkAVpLVKVgMJAXASrI6BYuBpABYSVanYDGQFAAryeoULAaSAmAlWZ2CxUBSAKwkq1Ow GEgKgJVkdQoWA0kBsJKsTsFiICkAVpLVKVgMJAXASrI6BYuBpABYSVanYDGQFAAryeoULAaSAmAl WZ2CxUBSAKwkq1OwGEgKgJVkdQoWA0kBsJKsTsFiICkAVpLVKVgMJAXASrI6BYuBpABYSVanYDGQ FAAryeoULAaSAmAlWZ2CxUBSAKwkq1OwGEgKgJVkdQoWA0kBsJKsTsFiICkAVpLVKVgMJAXASrI6 BYuBpABYSVanYDGQFAAryeoULAaSAmAlWZ2CxUBSAKwkq1OwGEgKgJVkdQoWA0kBsJKsTsFiICkA VpLVKVgMJAXASrI6BYuBpABYSVanYDGQFAAryeoULAaSAmAlWZ2CxUBSAKwkq1OwGEgKgJVkdQoW A0kBsJKsTsFiICkAVpLVKVgMJAXASrI6BYuBpABYSVanYDGQFAAryeoULAaSAmAlWZ2CxUBSAKwk q1OwGEgKgJVkdQoWA0kBsJKsTsFiICkAVpLVKVgMJAXASrI6BYuBpABYSVanYDGQFAAryeoULAaS AmAlWZ2CxUBSYOwbW0nZJ5/+Uf0Ahm8Ksdfm760AAAAASUVORK5CYII= </pkg:binaryData> </pkg:part> <pkg:part pkg:name="/word/media/image1.png" pkg:contentType="image/png" pkg:compression="store"> <pkg:binaryData> iVBORw0KGgoAAAANSUhEUgAAADYAAABDCAIAAABYyGH4AAAAAXNSR0IArs4c6QAAAAlwSFlzAAAO xAAADsQBlSsOGwAADeNJREFUaEPtWnlwE+cV39VqD52rw7JlyzfY2BzGaWLAENJgUgIhZJJpDkLS Jk0zbdokTNo0d2lJJqTtTOhBCCRpmzABcjAhk7S5CGm5CsQY20AMBuMD25IsyZIlre7dlbZPFhBk pJWW8Ec60288zODZ/b7f947f+723RgVBQL7dS/bthpdEh35zKw75o6MhLsDGfVEefgREUOGYnsJp Sk6T8moDhcvQb2KIS4TIJQR7MLZvwL9/yLd3mBlmYmyY4/iEgCbRwKYYihJKuUFJzCpWzy2nF1Tr a41KFX4pTpMMMSEgewb9mzpsu/p9w74YwvI6LVWooWgS0xPyAoVchqL+GO8Bi7K8N8yP+MNIAqE1 RFOJZnmj+eY6o1GBSzKqNIidjuBrh+1vdToYJlpIKxotdHMJ3WhSWVQkuBUfN+F4+CBwkwDLjYbj J72h1hGmzerv8YQQRFhQa3ywuWzJJIMyb4vmCzHIxte1Wl/ab3W4QyaDcsU089JqY7mGVOPyhCBw iUQciAHC8BxEAAtOBZPimCzKx8Goe23+t7sc7cNenJDfMqPw6QWVMwtV+ZgzL4jOEPvYjr7NrVaC wu+oL1peXzTNqAIEbCLBxXOTFgAlZCj8aw9xn/a73+gaOWNnplfSa2+csqhanxNlbohdo6GHPurZ 85WryqJdNbeqpVSPoUiYjyfNJmWBXSm5DMfQHm9k7WHrx112s16x5oaaH800nw+QjPvlgHjUFfrh 
tuPH+r3zppiemVPZWKAK8QIY79JYBC4F11PjmC8WX3/U9krbEClHX1xW91BTichlxSDag+xd247v 7nItayhZNbeyREUEuDjkwaXhuxAE5D3k1KZux3N7+tWkbMvy6TfVGrOhzEpUTCz++Of9gA/sN46P ZNhk3H1zfAAlzEN6CfdPMz/aXBEIcL/6qKfDEZQMcd0h69YvhydZ6F/PqSpO4uOlBF6OZ+GesXiC jSfun26+/YqS09bAk5/3eSKZj8hsxXZHcMOBYZLCn55b1VCgCl5WfOd4CVAKCjn26FVls2sKdna5 Nh91ZLxZBogQbhsO2UZcoTtnFC8s00Hy5pO7qWfAPPAzTpF5Ldi8QkPdP7MEI+QvH7T2eaMXv5YB 4u5B3/YjjqIC9fK6QhAAUI5zngacB3mqJjDIAygbGhxTymX5iAfYG1z0XQu9uNbUa2P+2m6/mGYn QgRAfz9s9wfZFdPN9XpFhI+L4BsnEVRDYHDSKW/48zNj7/W4t59277P5rQGWwGQqOSZ+vWRQJgR4 7Af1RWoNubXTcdwNdTJtTSSdPm+k+bUOPC5suXlGjZYClsl2BuCDmgFC65Ar8GGPe6/d54IqmbS4 IMdkk3WKljLdrbVFlVoS8heKpAhWDKQHivxiV+/H3c6Nt0194Mo0mpxoxV0DvlEmeoWFLlUSEM4i +5JJR6Jvdjsf2Xlq61f2YW8E7MEjAgjGKBfvcjDr2oZW/rtnv50Bp4vXD4h+UiZrKYdiiO4d8E0I rTSIIAP2D/oRHmm20BBSIA6ychWKwtW3dDvWHIB6G0MIOYJjCIAGLPAvJkv95ojd/8S+vlYHk9Pj oKxnmFQGNdFhZc740pImDeKgP3bQHtDTFBANUKuIDVVy2QE7s77DGoNIIOWZCR3gEvIhd+hP7VZn mKUwmciGXAIBRTevlD49Gj7iSAvHNIh2hrV6IxYNCUTAZk9k8FuIS2w/7fIEo4i47INYwLFWm+/D fo8cQ6FAZ1tgEZrAagzKRJizMdmtyMR4LsobSAyaj2xOBksoMFmvP3LQziAYlvSs+MLQOC/sG/ZB /cRlIo0B1FZUBQ7BZZ4gd+GWae8ARLiNDjRT9nPHexOk3x/xhLlk2OWzZIgtELUFYsBQIvwAkaUn cbC6IwiJ+nVQpEF0gv8QtEhJpCpE5nKEIpDoIHL53Fr23AbQzXDx0SgHCEUwwn46Uq5IQmSj/NeZ mgYxkkx3AaS82OkCiiQbPCmSB7IcGQ/E851DptuDUYBlwYORdB5Ng1ikwmEfT0TsunFgZpnMoqHk +Tk5CSYh6EisSEEkREs32DjA8iE+XqjCKYjycysNog6Eh4BAKyRelSFQqrWUWUslXZ7PSiDlNAWK WKzaJ92CwtEJNmHRktA/ZIYIwwOcwLwxHsRctsiGV6Gjq9JR15bqkq1oTpB8Qo7LFlUYgSVS9THj Gu8YBaigCBc3KomsGW1Q4Go1MRrm3GFOnj374CCgj9tqTRUFSoQTlboQ1PHE96qM11foAZ+Io6FI Q/0cC7FgTOixs0KsgLpiUo34wqd8EUI01kDnNRSoH28qN6pJJMYnzXnxiifAJLPKdI81lWkJLCra lMGYwBXmDo4wBj01tVCZFSJNYfPKtRwbb3OCxBFEFB9YB/JuaaXxd9dMqi3SIMARABSKIcCCH/BX lJcjyKLJphfmT5pEg6jL0TRCxe/zR7pcgQazZnr6CGAi3S+oNhBKvNXqhYgc1zJZF/AihOziSsMr 19U+3FQ2y0KXaCkVIddSeJVOcW2lYfX86j/Mr66hFWG4rmhWpY750sYgvDCnUgfseOHjE/UiNH4t r3ceswc2LKm/oUKf7Ppy7Z7sOFHUHWEH/FFISSC2YiVRriVpQg48C9cQT/pURXVFuHs/OTEwFv7g nsbFk9NGFBOtqCWxFY1mLsa/dcLhifGgnHMeEOITgAPGiLPM2qWVhkXl+mlGpVKOQbxGc+GDzcFV EPf/6HeftDPzJhuaLJoJJ2ZAcPv0whkVut19nn8NeRUiiX2+vCEQfgIwWoiLQwgHuTjIbACd51AA pORpX/Tt405Sga9sLjUq0rycvMPFRirVECuby6Cob+pyWEMs+DEn9+XD3xmfgYoHgbitxzXgYG5p KLphsuHixzL7EQaV82uNRwfGXu60c4JAiqrRS8YH4KBn+GRwbPMRu1ZHrZxjyThyzgyxQImvua7a XKjc3DEM3QlAhE7qstsSyLJ9NPjC/oFQlFu1sKq5VJvxtlmzYX45vXZprZLE1h44836fG9pkArs8 KOGq4F0otn1M9Nn9A4PO4H1zyx6eZcnmDbGEXTG98InrJoWj/KrdvW+edELmjE+0vtFKKS4tLj8y Gnpid197r2fhjKLnF1aDo7Lti61evVrkzKssGtBee057vhgYA5HYaFLD7YG088zWC3cGcIAC+jIC wz4dHHtmd++xYV9LQ9FLS2tBlIhgyD2lBfr9W8fI6h29Ln9syVTzT2eWzCxQgcTP2cCnVQgUATuB /YYDsXd6Rt/otAZC7I/nlT/bUmXRpOmai7Hmhph6Z0ef98nPeo/0e7U0dUe9+dY6U51eAfIEGBF6 xSQHpvcSqdYCaBk0EUQHJpO5QuzHA57Nxx09NkarJZ9aWPnI7FJok3LGTb4QYaM+X/TVVtvrHXaP O1xaqLl+UsE8Cw1Ai5S4EsegjsBUBPRjMhvG2wCgPPhW5IlyZ5ho2wjzxYD30LAX8C6bVriy2dJS lXsQn0IvAWLqhVZbYGObbftRZzDEwURnskE5q5SerFMmPw2Rci0B7TIKBWYsxgbZhC3Ito8wXc5A MALtomz+ZP3PZ1tumgKcltt4560rGSK8CdHZZg/s7B3b2+/rdAZAFCHQdcPAjsBA0YPxWD4OxRCm sNA5YBpyhknRXKG/vsYwr4wuUE6sb5fT0RfvFeQS3a7QwWG/I8BCUzYSjDlCHPS5JoUcRhoGBVag JBrM6jlltF60NxdHeSlWzLZjUkYktVfySypwU07z5PnA5YSY55FSH5MQtlK3vlzPS7NiLBrrdrMq NVWjG2/ShET/aNgvYGVq2QgTo9TKGs1Z/0YjsRM+1kQrdEJ8MMDB8CCp/gUBhr4mNVWmljAokAYx ODZ2/5bjnZjh9bvq5+lk3d1Dd703YG6oWd9MPfPu8U7cuGVF3VVa8Ay/6Z9fPXYs9uLdDZZB6z2f WmPj36lBXUOo3nRN3br5BWTeRpbmaLVOd9+VxpF+2x87PLFIZP3uwc44efcVBdUm+qYa7alTtjXt Hjh6uH/k+S9HyyuLFhVTBI4V6xXFeqrKgPvDEYc7xsMUN298476SurjIb7d+qX7+4L3vduqe3v3g AU+yqAgCH2R+snGP7IXD2wYDa94+qHru0EcuUMMCfHKFDpBLxA8c7St5ZufUN04dCYAOkbCkOTp1 +VG7844t3btG+Mamqve/X11Fnu1lB/utS9/tdQtYjBfubJn656sLzgsEt81586ZuO1349t21s3XS 
+Eiao1MQDWqqUJkcM1aa1CXn8MHvK6qLn/iOwekI6S1FK5u+xsd4xn72zolOmfovt9ZIxQfbXgLE +LYDgzs8aG2FYl/70FuDkQviCls4xTjNpJg1xTTlXDqEff7Ht5/YwarWLZ++zCztDzdSO0uGODzk /H2ra8rUyg/urLsSCfzms8FTka+7GuASBYbCpCX1q0Qs8tLOU6/2sQubymfrhC5X6Igz1B+AGilh SYMYDjDPfnD6GKr55dUl9SUFDzcX2U4OP7JrxHvuGxeMRqCRALWbghj0BD4+6QeVtq+198aNhxe9 enjxhrYH/jPKSMEoDeLJM+6jPPFgy6RlxcmQX9JU+dRsvdU21jF2doRHUkRDGV2nPZsQKI43lutn VGjNGoJSEjT8RYqW1MGXqvwnvFL1Yozlo4hMCx3reUcJCdCFmBwD/6YoDKZAACHVEcN/wa4TPiOC oIQhbP4gL4V0JMTR5XhUmqMvx4mS9/g/RMkmy/DC/4AV/wv0qOA7HwMfCQAAAABJRU5ErkJggg== </pkg:binaryData> </pkg:part> <pkg:part pkg:name="/word/theme/theme1.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.theme+xml"> <pkg:xmlData> <a:theme name="Office Theme" xmlns:a="http://schemas.openxmlformats.org/drawingml/2006/main"> <a:themeElements> <a:clrScheme name="Office"> <a:dk1> <a:sysClr val="windowText" lastClr="000000"/> </a:dk1> <a:lt1> <a:sysClr val="window" lastClr="FFFFFF"/> </a:lt1> <a:dk2> <a:srgbClr val="1F497D"/> </a:dk2> <a:lt2> <a:srgbClr val="EEECE1"/> </a:lt2> <a:accent1> <a:srgbClr val="4F81BD"/> </a:accent1> <a:accent2> <a:srgbClr val="C0504D"/> </a:accent2> <a:accent3> <a:srgbClr val="9BBB59"/> </a:accent3> <a:accent4> <a:srgbClr val="8064A2"/> </a:accent4> <a:accent5> <a:srgbClr val="4BACC6"/> </a:accent5> <a:accent6> <a:srgbClr val="F79646"/> </a:accent6> <a:hlink> <a:srgbClr val="0000FF"/> </a:hlink> <a:folHlink> <a:srgbClr val="800080"/> </a:folHlink> </a:clrScheme> <a:fontScheme name="Office"> <a:majorFont> <a:latin typeface="Cambria"/> <a:ea typeface=""/> <a:cs typeface=""/> <a:font script="Jpan" typeface="MS ゴシック"/> <a:font script="Hang" typeface="맑은 고딕"/> <a:font script="Hans" typeface="宋体"/> <a:font script="Hant" typeface="新細明體"/> <a:font script="Arab" typeface="Times New Roman"/> <a:font script="Hebr" typeface="Times New Roman"/> <a:font script="Thai" typeface="Angsana New"/> <a:font script="Ethi" typeface="Nyala"/> <a:font script="Beng" typeface="Vrinda"/> <a:font script="Gujr" typeface="Shruti"/> <a:font script="Khmr" typeface="MoolBoran"/> <a:font script="Knda" typeface="Tunga"/> <a:font script="Guru" typeface="Raavi"/> <a:font script="Cans" typeface="Euphemia"/> <a:font script="Cher" typeface="Plantagenet Cherokee"/> <a:font script="Yiii" typeface="Microsoft Yi Baiti"/> <a:font script="Tibt" typeface="Microsoft Himalaya"/> <a:font script="Thaa" typeface="MV Boli"/> <a:font script="Deva" typeface="Mangal"/> <a:font script="Telu" typeface="Gautami"/> <a:font script="Taml" typeface="Latha"/> <a:font script="Syrc" typeface="Estrangelo Edessa"/> <a:font script="Orya" typeface="Kalinga"/> <a:font script="Mlym" typeface="Kartika"/> <a:font script="Laoo" typeface="DokChampa"/> <a:font script="Sinh" typeface="Iskoola Pota"/> <a:font script="Mong" typeface="Mongolian Baiti"/> <a:font script="Viet" typeface="Times New Roman"/> <a:font script="Uigh" typeface="Microsoft Uighur"/> </a:majorFont> <a:minorFont> <a:latin typeface="Calibri"/> <a:ea typeface=""/> <a:cs typeface=""/> <a:font script="Jpan" typeface="MS 明朝"/> <a:font script="Hang" typeface="맑은 고딕"/> <a:font script="Hans" typeface="宋体"/> <a:font script="Hant" typeface="新細明體"/> <a:font script="Arab" typeface="Arial"/> <a:font script="Hebr" typeface="Arial"/> <a:font script="Thai" typeface="Cordia New"/> <a:font script="Ethi" typeface="Nyala"/> <a:font script="Beng" typeface="Vrinda"/> <a:font script="Gujr" typeface="Shruti"/> <a:font script="Khmr" typeface="DaunPenh"/> <a:font script="Knda" typeface="Tunga"/> <a:font script="Guru" typeface="Raavi"/> 
<a:font script="Cans" typeface="Euphemia"/> <a:font script="Cher" typeface="Plantagenet Cherokee"/> <a:font script="Yiii" typeface="Microsoft Yi Baiti"/> <a:font script="Tibt" typeface="Microsoft Himalaya"/> <a:font script="Thaa" typeface="MV Boli"/> <a:font script="Deva" typeface="Mangal"/> <a:font script="Telu" typeface="Gautami"/> <a:font script="Taml" typeface="Latha"/> <a:font script="Syrc" typeface="Estrangelo Edessa"/> <a:font script="Orya" typeface="Kalinga"/> <a:font script="Mlym" typeface="Kartika"/> <a:font script="Laoo" typeface="DokChampa"/> <a:font script="Sinh" typeface="Iskoola Pota"/> <a:font script="Mong" typeface="Mongolian Baiti"/> <a:font script="Viet" typeface="Arial"/> <a:font script="Uigh" typeface="Microsoft Uighur"/> </a:minorFont> </a:fontScheme> <a:fmtScheme name="Office"> <a:fillStyleLst> <a:solidFill> <a:schemeClr val="phClr"/> </a:solidFill> <a:gradFill rotWithShape="1"> <a:gsLst> <a:gs pos="0"> <a:schemeClr val="phClr"> <a:tint val="50000"/> <a:satMod val="300000"/> </a:schemeClr> </a:gs> <a:gs pos="35000"> <a:schemeClr val="phClr"> <a:tint val="37000"/> <a:satMod val="300000"/> </a:schemeClr> </a:gs> <a:gs pos="100000"> <a:schemeClr val="phClr"> <a:tint val="15000"/> <a:satMod val="350000"/> </a:schemeClr> </a:gs> </a:gsLst> <a:lin ang="16200000" scaled="1"/> </a:gradFill> <a:gradFill rotWithShape="1"> <a:gsLst> <a:gs pos="0"> <a:schemeClr val="phClr"> <a:shade val="51000"/> <a:satMod val="130000"/> </a:schemeClr> </a:gs> <a:gs pos="80000"> <a:schemeClr val="phClr"> <a:shade val="93000"/> <a:satMod val="130000"/> </a:schemeClr> </a:gs> <a:gs pos="100000"> <a:schemeClr val="phClr"> <a:shade val="94000"/> <a:satMod val="135000"/> </a:schemeClr> </a:gs> </a:gsLst> <a:lin ang="16200000" scaled="0"/> </a:gradFill> </a:fillStyleLst> <a:lnStyleLst> <a:ln w="9525" cap="flat" cmpd="sng" algn="ctr"> <a:solidFill> <a:schemeClr val="phClr"> <a:shade val="95000"/> <a:satMod val="105000"/> </a:schemeClr> </a:solidFill> <a:prstDash val="solid"/> </a:ln> <a:ln w="25400" cap="flat" cmpd="sng" algn="ctr"> <a:solidFill> <a:schemeClr val="phClr"/> </a:solidFill> <a:prstDash val="solid"/> </a:ln> <a:ln w="38100" cap="flat" cmpd="sng" algn="ctr"> <a:solidFill> <a:schemeClr val="phClr"/> </a:solidFill> <a:prstDash val="solid"/> </a:ln> </a:lnStyleLst> <a:effectStyleLst> <a:effectStyle> <a:effectLst> <a:outerShdw blurRad="40000" dist="20000" dir="5400000" rotWithShape="0"> <a:srgbClr val="000000"> <a:alpha val="38000"/> </a:srgbClr> </a:outerShdw> </a:effectLst> </a:effectStyle> <a:effectStyle> <a:effectLst> <a:outerShdw blurRad="40000" dist="23000" dir="5400000" rotWithShape="0"> <a:srgbClr val="000000"> <a:alpha val="35000"/> </a:srgbClr> </a:outerShdw> </a:effectLst> </a:effectStyle> <a:effectStyle> <a:effectLst> <a:outerShdw blurRad="40000" dist="23000" dir="5400000" rotWithShape="0"> <a:srgbClr val="000000"> <a:alpha val="35000"/> </a:srgbClr> </a:outerShdw> </a:effectLst> <a:scene3d> <a:camera prst="orthographicFront"> <a:rot lat="0" lon="0" rev="0"/> </a:camera> <a:lightRig rig="threePt" dir="t"> <a:rot lat="0" lon="0" rev="1200000"/> </a:lightRig> </a:scene3d> <a:sp3d> <a:bevelT w="63500" h="25400"/> </a:sp3d> </a:effectStyle> </a:effectStyleLst> <a:bgFillStyleLst> <a:solidFill> <a:schemeClr val="phClr"/> </a:solidFill> <a:gradFill rotWithShape="1"> <a:gsLst> <a:gs pos="0"> <a:schemeClr val="phClr"> <a:tint val="40000"/> <a:satMod val="350000"/> </a:schemeClr> </a:gs> <a:gs pos="40000"> <a:schemeClr val="phClr"> <a:tint val="45000"/> <a:shade val="99000"/> <a:satMod 
val="350000"/> </a:schemeClr> </a:gs> <a:gs pos="100000"> <a:schemeClr val="phClr"> <a:shade val="20000"/> <a:satMod val="255000"/> </a:schemeClr> </a:gs> </a:gsLst> <a:path path="circle"> <a:fillToRect l="50000" t="-80000" r="50000" b="180000"/> </a:path> </a:gradFill> <a:gradFill rotWithShape="1"> <a:gsLst> <a:gs pos="0"> <a:schemeClr val="phClr"> <a:tint val="80000"/> <a:satMod val="300000"/> </a:schemeClr> </a:gs> <a:gs pos="100000"> <a:schemeClr val="phClr"> <a:shade val="30000"/> <a:satMod val="200000"/> </a:schemeClr> </a:gs> </a:gsLst> <a:path path="circle"> <a:fillToRect l="50000" t="50000" r="50000" b="50000"/> </a:path> </a:gradFill> </a:bgFillStyleLst> </a:fmtScheme> </a:themeElements> <a:objectDefaults/> <a:extraClrSchemeLst/> </a:theme> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/media/image3.png" pkg:contentType="image/png" pkg:compression="store"> <pkg:binaryData> iVBORw0KGgoAAAANSUhEUgAAAGsAAABeCAIAAACran1FAAAAAXNSR0IArs4c6QAAAAlwSFlzAAAO xAAADsQBlSsOGwAAA7JJREFUeF7tnD1u4zAQhaU9iRfbpNhD2EjjIG1u4DSLtA62jFMGdhtsI98g bZA0QXSILdIsopt4TYqUSIo/lp5UBHgq5Xmc4cdHcowgzg+HQ8YHIPAN0FIqCJAg6gMSJEGUAKqn B0kQJYDq6UESRAmg+nyKjjq/z9G6ptQf7sb8EsFdjK4VCaIEp93Fm7sNWuB4+s29Koa7eDyoY4zE XYxSJEESRAmgenqQBFECqJ4eJEGUAKqnB0kQJYDq6UESRAmgenqQBFECqJ4eJEGUAKqnB0kQJYDq 6UESRAmgenqQBFECqJ4eJEGUAKqnB0kQJYDq6UESRAmgenqQBFECqJ4eJEGUAKqnB0kQJYDq6UES RAmgenqQBFECqJ4eJEGUAKqnB0kQJYDq6UESRAmgenqQBFECqH7a/45Fq5tGz/+OnYbr0FF5Dg4l p3WT7GK0qC+lpwfR5SJBEkQJoHp6kARRAqieHiRBlACqj3iwvM19z22pcwYC8rwN0aHV/sIcqxvQ zENFqohgCjeJk+BiXxlkIqNYgXoQszzfOxv68bffAs/7OrQ863cpCQdkmQoRYZ/F0jfQsvj0JVbR 6RRthkAdbYJIoU4VutZmcCU1puMWnSUJmmKdoM7rH1291aU1/Loz9jL0EoxMoFnHNqZJ2U2gPgqs nmELa4aR8OOvefciaFMLrE/9WmWNxphO1XX0JOhHIt7G1ieKpCk5jVsU3e8urvYPO4nnbBba4Trk 5w8RUj7L+PXl3I6fX0rMf/+Zx9WAM716e3oV9VydWwXNVi+Hl1Wwxnii+VYy3C2+Xx/HXhZ/4gOl Ce4W7R0gxzwC+W0OagbkeR2yLG4MZh7gszNxOL5+nEbQTiHqsa+KerlGexRDOY8EvwG/iS42wNZx lFW63CGDDTAaBWQgvY+OS3z9WCZGSnvQOcW7bJxr0+Mrz6vqQ1o1fBpYdXdvErsM+DQws1X7X/X2 LerNHGm9hCxN8NTFnG/VybtbtFtMnXe7B6s/y8zDUjVrus4a7anP7PxKngZPb/ZxIAZNzDyUonzU x9/qRvZhbvGusOddbIZ371lPH9H0YqFuxm7BnPsv3Y55uiWkm3ESOq2ZD1bfbiZO0Gif24Yh1VF7 PncbWp9fkg2zr2VJNCgeYEmGYxM0vqqYM3AouZOzPjYPvZO/TSQSON2mvx/UyexT1/4W0XUh/05y 6pEbihvvJkEr+ap6EkRXjgRJECWA6ulBEkQJoHp6kARRAqieHiRBlACqpwdRgv8BLLihkhVatZgA AAAASUVORK5CYII= </pkg:binaryData> </pkg:part> <pkg:part pkg:name="/word/settings.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml"> <pkg:xmlData> <w:settings xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns:m="http://schemas.openxmlformats.org/officeDocument/2006/math" xmlns:v="urn:schemas-microsoft-com:vml" xmlns:w10="urn:schemas-microsoft-com:office:word" xmlns:w="http://schemas.openxmlformats.org/wordprocessingml/2006/main" xmlns:sl="http://schemas.openxmlformats.org/schemaLibrary/2006/main"> <w:zoom w:percent="140"/> <w:proofState w:spelling="clean" w:grammar="clean"/> <w:defaultTabStop w:val="708"/> <w:hyphenationZone w:val="283"/> <w:characterSpacingControl w:val="doNotCompress"/> <w:footnotePr> <w:footnote w:id="-1"/> <w:footnote w:id="0"/> </w:footnotePr> <w:endnotePr> <w:endnote w:id="-1"/> <w:endnote w:id="0"/> </w:endnotePr> <w:compat/> <w:rsids> <w:rsidRoot w:val="00B411DD"/> <w:rsid w:val="00126BDF"/> <w:rsid w:val="0021671A"/> <w:rsid w:val="00330EDA"/> <w:rsid w:val="003D412C"/> <w:rsid w:val="003F5FBD"/> <w:rsid w:val="004651E5"/> <w:rsid w:val="0050335B"/> <w:rsid w:val="00521218"/> <w:rsid 
w:val="006C1D14"/> <w:rsid w:val="006F733E"/> <w:rsid w:val="00720DB2"/> <w:rsid w:val="00767112"/> <w:rsid w:val="00771D35"/> <w:rsid w:val="00777754"/> <w:rsid w:val="007E2643"/> <w:rsid w:val="00956CD7"/> <w:rsid w:val="00966E4D"/> <w:rsid w:val="009F40C5"/> <w:rsid w:val="00A50E1F"/> <w:rsid w:val="00B411DD"/> <w:rsid w:val="00B56F96"/> <w:rsid w:val="00B924BD"/> <w:rsid w:val="00B96DB7"/> <w:rsid w:val="00BE1F9A"/> <w:rsid w:val="00BF4CC5"/> <w:rsid w:val="00CB55CF"/> <w:rsid w:val="00CC7B66"/> <w:rsid w:val="00EE6629"/> <w:rsid w:val="00F134D3"/> <w:rsid w:val="00FB4EEC"/> </w:rsids> <m:mathPr> <m:mathFont m:val="Cambria Math"/> <m:brkBin m:val="before"/> <m:brkBinSub m:val="--"/> <m:smallFrac m:val="off"/> <m:dispDef/> <m:lMargin m:val="0"/> <m:rMargin m:val="0"/> <m:defJc m:val="centerGroup"/> <m:wrapIndent m:val="1440"/> <m:intLim m:val="subSup"/> <m:naryLim m:val="undOvr"/> </m:mathPr> <w:themeFontLang w:val="it-IT"/> <w:clrSchemeMapping w:bg1="light1" w:t1="dark1" w:bg2="light2" w:t2="dark2" w:accent1="accent1" w:accent2="accent2" w:accent3="accent3" w:accent4="accent4" w:accent5="accent5" w:accent6="accent6" w:hyperlink="hyperlink" w:followedHyperlink="followedHyperlink"/> <w:shapeDefaults> <o:shapedefaults v:ext="edit" spidmax="5122"/> <o:shapelayout v:ext="edit"> <o:idmap v:ext="edit" data="1"/> </o:shapelayout> </w:shapeDefaults> <w:decimalSymbol w:val=","/> <w:listSeparator w:val=";"/> </w:settings> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/fontTable.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.wordprocessingml.fontTable+xml"> <pkg:xmlData> <w:fonts xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns:w="http://schemas.openxmlformats.org/wordprocessingml/2006/main"> <w:font w:name="Calibri"> <w:panose1 w:val="020F0502020204030204"/> <w:charset w:val="00"/> <w:family w:val="swiss"/> <w:pitch w:val="variable"/> <w:sig w:usb0="E10002FF" w:usb1="4000ACFF" w:usb2="00000009" w:usb3="00000000" w:csb0="0000019F" w:csb1="00000000"/> </w:font> <w:font w:name="Times New Roman"> <w:panose1 w:val="02020603050405020304"/> <w:charset w:val="00"/> <w:family w:val="roman"/> <w:pitch w:val="variable"/> <w:sig w:usb0="E0002AFF" w:usb1="C0007841" w:usb2="00000009" w:usb3="00000000" w:csb0="000001FF" w:csb1="00000000"/> </w:font> <w:font w:name="Arial"> <w:panose1 w:val="020B0604020202020204"/> <w:charset w:val="00"/> <w:family w:val="swiss"/> <w:pitch w:val="variable"/> <w:sig w:usb0="E0002AFF" w:usb1="C0007843" w:usb2="00000009" w:usb3="00000000" w:csb0="000001FF" w:csb1="00000000"/> </w:font> <w:font w:name="Tahoma"> <w:panose1 w:val="020B0604030504040204"/> <w:charset w:val="00"/> <w:family w:val="swiss"/> <w:notTrueType/> <w:pitch w:val="variable"/> <w:sig w:usb0="00000003" w:usb1="00000000" w:usb2="00000000" w:usb3="00000000" w:csb0="00000001" w:csb1="00000000"/> </w:font> <w:font w:name="Cambria"> <w:panose1 w:val="02040503050406030204"/> <w:charset w:val="00"/> <w:family w:val="roman"/> <w:pitch w:val="variable"/> <w:sig w:usb0="E00002FF" w:usb1="400004FF" w:usb2="00000000" w:usb3="00000000" w:csb0="0000019F" w:csb1="00000000"/> </w:font> </w:fonts> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/webSettings.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.wordprocessingml.webSettings+xml"> <pkg:xmlData> <w:webSettings xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns:w="http://schemas.openxmlformats.org/wordprocessingml/2006/main"> <w:optimizeForBrowser/> 
</w:webSettings> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/docProps/app.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.extended-properties+xml" pkg:padding="256"> <pkg:xmlData> <Properties xmlns="http://schemas.openxmlformats.org/officeDocument/2006/extended-properties" xmlns:vt="http://schemas.openxmlformats.org/officeDocument/2006/docPropsVTypes"> <Template>Normal.dotm</Template> <TotalTime>294</TotalTime> <Pages>1</Pages> <Words>85</Words> <Characters>488</Characters> <Application>Microsoft Office Word</Application> <DocSecurity>0</DocSecurity> <Lines>4</Lines> <Paragraphs>1</Paragraphs> <ScaleCrop>false</ScaleCrop> <HeadingPairs> <vt:vector size="2" baseType="variant"> <vt:variant> <vt:lpstr>Title</vt:lpstr> </vt:variant> <vt:variant> <vt:i4>1</vt:i4> </vt:variant> </vt:vector> </HeadingPairs> <TitlesOfParts> <vt:vector size="1" baseType="lpstr"> <vt:lpstr/> </vt:vector> </TitlesOfParts> <Company/> <LinksUpToDate>false</LinksUpToDate> <CharactersWithSpaces>572</CharactersWithSpaces> <SharedDoc>false</SharedDoc> <HyperlinksChanged>false</HyperlinksChanged> <AppVersion>12.0000</AppVersion> </Properties> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/docProps/core.xml" pkg:contentType="application/vnd.openxmlformats-package.core-properties+xml" pkg:padding="256"> <pkg:xmlData> <cp:coreProperties xmlns:cp="http://schemas.openxmlformats.org/package/2006/metadata/core-properties" xmlns:dc="http://purl.org/dc/elements/1.1/" xmlns:dcterms="http://purl.org/dc/terms/" xmlns:dcmitype="http://purl.org/dc/dcmitype/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"> <dc:creator>zizzog1</dc:creator> <cp:lastModifiedBy>zizzog1</cp:lastModifiedBy> <cp:revision>17</cp:revision> <dcterms:created xsi:type="dcterms:W3CDTF">2011-07-07T09:53:00Z</dcterms:created> <dcterms:modified xsi:type="dcterms:W3CDTF">2011-07-08T12:38:00Z</dcterms:modified> </cp:coreProperties> </pkg:xmlData> </pkg:part> <pkg:part pkg:name="/word/styles.xml" pkg:contentType="application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml"> <pkg:xmlData> <w:styles xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships" xmlns:w="http://schemas.openxmlformats.org/wordprocessingml/2006/main"> <w:docDefaults> <w:rPrDefault> <w:rPr> <w:rFonts w:asciiTheme="minorHAnsi" w:eastAsiaTheme="minorHAnsi" w:hAnsiTheme="minorHAnsi" w:cstheme="minorBidi"/> <w:sz w:val="22"/> <w:szCs w:val="22"/> <w:lang w:val="it-IT" w:eastAsia="en-US" w:bidi="ar-SA"/> </w:rPr> </w:rPrDefault> <w:pPrDefault> <w:pPr> <w:spacing w:after="200" w:line="276" w:lineRule="auto"/> </w:pPr> </w:pPrDefault> </w:docDefaults> <w:latentStyles w:defLockedState="0" w:defUIPriority="99" w:defSemiHidden="1" w:defUnhideWhenUsed="1" w:defQFormat="0" w:count="267"> <w:lsdException w:name="Normal" w:semiHidden="0" w:uiPriority="0" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="heading 1" w:semiHidden="0" w:uiPriority="9" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="heading 2" w:uiPriority="9" w:qFormat="1"/> <w:lsdException w:name="heading 3" w:uiPriority="9" w:qFormat="1"/> <w:lsdException w:name="heading 4" w:uiPriority="9" w:qFormat="1"/> <w:lsdException w:name="heading 5" w:uiPriority="9" w:qFormat="1"/> <w:lsdException w:name="heading 6" w:uiPriority="9" w:qFormat="1"/> <w:lsdException w:name="heading 7" w:uiPriority="9" w:qFormat="1"/> <w:lsdException w:name="heading 8" w:uiPriority="9" w:qFormat="1"/> <w:lsdException w:name="heading 9" w:uiPriority="9" w:qFormat="1"/> <w:lsdException 
w:name="toc 1" w:uiPriority="39"/> <w:lsdException w:name="toc 2" w:uiPriority="39"/> <w:lsdException w:name="toc 3" w:uiPriority="39"/> <w:lsdException w:name="toc 4" w:uiPriority="39"/> <w:lsdException w:name="toc 5" w:uiPriority="39"/> <w:lsdException w:name="toc 6" w:uiPriority="39"/> <w:lsdException w:name="toc 7" w:uiPriority="39"/> <w:lsdException w:name="toc 8" w:uiPriority="39"/> <w:lsdException w:name="toc 9" w:uiPriority="39"/> <w:lsdException w:name="footer" w:uiPriority="0"/> <w:lsdException w:name="caption" w:uiPriority="35" w:qFormat="1"/> <w:lsdException w:name="Title" w:semiHidden="0" w:uiPriority="10" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Default Paragraph Font" w:uiPriority="1"/> <w:lsdException w:name="Subtitle" w:semiHidden="0" w:uiPriority="11" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Strong" w:semiHidden="0" w:uiPriority="22" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Emphasis" w:semiHidden="0" w:uiPriority="20" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Table Grid" w:semiHidden="0" w:uiPriority="59" w:unhideWhenUsed="0"/> <w:lsdException w:name="Placeholder Text" w:unhideWhenUsed="0"/> <w:lsdException w:name="No Spacing" w:semiHidden="0" w:uiPriority="1" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Light Shading" w:semiHidden="0" w:uiPriority="60" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light List" w:semiHidden="0" w:uiPriority="61" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Grid" w:semiHidden="0" w:uiPriority="62" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 1" w:semiHidden="0" w:uiPriority="63" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 2" w:semiHidden="0" w:uiPriority="64" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 1" w:semiHidden="0" w:uiPriority="65" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 2" w:semiHidden="0" w:uiPriority="66" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 1" w:semiHidden="0" w:uiPriority="67" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 2" w:semiHidden="0" w:uiPriority="68" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 3" w:semiHidden="0" w:uiPriority="69" w:unhideWhenUsed="0"/> <w:lsdException w:name="Dark List" w:semiHidden="0" w:uiPriority="70" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Shading" w:semiHidden="0" w:uiPriority="71" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful List" w:semiHidden="0" w:uiPriority="72" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Grid" w:semiHidden="0" w:uiPriority="73" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Shading Accent 1" w:semiHidden="0" w:uiPriority="60" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light List Accent 1" w:semiHidden="0" w:uiPriority="61" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Grid Accent 1" w:semiHidden="0" w:uiPriority="62" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 1 Accent 1" w:semiHidden="0" w:uiPriority="63" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 2 Accent 1" w:semiHidden="0" w:uiPriority="64" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 1 Accent 1" w:semiHidden="0" w:uiPriority="65" w:unhideWhenUsed="0"/> <w:lsdException w:name="Revision" w:unhideWhenUsed="0"/> <w:lsdException w:name="List Paragraph" w:semiHidden="0" w:uiPriority="34" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Quote" w:semiHidden="0" w:uiPriority="29" 
w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Intense Quote" w:semiHidden="0" w:uiPriority="30" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Medium List 2 Accent 1" w:semiHidden="0" w:uiPriority="66" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 1 Accent 1" w:semiHidden="0" w:uiPriority="67" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 2 Accent 1" w:semiHidden="0" w:uiPriority="68" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 3 Accent 1" w:semiHidden="0" w:uiPriority="69" w:unhideWhenUsed="0"/> <w:lsdException w:name="Dark List Accent 1" w:semiHidden="0" w:uiPriority="70" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Shading Accent 1" w:semiHidden="0" w:uiPriority="71" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful List Accent 1" w:semiHidden="0" w:uiPriority="72" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Grid Accent 1" w:semiHidden="0" w:uiPriority="73" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Shading Accent 2" w:semiHidden="0" w:uiPriority="60" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light List Accent 2" w:semiHidden="0" w:uiPriority="61" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Grid Accent 2" w:semiHidden="0" w:uiPriority="62" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 1 Accent 2" w:semiHidden="0" w:uiPriority="63" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 2 Accent 2" w:semiHidden="0" w:uiPriority="64" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 1 Accent 2" w:semiHidden="0" w:uiPriority="65" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 2 Accent 2" w:semiHidden="0" w:uiPriority="66" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 1 Accent 2" w:semiHidden="0" w:uiPriority="67" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 2 Accent 2" w:semiHidden="0" w:uiPriority="68" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 3 Accent 2" w:semiHidden="0" w:uiPriority="69" w:unhideWhenUsed="0"/> <w:lsdException w:name="Dark List Accent 2" w:semiHidden="0" w:uiPriority="70" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Shading Accent 2" w:semiHidden="0" w:uiPriority="71" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful List Accent 2" w:semiHidden="0" w:uiPriority="72" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Grid Accent 2" w:semiHidden="0" w:uiPriority="73" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Shading Accent 3" w:semiHidden="0" w:uiPriority="60" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light List Accent 3" w:semiHidden="0" w:uiPriority="61" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Grid Accent 3" w:semiHidden="0" w:uiPriority="62" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 1 Accent 3" w:semiHidden="0" w:uiPriority="63" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 2 Accent 3" w:semiHidden="0" w:uiPriority="64" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 1 Accent 3" w:semiHidden="0" w:uiPriority="65" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 2 Accent 3" w:semiHidden="0" w:uiPriority="66" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 1 Accent 3" w:semiHidden="0" w:uiPriority="67" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 2 Accent 3" w:semiHidden="0" w:uiPriority="68" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 3 Accent 3" w:semiHidden="0" w:uiPriority="69" w:unhideWhenUsed="0"/> <w:lsdException 
w:name="Dark List Accent 3" w:semiHidden="0" w:uiPriority="70" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Shading Accent 3" w:semiHidden="0" w:uiPriority="71" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful List Accent 3" w:semiHidden="0" w:uiPriority="72" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Grid Accent 3" w:semiHidden="0" w:uiPriority="73" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Shading Accent 4" w:semiHidden="0" w:uiPriority="60" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light List Accent 4" w:semiHidden="0" w:uiPriority="61" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Grid Accent 4" w:semiHidden="0" w:uiPriority="62" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 1 Accent 4" w:semiHidden="0" w:uiPriority="63" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 2 Accent 4" w:semiHidden="0" w:uiPriority="64" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 1 Accent 4" w:semiHidden="0" w:uiPriority="65" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 2 Accent 4" w:semiHidden="0" w:uiPriority="66" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 1 Accent 4" w:semiHidden="0" w:uiPriority="67" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 2 Accent 4" w:semiHidden="0" w:uiPriority="68" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 3 Accent 4" w:semiHidden="0" w:uiPriority="69" w:unhideWhenUsed="0"/> <w:lsdException w:name="Dark List Accent 4" w:semiHidden="0" w:uiPriority="70" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Shading Accent 4" w:semiHidden="0" w:uiPriority="71" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful List Accent 4" w:semiHidden="0" w:uiPriority="72" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Grid Accent 4" w:semiHidden="0" w:uiPriority="73" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Shading Accent 5" w:semiHidden="0" w:uiPriority="60" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light List Accent 5" w:semiHidden="0" w:uiPriority="61" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Grid Accent 5" w:semiHidden="0" w:uiPriority="62" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 1 Accent 5" w:semiHidden="0" w:uiPriority="63" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 2 Accent 5" w:semiHidden="0" w:uiPriority="64" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 1 Accent 5" w:semiHidden="0" w:uiPriority="65" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 2 Accent 5" w:semiHidden="0" w:uiPriority="66" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 1 Accent 5" w:semiHidden="0" w:uiPriority="67" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 2 Accent 5" w:semiHidden="0" w:uiPriority="68" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 3 Accent 5" w:semiHidden="0" w:uiPriority="69" w:unhideWhenUsed="0"/> <w:lsdException w:name="Dark List Accent 5" w:semiHidden="0" w:uiPriority="70" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Shading Accent 5" w:semiHidden="0" w:uiPriority="71" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful List Accent 5" w:semiHidden="0" w:uiPriority="72" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Grid Accent 5" w:semiHidden="0" w:uiPriority="73" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Shading Accent 6" w:semiHidden="0" w:uiPriority="60" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light List Accent 6" w:semiHidden="0" 
w:uiPriority="61" w:unhideWhenUsed="0"/> <w:lsdException w:name="Light Grid Accent 6" w:semiHidden="0" w:uiPriority="62" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 1 Accent 6" w:semiHidden="0" w:uiPriority="63" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Shading 2 Accent 6" w:semiHidden="0" w:uiPriority="64" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 1 Accent 6" w:semiHidden="0" w:uiPriority="65" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium List 2 Accent 6" w:semiHidden="0" w:uiPriority="66" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 1 Accent 6" w:semiHidden="0" w:uiPriority="67" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 2 Accent 6" w:semiHidden="0" w:uiPriority="68" w:unhideWhenUsed="0"/> <w:lsdException w:name="Medium Grid 3 Accent 6" w:semiHidden="0" w:uiPriority="69" w:unhideWhenUsed="0"/> <w:lsdException w:name="Dark List Accent 6" w:semiHidden="0" w:uiPriority="70" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Shading Accent 6" w:semiHidden="0" w:uiPriority="71" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful List Accent 6" w:semiHidden="0" w:uiPriority="72" w:unhideWhenUsed="0"/> <w:lsdException w:name="Colorful Grid Accent 6" w:semiHidden="0" w:uiPriority="73" w:unhideWhenUsed="0"/> <w:lsdException w:name="Subtle Emphasis" w:semiHidden="0" w:uiPriority="19" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Intense Emphasis" w:semiHidden="0" w:uiPriority="21" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Subtle Reference" w:semiHidden="0" w:uiPriority="31" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Intense Reference" w:semiHidden="0" w:uiPriority="32" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Book Title" w:semiHidden="0" w:uiPriority="33" w:unhideWhenUsed="0" w:qFormat="1"/> <w:lsdException w:name="Bibliography" w:uiPriority="37"/> <w:lsdException w:name="TOC Heading" w:uiPriority="39" w:qFormat="1"/> </w:latentStyles> <w:style w:type="paragraph" w:default="1" w:styleId="Normal"> <w:name w:val="Normal"/> <w:qFormat/> <w:rsid w:val="006F733E"/> <w:rPr> <w:rFonts w:ascii="Arial" w:eastAsia="Arial" w:hAnsi="Arial" w:cs="Times New Roman"/> <w:lang w:val="en-GB"/> </w:rPr> </w:style> <w:style w:type="character" w:default="1" w:styleId="DefaultParagraphFont"> <w:name w:val="Default Paragraph Font"/> <w:uiPriority w:val="1"/> <w:semiHidden/> <w:unhideWhenUsed/> </w:style> <w:style w:type="table" w:default="1" w:styleId="TableNormal"> <w:name w:val="Normal Table"/> <w:uiPriority w:val="99"/> <w:semiHidden/> <w:unhideWhenUsed/> <w:qFormat/> <w:tblPr> <w:tblInd w:w="0" w:type="dxa"/> <w:tblCellMar> <w:top w:w="0" w:type="dxa"/> <w:left w:w="108" w:type="dxa"/> <w:bottom w:w="0" w:type="dxa"/> <w:right w:w="108" w:type="dxa"/> </w:tblCellMar> </w:tblPr> </w:style> <w:style w:type="numbering" w:default="1" w:styleId="NoList"> <w:name w:val="No List"/> <w:uiPriority w:val="99"/> <w:semiHidden/> <w:unhideWhenUsed/> </w:style> <w:style w:type="paragraph" w:styleId="Footer"> <w:name w:val="footer"/> <w:basedOn w:val="Normal"/> <w:link w:val="FooterChar"/> <w:rsid w:val="006F733E"/> <w:pPr> <w:tabs> <w:tab w:val="center" w:pos="4513"/> <w:tab w:val="right" w:pos="9026"/> </w:tabs> <w:spacing w:after="0" w:line="240" w:lineRule="auto"/> </w:pPr> </w:style> <w:style w:type="character" w:customStyle="1" w:styleId="FooterChar"> <w:name w:val="Footer Char"/> <w:basedOn w:val="DefaultParagraphFont"/> <w:link w:val="Footer"/> <w:rsid 
w:val="006F733E"/> <w:rPr> <w:rFonts w:ascii="Arial" w:eastAsia="Arial" w:hAnsi="Arial" w:cs="Times New Roman"/> <w:lang w:val="en-GB"/> </w:rPr> </w:style> <w:style w:type="paragraph" w:customStyle="1" w:styleId="Style1"> <w:name w:val="Style1"/> <w:basedOn w:val="Footer"/> <w:link w:val="Style1Char"/> <w:rsid w:val="006F733E"/> <w:pPr> <w:pBdr> <w:top w:val="single" w:sz="4" w:space="1" w:color="auto"/> </w:pBdr> <w:jc w:val="right"/> </w:pPr> <w:rPr> <w:sz w:val="18"/> <w:szCs w:val="18"/> </w:rPr> </w:style> <w:style w:type="paragraph" w:customStyle="1" w:styleId="Style2"> <w:name w:val="Style2"/> <w:basedOn w:val="Header"/> <w:link w:val="Style2Char"/> <w:rsid w:val="006F733E"/> <w:pPr> <w:pBdr> <w:bottom w:val="single" w:sz="4" w:space="1" w:color="auto"/> </w:pBdr> <w:tabs> <w:tab w:val="clear" w:pos="4819"/> <w:tab w:val="clear" w:pos="9638"/> <w:tab w:val="center" w:pos="4513"/> <w:tab w:val="right" w:pos="9026"/> </w:tabs> </w:pPr> <w:rPr> <w:sz w:val="18"/> </w:rPr> </w:style> <w:style w:type="character" w:customStyle="1" w:styleId="Style1Char"> <w:name w:val="Style1 Char"/> <w:basedOn w:val="FooterChar"/> <w:link w:val="Style1"/> <w:rsid w:val="006F733E"/> <w:rPr> <w:sz w:val="18"/> <w:szCs w:val="18"/> </w:rPr> </w:style> <w:style w:type="character" w:customStyle="1" w:styleId="Style2Char"> <w:name w:val="Style2 Char"/> <w:basedOn w:val="HeaderChar"/> <w:link w:val="Style2"/> <w:rsid w:val="006F733E"/> <w:rPr> <w:sz w:val="18"/> </w:rPr> </w:style> <w:style w:type="paragraph" w:styleId="Header"> <w:name w:val="header"/> <w:basedOn w:val="Normal"/> <w:link w:val="HeaderChar"/> <w:uiPriority w:val="99"/> <w:semiHidden/> <w:unhideWhenUsed/> <w:rsid w:val="006F733E"/> <w:pPr> <w:tabs> <w:tab w:val="center" w:pos="4819"/> <w:tab w:val="right" w:pos="9638"/> </w:tabs> <w:spacing w:after="0" w:line="240" w:lineRule="auto"/> </w:pPr> </w:style> <w:style w:type="character" w:customStyle="1" w:styleId="HeaderChar"> <w:name w:val="Header Char"/> <w:basedOn w:val="DefaultParagraphFont"/> <w:link w:val="Header"/> <w:uiPriority w:val="99"/> <w:semiHidden/> <w:rsid w:val="006F733E"/> <w:rPr> <w:rFonts w:ascii="Arial" w:eastAsia="Arial" w:hAnsi="Arial" w:cs="Times New Roman"/> <w:lang w:val="en-GB"/> </w:rPr> </w:style> <w:style w:type="paragraph" w:styleId="BalloonText"> <w:name w:val="Balloon Text"/> <w:basedOn w:val="Normal"/> <w:link w:val="BalloonTextChar"/> <w:uiPriority w:val="99"/> <w:semiHidden/> <w:unhideWhenUsed/> <w:rsid w:val="006F733E"/> <w:pPr> <w:spacing w:after="0" w:line="240" w:lineRule="auto"/> </w:pPr> <w:rPr> <w:rFonts w:ascii="Tahoma" w:hAnsi="Tahoma" w:cs="Tahoma"/> <w:sz w:val="16"/> <w:szCs w:val="16"/> </w:rPr> </w:style> <w:style w:type="character" w:customStyle="1" w:styleId="BalloonTextChar"> <w:name w:val="Balloon Text Char"/> <w:basedOn w:val="DefaultParagraphFont"/> <w:link w:val="BalloonText"/> <w:uiPriority w:val="99"/> <w:semiHidden/> <w:rsid w:val="006F733E"/> <w:rPr> <w:rFonts w:ascii="Tahoma" w:eastAsia="Arial" w:hAnsi="Tahoma" w:cs="Tahoma"/> <w:sz w:val="16"/> <w:szCs w:val="16"/> <w:lang w:val="en-GB"/> </w:rPr> </w:style> <w:style w:type="character" w:styleId="PlaceholderText"> <w:name w:val="Placeholder Text"/> <w:basedOn w:val="DefaultParagraphFont"/> <w:uiPriority w:val="99"/> <w:semiHidden/> <w:rsid w:val="0050335B"/> <w:rPr> <w:color w:val="808080"/> </w:rPr> </w:style> </w:styles> </pkg:xmlData> </pkg:part> </pkg:package>
{ "pile_set_name": "Github" }
Check out the project from git.

At the top level, run cmake:

    cmake -G 'Unix Makefiles' \
        -D BUILD_EXAMPLES=ON \
        -D WEBSOCKETPP_ROOT=/tmp/cm1 \
        -D ENABLE_CPP11=OFF .

and then make the example:

    make -C examples/sip_client

Now run it:

    bin/sip_client ws://ws-server:80

It has been tested against the repro SIP proxy from reSIProcate:
http://www.resiprocate.org/WebRTC_and_SIP_Over_WebSockets
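The sip_client sources are not reproduced here. For orientation only, below is a minimal, hypothetical sketch (not the project's actual code) of what such a client does with the websocketpp API: open the WebSocket URI given on the command line, advertise the "sip" subprotocol per RFC 7118, and hand the connection to the asio event loop. The on_open behaviour and command-line handling are assumptions.

    // Sketch only: connect a websocketpp client and request the "sip" subprotocol.
    #include <websocketpp/config/asio_no_tls_client.hpp>
    #include <websocketpp/client.hpp>
    #include <iostream>

    typedef websocketpp::client<websocketpp::config::asio_client> ws_client;

    static void on_open(ws_client* c, websocketpp::connection_hdl hdl) {
        // A real SIP client would start its registration exchange here.
        std::cout << "WebSocket connection established" << std::endl;
    }

    int main(int argc, char* argv[]) {
        if (argc < 2) {
            std::cerr << "usage: sip_client <ws-uri>" << std::endl;
            return 1;
        }

        ws_client client;
        client.init_asio();
        client.set_open_handler(websocketpp::lib::bind(
            &on_open, &client, websocketpp::lib::placeholders::_1));

        websocketpp::lib::error_code ec;
        ws_client::connection_ptr con = client.get_connection(argv[1], ec);
        if (ec) {
            std::cerr << "could not create connection: " << ec.message() << std::endl;
            return 1;
        }
        con->add_subprotocol("sip");  // SIP over WebSockets (RFC 7118)

        client.connect(con);
        client.run();                 // blocks until the connection closes
        return 0;
    }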
{ "pile_set_name": "Github" }
using Volo.Abp.Reflection;

namespace Volo.Abp.Identity
{
    public static class IdentityPermissions
    {
        public const string GroupName = "AbpIdentity";

        public static class Roles
        {
            public const string Default = GroupName + ".Roles";
            public const string Create = Default + ".Create";
            public const string Update = Default + ".Update";
            public const string Delete = Default + ".Delete";
            public const string ManagePermissions = Default + ".ManagePermissions";
        }

        public static class Users
        {
            public const string Default = GroupName + ".Users";
            public const string Create = Default + ".Create";
            public const string Update = Default + ".Update";
            public const string Delete = Default + ".Delete";
            public const string ManagePermissions = Default + ".ManagePermissions";
        }

        public static class UserLookup
        {
            public const string Default = GroupName + ".UserLookup";
        }

        public static string[] GetAll()
        {
            return ReflectionHelper.GetPublicConstantsRecursively(typeof(IdentityPermissions));
        }
    }
}
{ "pile_set_name": "Github" }
// Copyright 2015 Google Inc. All rights reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "benchmark/benchmark.h" #include "benchmark_api_internal.h" #include "internal_macros.h" #ifndef BENCHMARK_OS_WINDOWS #include <sys/resource.h> #include <sys/time.h> #include <unistd.h> #endif #include <algorithm> #include <atomic> #include <condition_variable> #include <cstdio> #include <cstdlib> #include <cstring> #include <fstream> #include <iostream> #include <memory> #include <thread> #include "check.h" #include "colorprint.h" #include "commandlineflags.h" #include "complexity.h" #include "counter.h" #include "internal_macros.h" #include "log.h" #include "mutex.h" #include "re.h" #include "statistics.h" #include "string_util.h" #include "timers.h" DEFINE_bool(benchmark_list_tests, false, "Print a list of benchmarks. This option overrides all other " "options."); DEFINE_string(benchmark_filter, ".", "A regular expression that specifies the set of benchmarks " "to execute. If this flag is empty, no benchmarks are run. " "If this flag is the string \"all\", all benchmarks linked " "into the process are run."); DEFINE_double(benchmark_min_time, 0.5, "Minimum number of seconds we should run benchmark before " "results are considered significant. For cpu-time based " "tests, this is the lower bound on the total cpu time " "used by all threads that make up the test. For real-time " "based tests, this is the lower bound on the elapsed time " "of the benchmark execution, regardless of number of " "threads."); DEFINE_int32(benchmark_repetitions, 1, "The number of runs of each benchmark. If greater than 1, the " "mean and standard deviation of the runs will be reported."); DEFINE_bool(benchmark_report_aggregates_only, false, "Report the result of each benchmark repetitions. When 'true' is " "specified only the mean, standard deviation, and other statistics " "are reported for repeated benchmarks."); DEFINE_string(benchmark_format, "console", "The format to use for console output. Valid values are " "'console', 'json', or 'csv'."); DEFINE_string(benchmark_out_format, "json", "The format to use for file output. Valid values are " "'console', 'json', or 'csv'."); DEFINE_string(benchmark_out, "", "The file to write additonal output to"); DEFINE_string(benchmark_color, "auto", "Whether to use colors in the output. Valid values: " "'true'/'yes'/1, 'false'/'no'/0, and 'auto'. 'auto' means to use " "colors if the output is being sent to a terminal and the TERM " "environment variable is set to a terminal type that supports " "colors."); DEFINE_bool(benchmark_counters_tabular, false, "Whether to use tabular format when printing user counters to " "the console. Valid values: 'true'/'yes'/1, 'false'/'no'/0." 
"Defaults to false."); DEFINE_int32(v, 0, "The level of verbose logging to output"); namespace benchmark { namespace { static const size_t kMaxIterations = 1000000000; } // end namespace namespace internal { void UseCharPointer(char const volatile*) {} class ThreadManager { public: ThreadManager(int num_threads) : alive_threads_(num_threads), start_stop_barrier_(num_threads) {} Mutex& GetBenchmarkMutex() const RETURN_CAPABILITY(benchmark_mutex_) { return benchmark_mutex_; } bool StartStopBarrier() EXCLUDES(end_cond_mutex_) { return start_stop_barrier_.wait(); } void NotifyThreadComplete() EXCLUDES(end_cond_mutex_) { start_stop_barrier_.removeThread(); if (--alive_threads_ == 0) { MutexLock lock(end_cond_mutex_); end_condition_.notify_all(); } } void WaitForAllThreads() EXCLUDES(end_cond_mutex_) { MutexLock lock(end_cond_mutex_); end_condition_.wait(lock.native_handle(), [this]() { return alive_threads_ == 0; }); } public: struct Result { double real_time_used = 0; double cpu_time_used = 0; double manual_time_used = 0; int64_t bytes_processed = 0; int64_t items_processed = 0; int complexity_n = 0; std::string report_label_; std::string error_message_; bool has_error_ = false; UserCounters counters; }; GUARDED_BY(GetBenchmarkMutex()) Result results; private: mutable Mutex benchmark_mutex_; std::atomic<int> alive_threads_; Barrier start_stop_barrier_; Mutex end_cond_mutex_; Condition end_condition_; }; // Timer management class class ThreadTimer { public: ThreadTimer() = default; // Called by each thread void StartTimer() { running_ = true; start_real_time_ = ChronoClockNow(); start_cpu_time_ = ThreadCPUUsage(); } // Called by each thread void StopTimer() { CHECK(running_); running_ = false; real_time_used_ += ChronoClockNow() - start_real_time_; // Floating point error can result in the subtraction producing a negative // time. Guard against that. cpu_time_used_ += std::max<double>(ThreadCPUUsage() - start_cpu_time_, 0); } // Called by each thread void SetIterationTime(double seconds) { manual_time_used_ += seconds; } bool running() const { return running_; } // REQUIRES: timer is not running double real_time_used() { CHECK(!running_); return real_time_used_; } // REQUIRES: timer is not running double cpu_time_used() { CHECK(!running_); return cpu_time_used_; } // REQUIRES: timer is not running double manual_time_used() { CHECK(!running_); return manual_time_used_; } private: bool running_ = false; // Is the timer running double start_real_time_ = 0; // If running_ double start_cpu_time_ = 0; // If running_ // Accumulated time so far (does not contain current slice if running_) double real_time_used_ = 0; double cpu_time_used_ = 0; // Manually set iteration time. User sets this with SetIterationTime(seconds). double manual_time_used_ = 0; }; namespace { BenchmarkReporter::Run CreateRunReport( const benchmark::internal::Benchmark::Instance& b, const internal::ThreadManager::Result& results, size_t iters, double seconds) { // Create report about this benchmark run. BenchmarkReporter::Run report; report.benchmark_name = b.name; report.error_occurred = results.has_error_; report.error_message = results.error_message_; report.report_label = results.report_label_; // Report the total iterations across all threads. 
report.iterations = static_cast<int64_t>(iters) * b.threads; report.time_unit = b.time_unit; if (!report.error_occurred) { double bytes_per_second = 0; if (results.bytes_processed > 0 && seconds > 0.0) { bytes_per_second = (results.bytes_processed / seconds); } double items_per_second = 0; if (results.items_processed > 0 && seconds > 0.0) { items_per_second = (results.items_processed / seconds); } if (b.use_manual_time) { report.real_accumulated_time = results.manual_time_used; } else { report.real_accumulated_time = results.real_time_used; } report.cpu_accumulated_time = results.cpu_time_used; report.bytes_per_second = bytes_per_second; report.items_per_second = items_per_second; report.complexity_n = results.complexity_n; report.complexity = b.complexity; report.complexity_lambda = b.complexity_lambda; report.statistics = b.statistics; report.counters = results.counters; internal::Finish(&report.counters, seconds, b.threads); } return report; } // Execute one thread of benchmark b for the specified number of iterations. // Adds the stats collected for the thread into *total. void RunInThread(const benchmark::internal::Benchmark::Instance* b, size_t iters, int thread_id, internal::ThreadManager* manager) { internal::ThreadTimer timer; State st(iters, b->arg, thread_id, b->threads, &timer, manager); b->benchmark->Run(st); CHECK(st.iterations() == st.max_iterations) << "Benchmark returned before State::KeepRunning() returned false!"; { MutexLock l(manager->GetBenchmarkMutex()); internal::ThreadManager::Result& results = manager->results; results.cpu_time_used += timer.cpu_time_used(); results.real_time_used += timer.real_time_used(); results.manual_time_used += timer.manual_time_used(); results.bytes_processed += st.bytes_processed(); results.items_processed += st.items_processed(); results.complexity_n += st.complexity_length_n(); internal::Increment(&results.counters, st.counters); } manager->NotifyThreadComplete(); } std::vector<BenchmarkReporter::Run> RunBenchmark( const benchmark::internal::Benchmark::Instance& b, std::vector<BenchmarkReporter::Run>* complexity_reports) { std::vector<BenchmarkReporter::Run> reports; // return value const bool has_explicit_iteration_count = b.iterations != 0; size_t iters = has_explicit_iteration_count ? b.iterations : 1; std::unique_ptr<internal::ThreadManager> manager; std::vector<std::thread> pool(b.threads - 1); const int repeats = b.repetitions != 0 ? b.repetitions : FLAGS_benchmark_repetitions; const bool report_aggregates_only = repeats != 1 && (b.report_mode == internal::RM_Unspecified ? FLAGS_benchmark_report_aggregates_only : b.report_mode == internal::RM_ReportAggregatesOnly); for (int repetition_num = 0; repetition_num < repeats; repetition_num++) { for (;;) { // Try benchmark VLOG(2) << "Running " << b.name << " for " << iters << "\n"; manager.reset(new internal::ThreadManager(b.threads)); for (std::size_t ti = 0; ti < pool.size(); ++ti) { pool[ti] = std::thread(&RunInThread, &b, iters, static_cast<int>(ti + 1), manager.get()); } RunInThread(&b, iters, 0, manager.get()); manager->WaitForAllThreads(); for (std::thread& thread : pool) thread.join(); internal::ThreadManager::Result results; { MutexLock l(manager->GetBenchmarkMutex()); results = manager->results; } manager.reset(); // Adjust real/manual time stats since they were reported per thread. 
results.real_time_used /= b.threads; results.manual_time_used /= b.threads; VLOG(2) << "Ran in " << results.cpu_time_used << "/" << results.real_time_used << "\n"; // Base decisions off of real time if requested by this benchmark. double seconds = results.cpu_time_used; if (b.use_manual_time) { seconds = results.manual_time_used; } else if (b.use_real_time) { seconds = results.real_time_used; } const double min_time = !IsZero(b.min_time) ? b.min_time : FLAGS_benchmark_min_time; // Determine if this run should be reported; Either it has // run for a sufficient amount of time or because an error was reported. const bool should_report = repetition_num > 0 || has_explicit_iteration_count // An exact iteration count was requested || results.has_error_ || iters >= kMaxIterations || seconds >= min_time // the elapsed time is large enough // CPU time is specified but the elapsed real time greatly exceeds the // minimum time. Note that user provided timers are except from this // sanity check. || ((results.real_time_used >= 5 * min_time) && !b.use_manual_time); if (should_report) { BenchmarkReporter::Run report = CreateRunReport(b, results, iters, seconds); if (!report.error_occurred && b.complexity != oNone) complexity_reports->push_back(report); reports.push_back(report); break; } // See how much iterations should be increased by // Note: Avoid division by zero with max(seconds, 1ns). double multiplier = min_time * 1.4 / std::max(seconds, 1e-9); // If our last run was at least 10% of FLAGS_benchmark_min_time then we // use the multiplier directly. Otherwise we use at most 10 times // expansion. // NOTE: When the last run was at least 10% of the min time the max // expansion should be 14x. bool is_significant = (seconds / min_time) > 0.1; multiplier = is_significant ? 
multiplier : std::min(10.0, multiplier); if (multiplier <= 1.0) multiplier = 2.0; double next_iters = std::max(multiplier * iters, iters + 1.0); if (next_iters > kMaxIterations) { next_iters = kMaxIterations; } VLOG(3) << "Next iters: " << next_iters << ", " << multiplier << "\n"; iters = static_cast<int>(next_iters + 0.5); } } // Calculate additional statistics auto stat_reports = ComputeStats(reports); if ((b.complexity != oNone) && b.last_benchmark_instance) { auto additional_run_stats = ComputeBigO(*complexity_reports); stat_reports.insert(stat_reports.end(), additional_run_stats.begin(), additional_run_stats.end()); complexity_reports->clear(); } if (report_aggregates_only) reports.clear(); reports.insert(reports.end(), stat_reports.begin(), stat_reports.end()); return reports; } } // namespace } // namespace internal State::State(size_t max_iters, const std::vector<int>& ranges, int thread_i, int n_threads, internal::ThreadTimer* timer, internal::ThreadManager* manager) : started_(false), finished_(false), total_iterations_(max_iters + 1), range_(ranges), bytes_processed_(0), items_processed_(0), complexity_n_(0), error_occurred_(false), counters(), thread_index(thread_i), threads(n_threads), max_iterations(max_iters), timer_(timer), manager_(manager) { CHECK(max_iterations != 0) << "At least one iteration must be run"; CHECK(total_iterations_ != 0) << "max iterations wrapped around"; CHECK_LT(thread_index, threads) << "thread_index must be less than threads"; } void State::PauseTiming() { // Add in time accumulated so far CHECK(started_ && !finished_ && !error_occurred_); timer_->StopTimer(); } void State::ResumeTiming() { CHECK(started_ && !finished_ && !error_occurred_); timer_->StartTimer(); } void State::SkipWithError(const char* msg) { CHECK(msg); error_occurred_ = true; { MutexLock l(manager_->GetBenchmarkMutex()); if (manager_->results.has_error_ == false) { manager_->results.error_message_ = msg; manager_->results.has_error_ = true; } } total_iterations_ = 1; if (timer_->running()) timer_->StopTimer(); } void State::SetIterationTime(double seconds) { timer_->SetIterationTime(seconds); } void State::SetLabel(const char* label) { MutexLock l(manager_->GetBenchmarkMutex()); manager_->results.report_label_ = label; } void State::StartKeepRunning() { CHECK(!started_ && !finished_); started_ = true; manager_->StartStopBarrier(); if (!error_occurred_) ResumeTiming(); } void State::FinishKeepRunning() { CHECK(started_ && (!finished_ || error_occurred_)); if (!error_occurred_) { PauseTiming(); } // Total iterations has now wrapped around zero. Fix this. total_iterations_ = 1; finished_ = true; manager_->StartStopBarrier(); } namespace internal { namespace { void RunBenchmarks(const std::vector<Benchmark::Instance>& benchmarks, BenchmarkReporter* console_reporter, BenchmarkReporter* file_reporter) { // Note the file_reporter can be null. CHECK(console_reporter != nullptr); // Determine the width of the name field using a minimum width of 10. 
bool has_repetitions = FLAGS_benchmark_repetitions > 1; size_t name_field_width = 10; size_t stat_field_width = 0; for (const Benchmark::Instance& benchmark : benchmarks) { name_field_width = std::max<size_t>(name_field_width, benchmark.name.size()); has_repetitions |= benchmark.repetitions > 1; for(const auto& Stat : *benchmark.statistics) stat_field_width = std::max<size_t>(stat_field_width, Stat.name_.size()); } if (has_repetitions) name_field_width += 1 + stat_field_width; // Print header here BenchmarkReporter::Context context; context.name_field_width = name_field_width; // Keep track of runing times of all instances of current benchmark std::vector<BenchmarkReporter::Run> complexity_reports; // We flush streams after invoking reporter methods that write to them. This // ensures users get timely updates even when streams are not line-buffered. auto flushStreams = [](BenchmarkReporter* reporter) { if (!reporter) return; std::flush(reporter->GetOutputStream()); std::flush(reporter->GetErrorStream()); }; if (console_reporter->ReportContext(context) && (!file_reporter || file_reporter->ReportContext(context))) { flushStreams(console_reporter); flushStreams(file_reporter); for (const auto& benchmark : benchmarks) { std::vector<BenchmarkReporter::Run> reports = RunBenchmark(benchmark, &complexity_reports); console_reporter->ReportRuns(reports); if (file_reporter) file_reporter->ReportRuns(reports); flushStreams(console_reporter); flushStreams(file_reporter); } } console_reporter->Finalize(); if (file_reporter) file_reporter->Finalize(); flushStreams(console_reporter); flushStreams(file_reporter); } std::unique_ptr<BenchmarkReporter> CreateReporter( std::string const& name, ConsoleReporter::OutputOptions output_opts) { typedef std::unique_ptr<BenchmarkReporter> PtrType; if (name == "console") { return PtrType(new ConsoleReporter(output_opts)); } else if (name == "json") { return PtrType(new JSONReporter); } else if (name == "csv") { return PtrType(new CSVReporter); } else { std::cerr << "Unexpected format: '" << name << "'\n"; std::exit(1); } } } // end namespace bool IsZero(double n) { return std::abs(n) < std::numeric_limits<double>::epsilon(); } ConsoleReporter::OutputOptions GetOutputOptions(bool force_no_color) { int output_opts = ConsoleReporter::OO_Defaults; if ((FLAGS_benchmark_color == "auto" && IsColorTerminal()) || IsTruthyFlagValue(FLAGS_benchmark_color)) { output_opts |= ConsoleReporter::OO_Color; } else { output_opts &= ~ConsoleReporter::OO_Color; } if(force_no_color) { output_opts &= ~ConsoleReporter::OO_Color; } if(FLAGS_benchmark_counters_tabular) { output_opts |= ConsoleReporter::OO_Tabular; } else { output_opts &= ~ConsoleReporter::OO_Tabular; } return static_cast< ConsoleReporter::OutputOptions >(output_opts); } } // end namespace internal size_t RunSpecifiedBenchmarks() { return RunSpecifiedBenchmarks(nullptr, nullptr); } size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter) { return RunSpecifiedBenchmarks(console_reporter, nullptr); } size_t RunSpecifiedBenchmarks(BenchmarkReporter* console_reporter, BenchmarkReporter* file_reporter) { std::string spec = FLAGS_benchmark_filter; if (spec.empty() || spec == "all") spec = "."; // Regexp that matches all benchmarks // Setup the reporters std::ofstream output_file; std::unique_ptr<BenchmarkReporter> default_console_reporter; std::unique_ptr<BenchmarkReporter> default_file_reporter; if (!console_reporter) { default_console_reporter = internal::CreateReporter( FLAGS_benchmark_format, 
internal::GetOutputOptions()); console_reporter = default_console_reporter.get(); } auto& Out = console_reporter->GetOutputStream(); auto& Err = console_reporter->GetErrorStream(); std::string const& fname = FLAGS_benchmark_out; if (fname.empty() && file_reporter) { Err << "A custom file reporter was provided but " "--benchmark_out=<file> was not specified." << std::endl; std::exit(1); } if (!fname.empty()) { output_file.open(fname); if (!output_file.is_open()) { Err << "invalid file name: '" << fname << std::endl; std::exit(1); } if (!file_reporter) { default_file_reporter = internal::CreateReporter( FLAGS_benchmark_out_format, ConsoleReporter::OO_None); file_reporter = default_file_reporter.get(); } file_reporter->SetOutputStream(&output_file); file_reporter->SetErrorStream(&output_file); } std::vector<internal::Benchmark::Instance> benchmarks; if (!FindBenchmarksInternal(spec, &benchmarks, &Err)) return 0; if (benchmarks.empty()) { Err << "Failed to match any benchmarks against regex: " << spec << "\n"; return 0; } if (FLAGS_benchmark_list_tests) { for (auto const& benchmark : benchmarks) Out << benchmark.name << "\n"; } else { internal::RunBenchmarks(benchmarks, console_reporter, file_reporter); } return benchmarks.size(); } namespace internal { void PrintUsageAndExit() { fprintf(stdout, "benchmark" " [--benchmark_list_tests={true|false}]\n" " [--benchmark_filter=<regex>]\n" " [--benchmark_min_time=<min_time>]\n" " [--benchmark_repetitions=<num_repetitions>]\n" " [--benchmark_report_aggregates_only={true|false}\n" " [--benchmark_format=<console|json|csv>]\n" " [--benchmark_out=<filename>]\n" " [--benchmark_out_format=<json|console|csv>]\n" " [--benchmark_color={auto|true|false}]\n" " [--benchmark_counters_tabular={true|false}]\n" " [--v=<verbosity>]\n"); exit(0); } void ParseCommandLineFlags(int* argc, char** argv) { using namespace benchmark; for (int i = 1; i < *argc; ++i) { if (ParseBoolFlag(argv[i], "benchmark_list_tests", &FLAGS_benchmark_list_tests) || ParseStringFlag(argv[i], "benchmark_filter", &FLAGS_benchmark_filter) || ParseDoubleFlag(argv[i], "benchmark_min_time", &FLAGS_benchmark_min_time) || ParseInt32Flag(argv[i], "benchmark_repetitions", &FLAGS_benchmark_repetitions) || ParseBoolFlag(argv[i], "benchmark_report_aggregates_only", &FLAGS_benchmark_report_aggregates_only) || ParseStringFlag(argv[i], "benchmark_format", &FLAGS_benchmark_format) || ParseStringFlag(argv[i], "benchmark_out", &FLAGS_benchmark_out) || ParseStringFlag(argv[i], "benchmark_out_format", &FLAGS_benchmark_out_format) || ParseStringFlag(argv[i], "benchmark_color", &FLAGS_benchmark_color) || // "color_print" is the deprecated name for "benchmark_color". // TODO: Remove this. 
ParseStringFlag(argv[i], "color_print", &FLAGS_benchmark_color) || ParseBoolFlag(argv[i], "benchmark_counters_tabular", &FLAGS_benchmark_counters_tabular) || ParseInt32Flag(argv[i], "v", &FLAGS_v)) { for (int j = i; j != *argc - 1; ++j) argv[j] = argv[j + 1]; --(*argc); --i; } else if (IsFlag(argv[i], "help")) { PrintUsageAndExit(); } } for (auto const* flag : {&FLAGS_benchmark_format, &FLAGS_benchmark_out_format}) if (*flag != "console" && *flag != "json" && *flag != "csv") { PrintUsageAndExit(); } if (FLAGS_benchmark_color.empty()) { PrintUsageAndExit(); } } int InitializeStreams() { static std::ios_base::Init init; return 0; } } // end namespace internal void Initialize(int* argc, char** argv) { internal::ParseCommandLineFlags(argc, argv); internal::LogLevel() = FLAGS_v; } bool ReportUnrecognizedArguments(int argc, char** argv) { for (int i = 1; i < argc; ++i) { fprintf(stderr, "%s: error: unrecognized command-line flag: %s\n", argv[0], argv[i]); } return argc > 1; } } // end namespace benchmark
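The file above is the library's internal runner; user code never calls these internals directly. As an illustrative sketch only (the two benchmarks below are made-up examples, not part of the library source), a typical program registers functions through the public benchmark.h macros and lets BENCHMARK_MAIN() drive RunSpecifiedBenchmarks():

// Sketch of typical user code built against this library.
#include <benchmark/benchmark.h>
#include <string>

// Times default construction of std::string.
static void BM_StringCreation(benchmark::State& state) {
  while (state.KeepRunning()) {
    std::string empty_string;
    benchmark::DoNotOptimize(empty_string);
  }
}
BENCHMARK(BM_StringCreation);

// Times copying a short string and reports throughput via SetBytesProcessed.
static void BM_StringCopy(benchmark::State& state) {
  std::string src = "hello";
  while (state.KeepRunning()) {
    std::string copy(src);
    benchmark::DoNotOptimize(copy);
  }
  state.SetBytesProcessed(static_cast<int64_t>(state.iterations()) * src.size());
}
BENCHMARK(BM_StringCopy)->Threads(2)->Repetitions(3);

BENCHMARK_MAIN();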
{ "pile_set_name": "Github" }
/* Range */
input[type=range] {
  -webkit-appearance: none;
  width: 100%;
  margin: 5.7px 0;
  padding: 0;
  border: none;

  &:focus {
    outline: none;
  }

  &::-webkit-slider-runnable-track {
    width: 100%;
    height: 8.6px;
    cursor: pointer;
    box-shadow: 2.6px 2.6px 0.4px #cccccc, 0px 0px 2.6px #d9d9d9;
    background: rgba(255, 255, 255, 0.2);
    border-radius: 13.6px;
    border: 0px solid #ffffff;
  }

  &::-webkit-slider-thumb {
    box-shadow: 0px 0px 0px rgba(255, 221, 0, 0.37), 0px 0px 0px rgba(255, 224, 26, 0.37);
    border: 7.9px solid $color-link;
    height: 20px;
    width: 20px;
    border-radius: 50px;
    background: $color-link;
    cursor: pointer;
    -webkit-appearance: none;
    margin-top: -5.7px;
  }

  &:focus::-webkit-slider-runnable-track {
    background: rgba(255, 255, 255, 0.2);
  }

  &::-moz-range-track {
    width: 100%;
    height: 8.6px;
    cursor: pointer;
    box-shadow: 2.6px 2.6px 0.4px #cccccc, 0px 0px 2.6px #d9d9d9;
    background: rgba(255, 255, 255, 0.2);
    border-radius: 13.6px;
    border: 0px solid #ffffff;
  }

  &::-moz-range-thumb {
    box-shadow: 0px 0px 0px rgba(255, 221, 0, 0.37), 0px 0px 0px rgba(255, 224, 26, 0.37);
    border: 7.9px solid $color-link;
    height: 20px;
    width: 20px;
    border-radius: 50px;
    background: $color-link;
    cursor: pointer;
  }

  &::-ms-track {
    width: 100%;
    height: 8.6px;
    cursor: pointer;
    background: transparent;
    border-color: transparent;
    color: transparent;
  }

  &::-ms-fill-lower {
    background: rgba(199, 199, 199, 0.2);
    border: 0px solid #ffffff;
    border-radius: 27.2px;
    box-shadow: 2.6px 2.6px 0.4px #cccccc, 0px 0px 2.6px #d9d9d9;
  }

  &::-ms-fill-upper {
    background: rgba(255, 255, 255, 0.2);
    border: 0px solid #ffffff;
    border-radius: 27.2px;
    box-shadow: 2.6px 2.6px 0.4px #cccccc, 0px 0px 2.6px #d9d9d9;
  }

  &::-ms-thumb {
    box-shadow: 0px 0px 0px rgba(255, 221, 0, 0.37), 0px 0px 0px rgba(255, 224, 26, 0.37);
    border: 7.9px solid $color-link;
    height: 20px;
    width: 20px;
    border-radius: 50px;
    background: $color-link;
    cursor: pointer;
    height: 8.6px;
  }

  &:focus::-ms-fill-lower {
    background: rgba(255, 255, 255, 0.2);
  }

  &:focus::-ms-fill-upper {
    background: rgba(255, 255, 255, 0.2);
  }
}
{ "pile_set_name": "Github" }
<?php // the current program global $_PROG; global $_IDL_FORMAT; class thelpers { public static function begin_with($haystack, $needle) { if (strlen($needle) > strlen($haystack)) return FALSE; else return substr($haystack, 0, strlen($needle)) === $needle; } public static function is_container_type($full_name) { return thelpers::begin_with($full_name, "vector<") || thelpers::begin_with($full_name, "list<") || thelpers::begin_with($full_name, "map<") || thelpers::begin_with($full_name, "set<") ; } public static function get_container_type($full_name) { if (thelpers::is_container_type($full_name)) return trim(substr($full_name, 0, strpos($full_name, "<", 0))); else return FALSE; } public static function get_container_key_type($full_name) { if (thelpers::is_container_type($full_name)) { $pos = strpos($full_name, "<"); $kvs = trim(substr($full_name, $pos + 1, strrpos($full_name, ">") - $pos - 1)); if (strpos($kvs, ",") == FALSE) return $kvs; else return trim(substr($kvs, 0, strpos($kvs, ","))); } else return FALSE; } public static function get_container_value_type($full_name) { if (thelpers::is_container_type($full_name)) { $pos = strpos($full_name, "<"); $kvs = trim(substr($full_name, $pos + 1, strrpos($full_name, ">") - $pos - 1)); if (strpos($kvs, ",") == FALSE) return FALSE; else return trim(substr($kvs, strpos($kvs, ",") + 1)); } else return FALSE; } public static function base_type_to_cpp_type($base_type) { //echo "base_type_to_cpp_type'".$base_type."'".PHP_EOL; switch ($base_type) { case "list": return "std::list"; case "map": return "std::map"; case "set": return "std::set"; case "vector": return "std::vector"; case "string": return "std::string"; case "double": return "double"; case "float": return "float"; case "i64": return "int64_t"; case "int64": return "int64_t"; case "int64_t": return "int64_t"; case "ui64": return "uint64_t"; case "uint64": return "uint64_t"; case "uint64_t": return "uint64_t"; case "i32": return "int32_t"; case "int32": return "int32_t"; case "int32_t": return "int32_t"; case "ui32": return "uint32_t"; case "uint32": return "uint32_t"; case "uint32_t": return "uint32_t"; case "byte": return "byte"; case "BYTE": return "byte"; case "Byte": return "byte"; case "bool": return "bool"; case "BOOL": return "bool"; case "Bool": return "bool"; case "sint32": return "int32_t"; case "sint64": return "int64_t"; case "fixed32": return "int32_t"; case "fixed64": return "int64_t"; case "sfixed32": return "int32_t"; case "sfixed64": return "int64_t"; default: return $base_type; } } public static function base_type_to_csharp_type($base_type) { //echo "base_type_to_cpp_type'".$base_type."'".PHP_EOL; switch ($base_type) { case "list": return "List"; case "map": return "Dictionary"; case "set": return "HashSet"; case "vector": return "List"; case "string": return "string"; case "double": return "double"; case "float": return "float"; case "i64": return "Int64"; case "int64": return "Int64"; case "int64_t": return "Int64"; case "ui64": return "UInt64"; case "uint64": return "UInt64"; case "uint64_t": return "UInt64"; case "i32": return "Int32"; case "int32": return "Int32"; case "int32_t": return "Int32"; case "ui32": return "UInt32"; case "uint32": return "UInt32"; case "uint32_t": return "UInt32"; case "byte": return "byte"; case "BYTE": return "byte"; case "Byte": return "byte"; case "bool": return "bool"; case "BOOL": return "bool"; case "Bool": return "bool"; case "sint32": return "Int32"; case "sint64": return "Int64"; case "fixed32": return "Int32"; case "fixed64": return 
"Int64"; case "sfixed32": return "Int32"; case "sfixed64": return "Int64"; default: return $base_type; } } public static function is_base_type($type) { static $base_types = array( "list", "map", "set", "vector", "string", "double", "float", "i64", "int64", "int64_t", "uint64", "uint64_t", "i32", "int32", "uint32_t", "byte", "BYTE", "Byte", "bool", "BOOL", "Bool", "sint32", "sint64", "fixed32", "fixed64", "sfixed32", "sfixed64" ); return in_array($type, $base_types); } public static function get_cpp_type_name($full_name) { global $_PROG; if (thelpers::is_container_type($full_name)) return thelpers::get_cpp_name_internal($full_name); else { $pos = strrpos($full_name, "."); if (FALSE == $pos) return thelpers::get_cpp_name_internal($full_name); else { // check cpp namespace as prefix $prog = NULL; $left = ""; if (thelpers::begin_with($full_name, $_PROG->get_namespace("cpp").".")) { $left = substr($full_name, strlen($_PROG->get_namespace("cpp")) + 1); $prog = $_PROG; } else { foreach ($_PROG->includes as $pn => $p) { if (thelpers::begin_with($full_name, $p->get_namespace("cpp").".")) { $left = substr($full_name, strlen($p->get_namespace("cpp")) + 1); $prog = $p; break; } } } // check package as prefix if ($prog == NULL) { if (thelpers::begin_with($full_name, $_PROG->name.".")) { $left = substr($full_name, strlen($_PROG->name) + 1); $prog = $_PROG; } else { foreach ($_PROG->includes as $pn => $p) { if (thelpers::begin_with($full_name, $p->name.".")) { $left = substr($full_name, strlen($p->name) + 1); $prog = $p; break; } } } } if (NULL == $prog) { return "full type translation from '". $full_name. "' failed."; } return $prog == $_PROG ? thelpers::get_cpp_name_internal($left) : $prog->get_cpp_namespace() . thelpers::get_cpp_name_internal($left); } } } private static function get_cpp_name_internal($full_name) { if (thelpers::is_container_type($full_name)) { $kt = thelpers::get_container_key_type($full_name); $vt = thelpers::get_container_value_type($full_name); $ct = thelpers::get_container_type($full_name); return thelpers::base_type_to_cpp_type($ct)."< ". ($vt == FALSE ? thelpers::get_cpp_type_name($kt) : (thelpers::get_cpp_type_name($kt).", ".thelpers::get_cpp_type_name($vt))) .">"; } else if (FALSE != strpos($full_name, ".")) { return str_replace(".", "_", $full_name); } else { return thelpers::base_type_to_cpp_type($full_name); } } public static function get_csharp_type_name($full_name) { global $_PROG; if (thelpers::is_container_type($full_name)) return thelpers::get_csharp_name_internal($full_name); else { $pos = strrpos($full_name, "."); if (FALSE == $pos) return thelpers::get_csharp_name_internal($full_name); else { // check cpp namespace as prefix $prog = NULL; $left = ""; if (thelpers::begin_with($full_name, $_PROG->get_csharp_namespace().".")) { $left = substr($full_name, strlen($_PROG->get_csharp_namespace()) + 1); $prog = $_PROG; } else { foreach ($_PROG->includes as $pn => $p) { if (thelpers::begin_with($full_name, $p->get_csharp_namespace().".")) { $left = substr($full_name, strlen($p->get_csharp_namespace()) + 1); $prog = $p; break; } } } // check package as prefix if ($prog == NULL) { if (thelpers::begin_with($full_name, $_PROG->name.".")) { $left = substr($full_name, strlen($_PROG->name) + 1); $prog = $_PROG; } else { foreach ($_PROG->includes as $pn => $p) { if (thelpers::begin_with($full_name, $p->name.".")) { $left = substr($full_name, strlen($p->name) + 1); $prog = $p; break; } } } } if (NULL == $prog) { return "full type translation from '". $full_name. 
"' failed."; } return $prog == $_PROG ? thelpers::get_csharp_name_internal($left) : $prog->get_csharp_namespace() . thelpers::get_csharp_name_internal($left); } } } private static function get_csharp_name_internal($full_name) { if (thelpers::is_container_type($full_name)) { $kt = thelpers::get_container_key_type($full_name); $vt = thelpers::get_container_value_type($full_name); $ct = thelpers::get_container_type($full_name); return thelpers::base_type_to_csharp_type($ct)."< ". ($vt == FALSE ? thelpers::get_csharp_type_name($kt) : (thelpers::get_csharp_type_name($kt).", ".thelpers::get_csharp_type_name($vt))) .">"; } else if (FALSE != strpos($full_name, ".")) { return str_replace(".", "_", $full_name); } else { return thelpers::base_type_to_csharp_type($full_name); } } } class t_program { var $name; var $namespaces; var $includes; var $typedefs; var $enums; var $structs; var $services; var $types; var $annotations; function __construct($name) { $this->name = $name; $this->namespaces = array(); $this->includes = array(); $this->typedefs = array(); $this->enums = array(); $this->structs = array(); $this->services = array(); $this->types = array(); $this->annotations = array(); } function get_test_task_code() { return "LPC" ."_". strtoupper($this->name) ."_TEST_TIMER" ; } function get_cpp_namespace() { if (!array_key_exists("cpp", $this->namespaces)) { return ""; } $nms = $this->namespaces["cpp"]; $nms = explode(".", $nms); $rt = "::"; foreach ($nms as $nm) { $rt .= $nm ."::"; } return $rt; } function get_namespace($lang) { if (!array_key_exists($lang, $this->namespaces)) { return FALSE; } return $this->namespaces[$lang]; } function get_cpp_namespace_begin() { if (!array_key_exists("cpp", $this->namespaces)) { return ""; } $nms = $this->namespaces["cpp"]; $nms = explode(".", $nms); $rt = ""; foreach ($nms as $nm) { $rt .= "namespace ". $nm ." 
{ "; } return $rt; } function get_cpp_namespace_end() { if (!array_key_exists("cpp", $this->namespaces)) { return ""; } $nms = $this->namespaces["cpp"]; $nms = explode(".", $nms); $rt = ""; foreach ($nms as $nm) { $rt .= "} "; } return $rt; } function get_csharp_namespace() { if (!array_key_exists("csharp", $this->namespaces)) { return $this->namespaces["cpp"]; } return $this->namespaces["csharp"]; } function add_annotations($atts) { $this->annotations = $atts; foreach ($this->structs as $s) { $s->on_annotations(); } foreach ($this->services as $s) { $s->on_annotations(); } } } class t_type { var $program; var $name; function __construct($program, $name) { if (thelpers::begin_with($name, $program->get_namespace("cpp").".")) { $name = substr($name, strlen($program->get_namespace("cpp")) + 1); } else if (thelpers::begin_with($name, $program->name.".")) { $name = substr($name, strlen($program->name) + 1); } $this->program = $program; $this->name = $name; $program->types[] = $this; } function get_cpp_name() { $pos = strpos($this->name, "."); if ($pos == FALSE) return $this->name; else { $prefix = substr($this->name, 0, $pos); if (0 == strcmp($prefix, $this->program->name) || 0 == strcmp($prefix, $this->program->get_namespace("cpp"))) { $prefix = substr($this->name, $pos + 1); } else { $prefix = $this->name; } return str_replace(".", "_", $prefix); } } function get_csharp_name() { $pos = strpos($this->name, "."); if ($pos == FALSE) return $this->name; else { $prefix = substr($this->name, 0, $pos); if (0 == strcmp($prefix, $this->program->name) || 0 == strcmp($prefix, $this->program->get_csharp_namespace())) { $prefix = substr($this->name, $pos + 1); } else { $prefix = $this->name; } return str_replace(".", "_", $prefix); } } function is_void() { return false; } function is_enum() { return false; } function is_alias() { return false; } function is_base_type() { return thelpers::is_base_type($this->name); } } class t_typedef extends t_type { var $type; function __construct($program, $type, $alias) { parent::__construct($program, $alias); $this->type = $type; $program->typedefs[] = $this; } function is_alias() { return true; } } class t_enum extends t_type { var $values; function __construct($program, $name) { parent::__construct($program, $name); $this->values = array(); $program->enums[] = $this; } function add_value($name, $value) { $this->values[$name] = $value; } function is_enum() { return true; } } class t_field { var $name; var $type_name; var $id; function __construct($name, $type_name, $id) { $this->name = $name; $this->type_name = $type_name; $this->id = $id; } function get_type() { return $this->type_name; } function get_cpp_type() { return thelpers::get_cpp_type_name($this->type_name); } function get_csharp_type() { return thelpers::get_csharp_type_name($this->type_name); } function is_base_type() { return thelpers::is_base_type($this->type_name); } } class t_struct extends t_type { var $fields; function __construct($program, $name) { parent::__construct($program, $name); $this->fields = array(); $program->structs[] = $this; } function add_field($name, $type_name, $id = "") { $this->fields[] = new t_field($name, $type_name, $id); } function is_base_type() { return false; } function on_annotations() { // nothing to do for now } } class t_function { var $service; var $ret; var $name; var $params; var $is_write; function __construct($service, $ret, $name) { $this->service = $service; $this->ret = $ret; $this->name = $name; $this->is_write = false; $this->params = array(); } function 
add_param($name, $type_name, $id = "") { $this->params[] = new t_field($name, $type_name, $id); } function get_return_type() { return $this->ret; } function get_cpp_return_type() { return thelpers::get_cpp_type_name($this->ret); } function get_csharp_return_type() { return thelpers::get_csharp_type_name($this->ret); } function get_first_param() { return $this->params[0]; } function get_request_type_name() { return $this->params[0]->get_type(); } function get_cpp_request_type_name() { return $this->params[0]->get_cpp_type(); } function get_csharp_request_type_name() { return $this->params[0]->get_csharp_type(); } function get_rpc_code() { return "RPC" ."_". strtoupper($this->service->program->name) ."_". strtoupper($this->service->name) ."_". strtoupper($this->name) ; } function is_one_way() { return $this->ret == "void" || $this->ret == "VOID"; } function on_annotations() { $atts = $this->service->program->annotations; // [function.service.add] // write = true ; service.add is a write function $key = "function.".$this->service->name.".".$this->name; if (array_key_exists($key, $atts)) { if (array_key_exists("write", $atts[$key])) $b = $atts[$key]["write"]; else $b = NULL; $this->is_write = ($b != NULL && ($b == "1" || $b == 1)); } } } class t_service extends t_type { var $functions; var $is_stateful; function __construct($program, $name) { parent::__construct($program, $name); $this->functions = array(); $this->is_stateful = false; $program->services[] = $this; } function add_function($ret, $name) { $f = new t_function($this, $ret, $name); $this->functions[] = $f; return $f; } function on_annotations() { $atts = $this->program->annotations; // [service.counter] // stateful = true ; counter is a stateful service $key = "service.".$this->name; if (array_key_exists($key, $atts)) { if (array_key_exists("stateful", $atts[$key])) $b = $atts[$key]["stateful"]; else $b = NULL; $this->is_stateful = ($b != NULL && ($b == "1" || $b == 1)); } // continue for each function foreach ($this->functions as $f) { $f->on_annotations(); } } } ?>
{ "pile_set_name": "Github" }
{ "name": "helloworld", "version": "1.0.0", "description": "This is helloworld project", "main": "index.js", "scripts": { "start": "nodemon ./app.js" }, "author": "", "license": "ISC", "devDependencies": { "log4js": "^3.0.6", "mysql": "^2.15.0", "redis": "^2.8.0" }, "dependencies": { "express": "^4.15.4", "nodemon": "^1.18.6" } }
{ "pile_set_name": "Github" }
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd"> <HTML style="overflow:auto;"> <HEAD> <meta name="generator" content="JDiff v1.1.0"> <!-- Generated by the JDiff Javadoc doclet --> <!-- (http://www.jdiff.org) --> <meta name="description" content="JDiff is a Javadoc doclet which generates an HTML report of all the packages, classes, constructors, methods, and fields which have been removed, added or changed in any way, including their documentation, when two APIs are compared."> <meta name="keywords" content="diff, jdiff, javadiff, java diff, java difference, API difference, difference between two APIs, API diff, Javadoc, doclet"> <TITLE> android.net.wifi.ScanResult </TITLE> <link href="../../../../assets/android-developer-docs.css" rel="stylesheet" type="text/css" /> <link href="../stylesheet-jdiff.css" rel="stylesheet" type="text/css" /> <noscript> <style type="text/css"> body{overflow:auto;} #body-content{position:relative; top:0;} #doc-content{overflow:visible;border-left:3px solid #666;} #side-nav{padding:0;} #side-nav .toggle-list ul {display:block;} #resize-packages-nav{border-bottom:3px solid #666;} </style> </noscript> <style type="text/css"> </style> </HEAD> <BODY> <!-- Start of nav bar --> <a name="top"></a> <div id="header" style="margin-bottom:0;padding-bottom:0;"> <div id="headerLeft"> <a href="../../../../index.html" tabindex="-1" target="_top"><img src="../../../../assets/images/bg_logo.png" alt="Android Developers" /></a> </div> <div id="headerRight"> <div id="headerLinks"> <!-- <img src="/assets/images/icon_world.jpg" alt="" /> --> <span class="text"> <!-- &nbsp;<a href="#">English</a> | --> <nobr><a href="http://developer.android.com" target="_top">Android Developers</a> | <a href="http://www.android.com" target="_top">Android.com</a></nobr> </span> </div> <div class="and-diff-id" style="margin-top:6px;margin-right:8px;"> <table class="diffspectable"> <tr> <td colspan="2" class="diffspechead">API Diff Specification</td> </tr> <tr> <td class="diffspec" style="padding-top:.25em">To Level:</td> <td class="diffvaluenew" style="padding-top:.25em">23</td> </tr> <tr> <td class="diffspec">From Level:</td> <td class="diffvalueold">22</td> </tr> <tr> <td class="diffspec">Generated</td> <td class="diffvalue">2015.08.14 14:28</td> </tr> </table> </div><!-- End and-diff-id --> <div class="and-diff-id" style="margin-right:8px;"> <table class="diffspectable"> <tr> <td class="diffspec" colspan="2"><a href="jdiff_statistics.html">Statistics</a> </tr> </table> </div> <!-- End and-diff-id --> </div> <!-- End headerRight --> </div> <!-- End header --> <div id="body-content" xstyle="padding:12px;padding-right:18px;"> <div id="doc-content" style="position:relative;"> <div id="mainBodyFluid"> <H2> Class android.net.wifi.<A HREF="../../../../reference/android/net/wifi/ScanResult.html" target="_top"><font size="+2"><code>ScanResult</code></font></A> </H2> <a NAME="constructors"></a> <a NAME="methods"></a> <p> <a NAME="Added"></a> <TABLE summary="Added Methods" WIDTH="100%"> <TR> <TH VALIGN="TOP" COLSPAN=2>Added Methods</FONT></TD> </TH> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A NAME="android.net.wifi.ScanResult.is80211mcResponder_added()"></A> <nobr><code>boolean</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#is80211mcResponder()" target="_top"><code>is80211mcResponder</code></A>()</nobr> </TD> <TD>&nbsp;</TD> </TR> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A 
NAME="android.net.wifi.ScanResult.isPasspointNetwork_added()"></A> <nobr><code>boolean</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#isPasspointNetwork()" target="_top"><code>isPasspointNetwork</code></A>()</nobr> </TD> <TD>&nbsp;</TD> </TR> </TABLE> &nbsp; <a NAME="fields"></a> <p> <a NAME="Added"></a> <TABLE summary="Added Fields" WIDTH="100%"> <TR> <TH VALIGN="TOP" COLSPAN=2>Added Fields</FONT></TD> </TH> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A NAME="android.net.wifi.ScanResult.CHANNEL_WIDTH_160MHZ"></A> <nobr><code>int</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#CHANNEL_WIDTH_160MHZ" target="_top"><code>CHANNEL_WIDTH_160MHZ</code></A></nobr> </TD> <TD>&nbsp;</TD> </TR> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A NAME="android.net.wifi.ScanResult.CHANNEL_WIDTH_20MHZ"></A> <nobr><code>int</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#CHANNEL_WIDTH_20MHZ" target="_top"><code>CHANNEL_WIDTH_20MHZ</code></A></nobr> </TD> <TD>&nbsp;</TD> </TR> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A NAME="android.net.wifi.ScanResult.CHANNEL_WIDTH_40MHZ"></A> <nobr><code>int</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#CHANNEL_WIDTH_40MHZ" target="_top"><code>CHANNEL_WIDTH_40MHZ</code></A></nobr> </TD> <TD>&nbsp;</TD> </TR> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A NAME="android.net.wifi.ScanResult.CHANNEL_WIDTH_80MHZ"></A> <nobr><code>int</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#CHANNEL_WIDTH_80MHZ" target="_top"><code>CHANNEL_WIDTH_80MHZ</code></A></nobr> </TD> <TD>&nbsp;</TD> </TR> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A NAME="android.net.wifi.ScanResult.CHANNEL_WIDTH_80MHZ_PLUS_MHZ"></A> <nobr><code>int</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#CHANNEL_WIDTH_80MHZ_PLUS_MHZ" target="_top"><code>CHANNEL_WIDTH_80MHZ_PLUS_MHZ</code></A></nobr> </TD> <TD>&nbsp;</TD> </TR> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A NAME="android.net.wifi.ScanResult.centerFreq0"></A> <nobr><code>int</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#centerFreq0" target="_top"><code>centerFreq0</code></A></nobr> </TD> <TD>&nbsp;</TD> </TR> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A NAME="android.net.wifi.ScanResult.centerFreq1"></A> <nobr><code>int</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#centerFreq1" target="_top"><code>centerFreq1</code></A></nobr> </TD> <TD>&nbsp;</TD> </TR> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A NAME="android.net.wifi.ScanResult.channelWidth"></A> <nobr><code>int</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#channelWidth" target="_top"><code>channelWidth</code></A></nobr> </TD> <TD>&nbsp;</TD> </TR> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A NAME="android.net.wifi.ScanResult.operatorFriendlyName"></A> <nobr><code>CharSequence</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#operatorFriendlyName" target="_top"><code>operatorFriendlyName</code></A></nobr> </TD> <TD>&nbsp;</TD> </TR> <TR BGCOLOR="#FFFFFF" CLASS="TableRowColor"> <TD VALIGN="TOP" WIDTH="25%"> <A 
NAME="android.net.wifi.ScanResult.venueName"></A> <nobr><code>CharSequence</code>&nbsp;<A HREF="../../../../reference/android/net/wifi/ScanResult.html#venueName" target="_top"><code>venueName</code></A></nobr> </TD> <TD>&nbsp;</TD> </TR> </TABLE> &nbsp; </div> <div id="footer"> <div id="copyright"> Except as noted, this content is licensed under <a href="http://creativecommons.org/licenses/by/2.5/"> Creative Commons Attribution 2.5</a>. For details and restrictions, see the <a href="/license.html">Content License</a>. </div> <div id="footerlinks"> <p> <a href="http://www.android.com/terms.html">Site Terms of Service</a> - <a href="http://www.android.com/privacy.html">Privacy Policy</a> - <a href="http://www.android.com/branding.html">Brand Guidelines</a> </p> </div> </div> <!-- end footer --> </div><!-- end doc-content --> </div> <!-- end body-content --> <script src="https://www.google-analytics.com/ga.js" type="text/javascript"></script> <script type="text/javascript"> try { var pageTracker = _gat._getTracker("UA-5831155-1"); pageTracker._setAllowAnchor(true); pageTracker._initData(); pageTracker._trackPageview(); } catch(e) {} </script> </BODY> </HTML>
{ "pile_set_name": "Github" }
/*
Copyright The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

// Code generated by client-gen. DO NOT EDIT.

package internalversion
{ "pile_set_name": "Github" }
#ifndef QRCODEV4_H
#define QRCODEV4_H

#include <QRCode.h>
#include <string.h>

class QRCodeV4 : public QRCode
{
    protected:

    private:
        void InitCapabilities(void);
        void InitECInfo(void);
        void InitAlignementPatterns(void);

    public:
        QRCodeV4();
        ~QRCodeV4();
};

#endif // QRCODEV4_H
{ "pile_set_name": "Github" }
require 'bundler/gem_tasks'
require 'rake'
require File.join(File.dirname(__FILE__), 'lib', 'tasks', 'churn_tasks')
require 'rake/testtask'

Rake::TestTask.new(:test) do |test|
  test.libs << 'lib' << 'test'
  test.pattern = 'test/**/*_test.rb'
  test.verbose = true
end

begin
  require 'rcov/rcovtask'
  Rcov::RcovTask.new do |test|
    test.libs << 'test'
    test.pattern = 'test/**/*_test.rb'
    test.verbose = true
  end
rescue LoadError
  task :rcov do
    abort "RCov is not available. In order to run rcov, you must: sudo gem install spicycode-rcov"
  end
end

task :default => :test

desc "Build Gem"
task :build_gem do
  `gem build churn.gemspec`
  `mv churn*.gem pkg/`
end

require 'rdoc/task'

$:.push File.expand_path("../lib", __FILE__)
require "churn/version"

Rake::RDocTask.new do |rdoc|
  rdoc.rdoc_dir = 'rdoc'
  rdoc.title = "churn #{Churn::VERSION}"
  rdoc.rdoc_files.include('README*')
  rdoc.rdoc_files.include('lib/**/*.rb')
end

desc "Build man page using ronn"
task :build_man_page do |t|
  puts "<#{t.name}> Building man page using ronn"
  `ronn -b README.md`
end
{ "pile_set_name": "Github" }
using System;
using System.Linq;
using System.Windows.Input;
using Xamarin.Essentials;
using Xamarin.Forms;

namespace Samples.ViewModel
{
    public class GeocodingViewModel : BaseViewModel
    {
        string lat = 47.67398.ToString();
        string lon = (-122.121513).ToString();
        string address = "Microsoft Building 25 Redmond WA USA";

        string geocodeAddress;
        string geocodePosition;

        public GeocodingViewModel()
        {
            GetAddressCommand = new Command(OnGetAddress);
            GetPositionCommand = new Command(OnGetPosition);
        }

        public ICommand GetAddressCommand { get; }

        public ICommand GetPositionCommand { get; }

        public string Latitude
        {
            get => lat;
            set => SetProperty(ref lat, value);
        }

        public string Longitude
        {
            get => lon;
            set => SetProperty(ref lon, value);
        }

        public string GeocodeAddress
        {
            get => geocodeAddress;
            set => SetProperty(ref geocodeAddress, value);
        }

        public string Address
        {
            get => address;
            set => SetProperty(ref address, value);
        }

        public string GeocodePosition
        {
            get => geocodePosition;
            set => SetProperty(ref geocodePosition, value);
        }

        async void OnGetPosition()
        {
            if (IsBusy)
                return;
            IsBusy = true;

            try
            {
                var locations = await Geocoding.GetLocationsAsync(Address);
                var location = locations?.FirstOrDefault();
                if (location == null)
                {
                    GeocodePosition = "Unable to detect locations";
                }
                else
                {
                    GeocodePosition =
                        $"{nameof(location.Latitude)}: {location.Latitude}\n" +
                        $"{nameof(location.Longitude)}: {location.Longitude}\n";
                }
            }
            catch (Exception ex)
            {
                GeocodePosition = $"Unable to detect locations: {ex.Message}";
            }
            finally
            {
                IsBusy = false;
            }
        }

        async void OnGetAddress()
        {
            if (IsBusy)
                return;
            IsBusy = true;

            try
            {
                double.TryParse(lat, out var lt);
                double.TryParse(lon, out var ln);

                var placemarks = await Geocoding.GetPlacemarksAsync(lt, ln);
                var placemark = placemarks?.FirstOrDefault();
                if (placemark == null)
                {
                    GeocodeAddress = "Unable to detect placemarks.";
                }
                else
                {
                    GeocodeAddress =
                        $"{nameof(placemark.AdminArea)}: {placemark.AdminArea}\n" +
                        $"{nameof(placemark.CountryCode)}: {placemark.CountryCode}\n" +
                        $"{nameof(placemark.CountryName)}: {placemark.CountryName}\n" +
                        $"{nameof(placemark.FeatureName)}: {placemark.FeatureName}\n" +
                        $"{nameof(placemark.Locality)}: {placemark.Locality}\n" +
                        $"{nameof(placemark.PostalCode)}: {placemark.PostalCode}\n" +
                        $"{nameof(placemark.SubAdminArea)}: {placemark.SubAdminArea}\n" +
                        $"{nameof(placemark.SubLocality)}: {placemark.SubLocality}\n" +
                        $"{nameof(placemark.SubThoroughfare)}: {placemark.SubThoroughfare}\n" +
                        $"{nameof(placemark.Thoroughfare)}: {placemark.Thoroughfare}\n";
                }
            }
            catch (Exception ex)
            {
                GeocodeAddress = $"Unable to detect placemarks: {ex.Message}";
            }
            finally
            {
                IsBusy = false;
            }
        }
    }
}
{ "pile_set_name": "Github" }
/*
 * SpanDSP - a series of DSP components for telephony
 *
 * g722.h - The ITU G.722 codec.
 *
 * Written by Steve Underwood <[email protected]>
 *
 * Copyright (C) 2005 Steve Underwood
 *
 * Despite my general liking of the GPL, I place my own contributions
 * to this code in the public domain for the benefit of all mankind -
 * even the slimy ones who might try to proprietize my work and use it
 * to my detriment.
 *
 * Based on a single channel G.722 codec which is:
 *
 ***** Copyright (c) CMU 1993 *****
 * Computer Science, Speech Group
 * Chengxiang Lu and Alex Hauptmann
 *
 * $Id: g722.h,v 1.10 2006/06/16 12:45:53 steveu Exp $
 *
 * Modifications for WebRtc, 2011/04/28, by tlegrand:
 * -Changed to use WebRtc types
 * -Added new defines for minimum and maximum values of short int
 */

/*! \file */

#if !defined(_G722_ENC_DEC_H_)
#define _G722_ENC_DEC_H_

#include "tinydav_config.h"

/*! \page g722_page G.722 encoding and decoding
\section g722_page_sec_1 What does it do?
The G.722 module is a bit exact implementation of the ITU G.722 specification for all three
specified bit rates - 64000bps, 56000bps and 48000bps. It passes the ITU tests.

To allow fast and flexible interworking with narrow band telephony, the encoder and decoder
support an option for the linear audio to be an 8k samples/second stream. In this mode the
codec is considerably faster, and still fully compatible with wideband terminals using G.722.

\section g722_page_sec_2 How does it work?
???.
*/

#define TDAV_INT16_MAX 32767
#define TDAV_INT16_MIN -32768

enum
{
    G722_SAMPLE_RATE_8000 = 0x0001,
    G722_PACKED = 0x0002
};

typedef struct
{
    /*! TRUE if operating in the special ITU test mode, with the band split filters disabled. */
    int itu_test_mode;
    /*! TRUE if the G.722 data is packed */
    int packed;
    /*! TRUE if encode from 8k samples/second */
    int eight_k;
    /*! 6 for 48000kbps, 7 for 56000kbps, or 8 for 64000kbps. */
    int bits_per_sample;

    /*! Signal history for the QMF */
    int x[24];

    struct
    {
        int s;
        int sp;
        int sz;
        int r[3];
        int a[3];
        int ap[3];
        int p[3];
        int d[7];
        int b[7];
        int bp[7];
        int sg[7];
        int nb;
        int det;
    } band[2];

    unsigned int in_buffer;
    int in_bits;
    unsigned int out_buffer;
    int out_bits;
} g722_encode_state_t;

typedef struct
{
    /*! TRUE if operating in the special ITU test mode, with the band split filters disabled. */
    int itu_test_mode;
    /*! TRUE if the G.722 data is packed */
    int packed;
    /*! TRUE if decode to 8k samples/second */
    int eight_k;
    /*! 6 for 48000kbps, 7 for 56000kbps, or 8 for 64000kbps. */
    int bits_per_sample;

    /*! Signal history for the QMF */
    int x[24];

    struct
    {
        int s;
        int sp;
        int sz;
        int r[3];
        int a[3];
        int ap[3];
        int p[3];
        int d[7];
        int b[7];
        int bp[7];
        int sg[7];
        int nb;
        int det;
    } band[2];

    unsigned int in_buffer;
    int in_bits;
    unsigned int out_buffer;
    int out_bits;
} g722_decode_state_t;

#ifdef __cplusplus
extern "C" {
#endif

g722_encode_state_t *g722_encode_init(g722_encode_state_t *s, int rate, int options);
int g722_encode_release(g722_encode_state_t *s);
int g722_encode(g722_encode_state_t *s, uint8_t g722_data[], const int16_t amp[], int len);

g722_decode_state_t *g722_decode_init(g722_decode_state_t *s, int rate, int options);
int g722_decode_release(g722_decode_state_t *s);
int g722_decode(g722_decode_state_t *s, int16_t amp[], const uint8_t g722_data[], int len);

#ifdef __cplusplus
}
#endif

#endif
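The prototypes above are the complete public API of this module. The sketch below is an illustrative encode/decode round trip added for clarity; it is not part of the original header. The helper name g722_roundtrip_example, the 160-sample frame and the buffer sizes are assumptions chosen for the example, not values required by the API.

/* Illustrative usage sketch -- not part of g722.h.
 * Assumes 16 kHz, 16-bit mono input and the declarations above;
 * the 160-sample frame and buffer sizes are arbitrary choices. */
#include <stdint.h>
#include <string.h>

static int g722_roundtrip_example(const int16_t pcm_in[160], int16_t pcm_out[160])
{
    g722_encode_state_t enc;
    g722_decode_state_t dec;
    uint8_t coded[160];   /* at 64000bps, 160 wideband samples encode to 80 bytes; oversized here */
    int coded_len;
    int decoded_len;

    memset(&enc, 0, sizeof(enc));
    memset(&dec, 0, sizeof(dec));

    /* 64000bps, options = 0: wideband (16k samples/second) linear audio, unpacked G.722 octets */
    if (g722_encode_init(&enc, 64000, 0) == NULL)
        return -1;
    if (g722_decode_init(&dec, 64000, 0) == NULL) {
        g722_encode_release(&enc);
        return -1;
    }

    coded_len = g722_encode(&enc, coded, pcm_in, 160);
    decoded_len = g722_decode(&dec, pcm_out, coded, coded_len);

    g722_encode_release(&enc);
    g722_decode_release(&dec);

    return decoded_len;   /* number of linear samples recovered */
}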
{ "pile_set_name": "Github" }