prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from ._policy_tracked_resources_operations import PolicyTrackedResourcesOperations from ._remediations_operations import RemediationsOperations from ._policy_events_operations import PolicyEventsOperations from ._policy_states_operations import PolicyStatesOperations from ._operations import Operations from ._policy_metadata_operations import PolicyMetadataOperations from ._policy_restrictions_operations import PolicyRestrictionsOperations from ._attestations_operations import AttestationsOperations __all__ = [ 'PolicyTrackedResourcesOperations', 'RemediationsOperations', 'PolicyEventsOperations',<|fim▁hole|> 'PolicyMetadataOperations', 'PolicyRestrictionsOperations', 'AttestationsOperations', ]<|fim▁end|>
'PolicyStatesOperations', 'Operations',
<|file_name|>sdk.ts<|end_file_name|><|fim▁begin|>import { NativeModules } from "react-native"; const { AMapSdk } = NativeModules; export function init(apiKey?: string) { AMapSdk.initSDK(apiKey); } <|fim▁hole|>}<|fim▁end|>
export function getVersion(): Promise<string> { return AMapSdk.getVersion();
<|file_name|>edsplib.cpp<|end_file_name|><|fim▁begin|>// kX E-DSP Control utility // Copyright (c) Eugene Gavrilov, 2008-2014. // www.kxproject.com // All rights reserved /* * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ #include "stdafx.h" #include "interface/kxapi.h" #include "vers.h" #include "emu.h" #ifdef _MSC_VER #pragma warning(disable:4100) #endif #include "hana_netlist.h" // this is v1 card - hana_netlist #include "emu1010b_netlist.h" // this is v2 card - emu1010b_netlist #include "emu0404_netlist.h" // this is v1 0404 - emu0404_netlist #include "emu1010_notebook_netlist.h" // this is v2 cardbus - emu1010_notebook_netlist #include "audio_dock_netlist.h" // this is original card-powered dock - audio_dock_netlist #include "micro_dock_netlist.h" // this is v2 self-powered microdock - micro_dock_netlist // fpga functions: int is_fpga_programmed(iKX *ikx); int is_dock_connected(iKX *ikx); int is_dock_online(iKX *ikx); int upload_card_firmware(iKX *ikx); int upload_dock_firmware(iKX *ikx); void load_defaults(iKX *ikx); void print_status(iKX *ikx); int is_fpga_programmed(iKX *ikx) { byte reg=0; ikx->fpga_read(EMU_HANA_ID, &reg); return ((reg & 0x3f) == 0x15); } int is_dock_connected(iKX *ikx) { byte reg=0; ikx->fpga_read(EMU_HANA_OPTION_CARDS, &reg ); // OPTIONS: Which cards are attached to the EMU return ((reg & 
EMU_HANA_OPTION_DOCK_OFFLINE) || (reg & EMU_HANA_OPTION_DOCK_ONLINE)); } int is_dock_online(iKX *ikx) { byte reg=0; ikx->fpga_read(EMU_HANA_OPTION_CARDS, &reg ); // OPTIONS: Which cards are attached to the EMU return (reg & EMU_HANA_OPTION_DOCK_ONLINE); } int upload_card_firmware(iKX *ikx) { dword is_k8=0; ikx->get_dword(KX_DWORD_IS_K8,&is_k8); dword is_cardbus=0; ikx->get_dword(KX_DWORD_IS_CARDBUS,&is_cardbus); dword subsys=0; ikx->get_dword(KX_DWORD_SUBSYS,&subsys); // power-down audiodock first [won't work for microdock/v2-cards] if(!is_k8) ikx->fpga_write(EMU_HANA_DOCK_PWR,0); if(is_fpga_programmed(ikx)) { // FPGA netlist already present so clear it // Return to programming mode ikx->fpga_write(EMU_HANA_FPGA_CONFIG, EMU_HANA_FPGA_CONFIG_HANA); } if(is_fpga_programmed(ikx)) { printf("Error: Failed to set FPGA to programming mode\n"); return -5; } int ret; if(is_cardbus) // 1616 cardbus { if(is_k8) { if(subsys==0x42011102) // EM8950, 1616 cardbus ret=ikx->upload_fpga_firmware(emu1010_notebook_netlist,sizeof(emu1010_notebook_netlist)); else { printf("Error: your E-DSP card is cardbus, but not EM8950\n"); // ret=ikx->upload_fpga_firmware(emu1010_notebook_netlist,sizeof(emu1010_notebook_netlist)); ret=-11; } } else { printf("Error: your E-DSP card is cardbus/PCMCIA, but not p17v-based\n"); ret=-11; } } if(is_k8) // v2 EM8960 or v2 0404 or PCIe 0404 EM8982 { if(subsys==0x40021102 || subsys==0x40051102) // 0404 or EM8982 PCIe 0404 { ret=ikx->upload_fpga_firmware(emu0404_netlist,sizeof(emu0404_netlist)); // 0404 v2 } else if(subsys==0x40041102) // v2 EM8960 ('PCI' series) ret=ikx->upload_fpga_firmware(emu1010b_netlist,sizeof(emu1010b_netlist)); else if(subsys==0x40071102) // EM8982 - 1010 ('PCIe' series) { ret=ikx->upload_fpga_firmware(emu1010b_netlist,sizeof(emu1010b_netlist)); } else { printf("Error: your E-DSP card is not recognized, assume it is v2 EM8960-like\n"); ret=ikx->upload_fpga_firmware(emu1010b_netlist,sizeof(emu1010b_netlist)); } } else // v1 1010 or v1 
0404 { if(subsys==0x40021102) ret=ikx->upload_fpga_firmware(emu0404_netlist,sizeof(emu0404_netlist)); // 0404 v1 else if(subsys==0x40011102) ret=ikx->upload_fpga_firmware(hana_netlist,sizeof(hana_netlist)); // original v1 1010 else { printf("Warning: your E-DSP card is not recognized\n"); // ret=ikx->upload_fpga_firmware(hana_netlist,sizeof(hana_netlist)); ret=-11; } } if(ret) printf("Error: FPGA could not be programmed [%x]\n",ret); // check if it was OK if(ret==0) { if(!is_fpga_programmed(ikx)) { printf("Error: FPGA programming failed!\n"); ret=-5; } } if(ret==0) { printf("FPGA firmware uploaded successfully\n"); byte v1,v2,v3,v4,v5; ikx->fpga_read(EMU_HANA_MAJOR_REV,&v1); ikx->fpga_read(EMU_HANA_MINOR_REV,&v2); ikx->fpga_read(EMU_DOCK_MAJOR_REV,&v3); ikx->fpga_read(EMU_DOCK_MINOR_REV,&v4); ikx->fpga_read(EMU_DOCK_BOARD_ID,&v5); printf("FPGA Version: Hana: %d.%d, Dock: %d.%d, Board: %d\n",v1,v2,v3,v4,v5); // Power-On AudioDock if(!is_k8) ikx->fpga_write(EMU_HANA_DOCK_PWR,EMU_HANA_DOCK_PWR_ON); #if defined(WIN32) Sleep(100); #elif defined(__APPLE__) sleep(1); #else #error unknown architecture #endif // Unmute all. 
Default is muted after a firmware load ikx->fpga_write(EMU_HANA_UNMUTE,EMU_UNMUTE); } return ret; } int upload_dock_firmware(iKX *ikx) { dword is_k8=0; ikx->get_dword(KX_DWORD_IS_K8,&is_k8); dword is_cardbus=0; ikx->get_dword(KX_DWORD_IS_CARDBUS,&is_cardbus); dword subsys=0; ikx->get_dword(KX_DWORD_SUBSYS,&subsys); int ret=-1; if (is_dock_connected(ikx)) { // Audio Dock attached // Return to Audio Dock programming mode */ printf("Found AudioDock in OFFLINE state, upload FPGA firmware now...\n"); ikx->fpga_write(EMU_HANA_UNMUTE,EMU_MUTE); ikx->fpga_write(EMU_HANA_FPGA_CONFIG, EMU_HANA_FPGA_CONFIG_AUDIODOCK); if(is_k8) { if(subsys==0x42011102 || // this is microdock subsys==0x40041102 || // this is v2 EM8960 subsys==0x40071102) // this is PCIe 1010 { ret=ikx->upload_fpga_firmware(micro_dock_netlist,sizeof(micro_dock_netlist)); } else { printf("Error: your Dock seems to be MicroDock, but it is not EM8960, 1010-PCIe or EM8950\n"); // ret=ikx->upload_fpga_firmware(micro_dock_netlist,sizeof(micro_dock_netlist)); ret=-11;<|fim▁hole|> } } else { if(subsys==0x40011102) // original v1 { ret=ikx->upload_fpga_firmware(audio_dock_netlist,sizeof(audio_dock_netlist)); } else { printf("Warning: your Dock seems to be AudioDock, but it is not recognized\n"); // ret=ikx->upload_fpga_firmware(audio_dock_netlist,sizeof(audio_dock_netlist)); ret=-11; } } if(ret) printf("Error: FPGA could not be programmed [%x]\n",ret); ikx->fpga_write(EMU_HANA_FPGA_CONFIG, 0); // FIXME: do we need this?.. 
byte tmp=0; ikx->fpga_read(EMU_HANA_IRQ_STATUS, &tmp); if(ret==0) if(!is_fpga_programmed(ikx)) { // FPGA failed to be programmed printf("Failed to program Dock FPGA!\n"); ret=-12; } byte v1,v2,v3,v4,v5; ikx->fpga_read(EMU_HANA_MAJOR_REV,&v1); ikx->fpga_read(EMU_HANA_MINOR_REV,&v2); ikx->fpga_read(EMU_DOCK_MAJOR_REV,&v3); ikx->fpga_read(EMU_DOCK_MINOR_REV,&v4); ikx->fpga_read(EMU_DOCK_BOARD_ID,&v5); printf("FPGA Version: Hana: %d.%d, Dock: %d.%d, Board: %d\n",v1,v2,v3,v4,v5); // Sync clocking between 1010 and the Dock // Allow DLL to settle #if defined(WIN32) Sleep(100); #elif defined(__APPLE__) sleep(1); #else #error unknown architecture #endif // Unmute all. Default is muted after a firmware load ikx->fpga_write(EMU_HANA_UNMUTE,EMU_UNMUTE); } else printf("Dock is not connected\n"); return ret; } void load_defaults(iKX *ikx) { dword is_k8=0; ikx->get_dword(KX_DWORD_IS_K8,&is_k8); // usually 1 for new p17v-based E-DSP: 1616m cardbus, 1616m PCI, 1212m PCI, ... // mute all first ikx->fpga_write(EMU_HANA_UNMUTE,EMU_MUTE); byte val=0; byte cards=0; ikx->fpga_read(EMU_HANA_OPTION_CARDS,&cards); ikx->fpga_read(EMU_HANA_DOCK_PWR,&val); // MicroDock has no power control, while AudioDock has one; for MicroDock FPGA returns 0x1f in this register if(val!=0x1f) ikx->fpga_write(EMU_HANA_DOCK_PWR,EMU_HANA_DOCK_PWR_ON); ikx->fpga_write(EMU_HANA_WCLOCK,EMU_HANA_WCLOCK_INT_48K); ikx->fpga_write(EMU_HANA_DEFCLOCK,EMU_HANA_DEFCLOCK_48K); ikx->fpga_write(EMU_HANA_IRQ_ENABLE,EMU_HANA_IRQ_WCLK_CHANGED|EMU_HANA_IRQ_ADAT|EMU_HANA_IRQ_DOCK|EMU_HANA_IRQ_DOCK_LOST); ikx->fpga_write(EMU_HANA_SPDIF_MODE,EMU_HANA_SPDIF_MODE_TX_COMSUMER|EMU_HANA_SPDIF_MODE_RX_COMSUMER); ikx->fpga_write(EMU_HANA_OPTICAL_TYPE,EMU_HANA_OPTICAL_IN_SPDIF|EMU_HANA_OPTICAL_OUT_SPDIF); byte midi_in=0; if(cards&EMU_HANA_OPTION_HAMOA) { midi_in|=EMU_HANA_MIDI_INA_FROM_HAMOA; // hamoa + dock? 
if(cards&EMU_HANA_OPTION_DOCK_ONLINE) midi_in|=EMU_HANA_MIDI_INA_FROM_DOCK1; } else if(cards&EMU_HANA_OPTION_DOCK_ONLINE) midi_in=EMU_HANA_MIDI_INA_FROM_DOCK1|EMU_HANA_MIDI_INA_FROM_DOCK2; ikx->fpga_write(EMU_HANA_MIDI_IN,midi_in); // MIDI Out: // HAMOA: MIDI A // Dock1: MIDI A // Dock2: MIDI B // Sync: MIDI B ikx->fpga_write(EMU_HANA_MIDI_OUT,EMU_HANA_MIDI_OUT_DOCK2|EMU_HANA_MIDI_OUT_SYNC2); ikx->fpga_write(EMU_HANA_DOCK_LEDS_1,0x0); ikx->fpga_write(EMU_HANA_DOCK_LEDS_2,0x12); ikx->fpga_write(EMU_HANA_DOCK_LEDS_3,0x0); ikx->fpga_write(EMU_HANA_ADC_PADS,0x0 /* EMU_HANA_DOCK_ADC_PAD1|EMU_HANA_DOCK_ADC_PAD2|EMU_HANA_DOCK_ADC_PAD3|EMU_HANA_0202_ADC_PAD1*/ ); // +4dB PRO ikx->fpga_write(EMU_HANA_DAC_PADS,0x0 /* EMU_HANA_DOCK_DAC_PAD1|EMU_HANA_DOCK_DAC_PAD2|EMU_HANA_DOCK_DAC_PAD3|EMU_HANA_DOCK_DAC_PAD4|EMU_HANA_0202_DAC_PAD1*/); // +4dB PRO // FIXME !! // microdock: // ikx->fpga_write(EMU_HANA_DOCK_MISC,0x1f); // audiodock: // ikx->fpga_write(EMU_HANA_DOCK_MISC,EMU_HANA_DOCK_PHONES_192_DAC1); // default connections: // ******************** // out10,out11,out12,out13 [epilog's out17,out18,out30,out31] -> unused // out0,out1: // for 1212m, 0404 -> output [300,301]; 1616m -> headphones [300,301] ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+0,EMU_DST_HAMOA_DAC_LEFT1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+1,EMU_DST_HAMOA_DAC_RIGHT1); // for audiodock: -> DAC1 [100,104] ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+0,EMU_DST_DOCK_DAC1_LEFT1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+1,EMU_DST_DOCK_DAC1_RIGHT1); // and -> headphones [112,116] if(!is_k8) { ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+0,EMU_DST_DOCK_PHONES_LEFT1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+1,EMU_DST_DOCK_PHONES_RIGHT1); } // out2,out3: // on-board SPDIF [200, 201] ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+2,EMU_DST_HANA_SPDIF_LEFT1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+3,EMU_DST_HANA_SPDIF_RIGHT1); // and dock SPDIF [11a,11e] (AudioDock only, not MicroDock) if(!is_k8) { 
ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+2,EMU_DST_DOCK_SPDIF_LEFT1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+3,EMU_DST_DOCK_SPDIF_RIGHT1); } else // and dock SPDIF [112,116] (MicroDock, not AudioDock) { ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+2,EMU_DST_MDOCK_SPDIF_LEFT1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+3,EMU_DST_MDOCK_SPDIF_RIGHT1); } // out4,out5: // for audiodock: -> DAC2 [108,10c] ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+4,EMU_DST_DOCK_DAC2_LEFT1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+5,EMU_DST_DOCK_DAC2_RIGHT1); // out6,out7: // for audiodock: -> DAC3 [110,114] ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+6,EMU_DST_DOCK_DAC3_LEFT1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+7,EMU_DST_DOCK_DAC3_RIGHT1); // out8,out9: if(!is_k8) { // for audiodock: -> DAC4 [118,11c] ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+8,EMU_DST_DOCK_DAC4_LEFT1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+9,EMU_DST_DOCK_DAC4_RIGHT1); } // else: for MicroDock these outputs are already used by ADAT if(is_k8) { // MicroDock ADAT [118..11f] ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+2,EMU_DST_MDOCK_ADAT+0); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+3,EMU_DST_MDOCK_ADAT+1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+4,EMU_DST_MDOCK_ADAT+2); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+5,EMU_DST_MDOCK_ADAT+3); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+6,EMU_DST_MDOCK_ADAT+4); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+7,EMU_DST_MDOCK_ADAT+5); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+8,EMU_DST_MDOCK_ADAT+6); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+9,EMU_DST_MDOCK_ADAT+7); } // out2,3 4,5 6,7 8,9: // on-board ADAT (400..407) ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+2,EMU_DST_HANA_ADAT+0); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+3,EMU_DST_HANA_ADAT+1); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+4,EMU_DST_HANA_ADAT+2); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+5,EMU_DST_HANA_ADAT+3); 
ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+6,EMU_DST_HANA_ADAT+4); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+7,EMU_DST_HANA_ADAT+5); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+8,EMU_DST_HANA_ADAT+6); ikx->fpga_link_src2dst(EMU_SRC_ALICE_EMU32A+9,EMU_DST_HANA_ADAT+7); // inputs // ****** int input=0; int table[16] = { 0xf,0x0,0x1,0x2,0x3,0x4,0x5,0x6,0x7,0x8,0x9,0xa,0xb,0xc,0xd,0xe }; // in 0,1 <- A,B // in 2,3 <- ADC1 // in 4,5 <- ADC2 // in 6,7 <- ADC3 ikx->fpga_link_src2dst(EMU_SRC_DOCK_MIC_A1,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_DOCK_MIC_B1,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_DOCK_ADC1_LEFT1,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_DOCK_ADC1_RIGHT1,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_DOCK_ADC2_LEFT1,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_DOCK_ADC2_RIGHT1,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_DOCK_ADC3_LEFT1,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_DOCK_ADC3_RIGHT1,table[input++]); // in 8,9 SPDIF-In ikx->fpga_link_src2dst(EMU_SRC_HANA_SPDIF_LEFT1,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_HANA_SPDIF_RIGHT1,table[input++]); // in 10,11: // for 0404 and 1212, use their inputs first ikx->fpga_link_src2dst(EMU_SRC_HAMOA_ADC_LEFT1,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_HAMOA_ADC_RIGHT1,table[input++]); // in 12,13, 14,15 ADAT-In [Channel 0,1 only] // (default 'prolog' will not display 14,15) ikx->fpga_link_src2dst(EMU_SRC_HANA_ADAT+0,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_HANA_ADAT+1,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_HANA_ADAT+2,table[input++]); ikx->fpga_link_src2dst(EMU_SRC_HANA_ADAT+3,table[input++]); // NOTE: // for MicroDock, we can also use: // EMU_SRC_MDOCK_SPDIF_xxx // and EMU_SRC_MDOCK_ADAT // unmute now ikx->fpga_write(EMU_HANA_UNMUTE,EMU_UNMUTE); printf("==== E-DSP Parameters were reset to defaults\n"); } void print_status(iKX *ikx) { printf("==== Current E-DSP status\n"); printf("Card: '%s'\n",ikx->get_device_name()); byte val; 
ikx->fpga_read(EMU_HANA_OPTION_CARDS,&val); printf("Options: %x - %s %s %s %s\n", val, val&EMU_HANA_OPTION_HAMOA?"Hamoa":"", val&EMU_HANA_OPTION_SYNC?"Sync":"", val&EMU_HANA_OPTION_DOCK_ONLINE?"Dock_online":"", val&EMU_HANA_OPTION_DOCK_OFFLINE?"Dock_offline":""); ikx->fpga_read(EMU_HANA_ID,&val); printf("Hana ID: %x\n",val); byte v1,v2,v3,v4,v5; ikx->fpga_read(EMU_HANA_MAJOR_REV,&v1); ikx->fpga_read(EMU_HANA_MINOR_REV,&v2); ikx->fpga_read(EMU_DOCK_MAJOR_REV,&v3); ikx->fpga_read(EMU_DOCK_MINOR_REV,&v4); ikx->fpga_read(EMU_DOCK_BOARD_ID,&v5); printf("Versions: %d.%d, %d.%d, %d\n",v1,v2,v3,v4,v5); ikx->fpga_read(EMU_HANA_DOCK_PWR,&val); if(val!=0x1f) printf("Dock is %s. [0x%x]\n",val&EMU_HANA_DOCK_PWR_ON?"Powered-On":"Powered-off",val); else printf("Dock power state is not known, probably, self-powered MicroDock [%x]\n",val); const char *wclock_list[]= { "Int 48", "Int 44.1", "HANA SPDIF-In", "HANA ADAT-In", "Sync BNC", "2nd Hana", "Reserved", "OFF" }; ikx->fpga_read(EMU_HANA_WCLOCK,&val); printf("WClock: %x [%s]\n",val&EMU_HANA_WCLOCK_SRC_MASK,wclock_list[val&EMU_HANA_WCLOCK_SRC_MASK]); ikx->fpga_read(EMU_HANA_DEFCLOCK,&val); printf("Def clock: %d\n",(val==EMU_HANA_DEFCLOCK_48K)?48000:(val==EMU_HANA_DEFCLOCK_44_1K)?44100:-1); ikx->fpga_read(EMU_HANA_UNMUTE,&val); printf("HANA is %s\n",(val==EMU_MUTE)?"Muted":(val==EMU_UNMUTE)?"UnMuted":"Unknown!"); ikx->fpga_read(EMU_HANA_IRQ_ENABLE,&val); printf("HANA IRQs: %x\n",val); ikx->fpga_read(EMU_HANA_SPDIF_MODE,&val); printf("SPDIF mode [%x]:\n\tOut: %s%s\n\tIn: %s%s%s\n",val, (val&EMU_HANA_SPDIF_MODE_TX_PRO)?"Professional":"Consumer", (val&EMU_HANA_SPDIF_MODE_TX_NOCOPY)?"; Copy-protected":"", (val&EMU_HANA_SPDIF_MODE_RX_PRO)?"; Professional":"Consumer", (val&EMU_HANA_SPDIF_MODE_RX_NOCOPY)?"; Copy-protected":"", (val&EMU_HANA_SPDIF_MODE_RX_INVALID)?"; Not Valid":""); ikx->fpga_read(EMU_HANA_OPTICAL_TYPE,&val); printf("Optical Type: [%x] -- Input: %s, Output: %s\n",val, (val&EMU_HANA_OPTICAL_IN_ADAT)?"ADAT":"SPDIF", 
(val&EMU_HANA_OPTICAL_OUT_ADAT)?"ADAT":"SPDIF"); const char *midi_in[4]={ "none", "0202", "dock1", "dock2 " }; ikx->fpga_read(EMU_HANA_MIDI_IN,&val); printf("MIDI In: %s, %s [%x]\n", midi_in[(val&3)],midi_in[(val>>3)&3],val); ikx->fpga_read(EMU_HANA_MIDI_OUT,&val); printf("MIDI Out: %x\n",val); ikx->fpga_read(EMU_HANA_DOCK_LEDS_1,&v1); ikx->fpga_read(EMU_HANA_DOCK_LEDS_2,&v2); ikx->fpga_read(EMU_HANA_DOCK_LEDS_3,&v3); printf("LEDs: %x %x %x\n",v1,v2,v3); ikx->fpga_read(EMU_HANA_ADC_PADS,&val); printf("ADC pads: %x - %s%s%s%s\n", val, (val&EMU_HANA_DOCK_ADC_PAD1)?"Dock_1 ":"", (val&EMU_HANA_DOCK_ADC_PAD2)?"Dock_2 ":"", (val&EMU_HANA_DOCK_ADC_PAD3)?"Dock_3 ":"", (val&EMU_HANA_0202_ADC_PAD1)?"0202_1 ":""); ikx->fpga_read(EMU_HANA_DAC_PADS,&val); printf("DAC pads: %x - %s%s%s%s%s\n", val, (val&EMU_HANA_DOCK_DAC_PAD1)?"Dock_1 ":"", (val&EMU_HANA_DOCK_DAC_PAD2)?"Dock_2 ":"", (val&EMU_HANA_DOCK_DAC_PAD3)?"Dock_3 ":"", (val&EMU_HANA_DOCK_DAC_PAD4)?"Dock_4 ":"", (val&EMU_HANA_0202_DAC_PAD1)?"0202_1 ":""); // FIXME !! this register is not valid for MicroDock ?? ikx->fpga_read(EMU_HANA_DOCK_MISC,&val); if(val!=0x1f) printf("Dock misc: [%x] DAC mute: %d%d%d%d headphones: %d\n",val, (val&EMU_HANA_DOCK_DAC1_MUTE)?1:0, (val&EMU_HANA_DOCK_DAC2_MUTE)?1:0, (val&EMU_HANA_DOCK_DAC3_MUTE)?1:0, (val&EMU_HANA_DOCK_DAC4_MUTE)?1:0, val>>4); else printf("Dock misc flags are set to %x, probably, MicroDock\n",val); ikx->fpga_read(0x38,&val); // FIXME !! register not documented if ((val & 0x1) == 0) { byte val1, val2; ikx->fpga_read(EMU_HANA_WC_ADAT_HI,&val1); ikx->fpga_read(EMU_HANA_WC_ADAT_LO,&val2); printf("ADAT Locked : %d\n", 0x1770000 / (((val1 << 5) | val2)+1)); } else printf("ADAT Unlocked\n"); ikx->fpga_read(EMU_HANA_IRQ_STATUS,&val); if ((val & EMU_HANA_IRQ_DOCK) == 0) // ??? FIXME !! 
this is not correct { byte val1,val2; ikx->fpga_read(EMU_HANA_WC_SPDIF_HI,&val1); ikx->fpga_read(EMU_HANA_WC_SPDIF_LO,&val2); printf("SPDIF Locked : %d\n", 0x1770000 / (((val1 << 5) | val2)+1)); } else printf("SPDIF Unlocked\n"); if( 1 ) // FIXME don't know BNC lock bit location { byte val1,val2; ikx->fpga_read(EMU_HANA_WC_BNC_HI,&val1); ikx->fpga_read(EMU_HANA_WC_BNC_LO,&val2); printf("BNC Locked : %d\n", 0x1770000 / (((val1 << 5) | val2)+1)); } else printf("Sync card unlocked\n"); if ( 1 ) // FIXME don't know SPDIF lock bit location { byte val1,val2; ikx->fpga_read(EMU_HANA2_WC_SPDIF_HI,&val1); ikx->fpga_read(EMU_HANA2_WC_SPDIF_LO,&val2); printf("SPDIF Locked : %d\n", 0x1770000 / (((val1 << 5) | val2)+1)); } else printf("SPDIF Unlocked\n"); printf("reg 14-1f:"); for(int i=0x14;i<0x20;i++) { ikx->fpga_read((byte)i,&val); printf("[%x] ",val); } printf("\n"); printf("reg 30-3f:"); for(int i=0x30;i<0x3f;i++) { ikx->fpga_read((byte)i,&val); printf("[%x] ",val); } printf("\n"); }<|fim▁end|>
<|file_name|>terminal.ts<|end_file_name|><|fim▁begin|>/*--------------------------------------------------------------------------------------------- * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import { Event } from 'vs/base/common/event'; import { IDisposable } from 'vs/base/common/lifecycle'; import { URI } from 'vs/base/common/uri'; import { FindReplaceState } from 'vs/editor/contrib/find/findState'; import { createDecorator } from 'vs/platform/instantiation/common/instantiation'; import { IShellLaunchConfig, ITerminalChildProcess, ITerminalDimensions, ITerminalLaunchError, ITerminalProfile, ITerminalTabLayoutInfoById, TerminalIcon, TitleEventSource, TerminalShellType, IExtensionTerminalProfile, ITerminalProfileType, TerminalLocation, ICreateContributedTerminalProfileOptions } from 'vs/platform/terminal/common/terminal'; import { ICommandTracker, INavigationMode, IOffProcessTerminalService, IRemoteTerminalAttachTarget, IStartExtensionTerminalRequest, ITerminalConfigHelper, ITerminalProcessExtHostProxy } from 'vs/workbench/contrib/terminal/common/terminal'; import type { Terminal as XTermTerminal } from 'xterm'; import type { SearchAddon as XTermSearchAddon } from 'xterm-addon-search'; import type { Unicode11Addon as XTermUnicode11Addon } from 'xterm-addon-unicode11'; import type { WebglAddon as XTermWebglAddon } from 'xterm-addon-webgl'; import { ITerminalStatusList } from 'vs/workbench/contrib/terminal/browser/terminalStatusList'; import { ICompleteTerminalConfiguration } from 'vs/workbench/contrib/terminal/common/remoteTerminalChannel'; import { Orientation } from 'vs/base/browser/ui/splitview/splitview'; import { IEditableData } from 'vs/workbench/common/views'; import { DeserializedTerminalEditorInput } from 
'vs/workbench/contrib/terminal/browser/terminalEditorSerializer'; import { TerminalEditorInput } from 'vs/workbench/contrib/terminal/browser/terminalEditorInput'; import { EditorGroupColumn } from 'vs/workbench/services/editor/common/editorGroupColumn'; export const ITerminalService = createDecorator<ITerminalService>('terminalService'); export const ITerminalEditorService = createDecorator<ITerminalEditorService>('terminalEditorService'); export const ITerminalGroupService = createDecorator<ITerminalGroupService>('terminalGroupService'); export const ITerminalInstanceService = createDecorator<ITerminalInstanceService>('terminalInstanceService'); export const IRemoteTerminalService = createDecorator<IRemoteTerminalService>('remoteTerminalService'); /** * A service used by TerminalInstance (and components owned by it) that allows it to break its * dependency on electron-browser and node layers, while at the same time avoiding a cyclic * dependency on ITerminalService. */ export interface ITerminalInstanceService { readonly _serviceBrand: undefined; onDidCreateInstance: Event<ITerminalInstance>; getXtermConstructor(): Promise<typeof XTermTerminal>; getXtermSearchConstructor(): Promise<typeof XTermSearchAddon>; getXtermUnicode11Constructor(): Promise<typeof XTermUnicode11Addon>; getXtermWebglConstructor(): Promise<typeof XTermWebglAddon>; /** * Takes a path and returns the properly escaped path to send to the terminal. * On Windows, this included trying to prepare the path for WSL if needed. * * @param executable The executable off the shellLaunchConfig * @param title The terminal's title * @param path The path to be escaped and formatted. * @param isRemote Whether the terminal's pty is remote. * @returns An escaped version of the path to be execuded in the terminal. 
*/ preparePathForTerminalAsync(path: string, executable: string | undefined, title: string, shellType: TerminalShellType, isRemote: boolean): Promise<string>; createInstance(launchConfig: IShellLaunchConfig, target?: TerminalLocation, resource?: URI): ITerminalInstance; } export interface IBrowserTerminalConfigHelper extends ITerminalConfigHelper { panelContainer: HTMLElement | undefined; } export const enum Direction { Left = 0, Right = 1, Up = 2, Down = 3 } export interface ITerminalGroup { activeInstance: ITerminalInstance | undefined; terminalInstances: ITerminalInstance[]; title: string; readonly onDidDisposeInstance: Event<ITerminalInstance>; readonly onDisposed: Event<ITerminalGroup>; readonly onInstancesChanged: Event<void>; readonly onPanelOrientationChanged: Event<Orientation>; focusPreviousPane(): void; focusNextPane(): void; resizePane(direction: Direction): void; resizePanes(relativeSizes: number[]): void; setActiveInstanceByIndex(index: number, force?: boolean): void; attachToElement(element: HTMLElement): void; addInstance(instance: ITerminalInstance): void; removeInstance(instance: ITerminalInstance): void; moveInstance(instance: ITerminalInstance, index: number): void; setVisible(visible: boolean): void; layout(width: number, height: number): void; addDisposable(disposable: IDisposable): void; split(shellLaunchConfig: IShellLaunchConfig): ITerminalInstance; getLayoutInfo(isActive: boolean): ITerminalTabLayoutInfoById; } export const enum TerminalConnectionState { Connecting, Connected } export interface ITerminalService extends ITerminalInstanceHost { readonly _serviceBrand: undefined; /** Gets all terminal instances, including editor and terminal view (group) instances. 
*/ readonly instances: readonly ITerminalInstance[]; configHelper: ITerminalConfigHelper; isProcessSupportRegistered: boolean; readonly connectionState: TerminalConnectionState; readonly availableProfiles: ITerminalProfile[]; readonly allProfiles: ITerminalProfileType[] | undefined; readonly profilesReady: Promise<void>; readonly defaultLocation: TerminalLocation; initializeTerminals(): Promise<void>; onDidChangeActiveGroup: Event<ITerminalGroup | undefined>; onDidDisposeGroup: Event<ITerminalGroup>; onDidCreateInstance: Event<ITerminalInstance>; onDidReceiveProcessId: Event<ITerminalInstance>; onDidChangeInstanceDimensions: Event<ITerminalInstance>; onDidMaximumDimensionsChange: Event<ITerminalInstance>; onDidRequestStartExtensionTerminal: Event<IStartExtensionTerminalRequest>; onDidChangeInstanceTitle: Event<ITerminalInstance | undefined>; onDidChangeInstanceIcon: Event<ITerminalInstance | undefined>; onDidChangeInstanceColor: Event<ITerminalInstance | undefined>; onDidChangeInstancePrimaryStatus: Event<ITerminalInstance>; onDidInputInstanceData: Event<ITerminalInstance>; onDidRegisterProcessSupport: Event<void>; onDidChangeConnectionState: Event<void>; onDidChangeAvailableProfiles: Event<ITerminalProfile[]>; /** * Creates a terminal. * @param options The options to create the terminal with, when not specified the default * profile will be used at the default target. */ createTerminal(options?: ICreateTerminalOptions): Promise<ITerminalInstance>; /** * Creates a raw terminal instance, this should not be used outside of the terminal part. 
*/ getInstanceFromId(terminalId: number): ITerminalInstance | undefined; getInstanceFromIndex(terminalIndex: number): ITerminalInstance; getActiveOrCreateInstance(): Promise<ITerminalInstance>; moveToEditor(source: ITerminalInstance): void; moveToTerminalView(source?: ITerminalInstance | URI): Promise<void>; getOffProcessTerminalService(): IOffProcessTerminalService | undefined; /** * Perform an action with the active terminal instance, if the terminal does * not exist the callback will not be called. * @param callback The callback that fires with the active terminal */ doWithActiveInstance<T>(callback: (terminal: ITerminalInstance) => T): T | void; /** * Fire the onActiveTabChanged event, this will trigger the terminal dropdown to be updated, * among other things. */ refreshActiveGroup(): void; registerProcessSupport(isSupported: boolean): void; /** * Registers a link provider that enables integrators to add links to the terminal. * @param linkProvider When registered, the link provider is asked whenever a cell is hovered * for links at that position. This lets the terminal know all links at a given area and also * labels for what these links are going to do. 
*/ registerLinkProvider(linkProvider: ITerminalExternalLinkProvider): IDisposable; registerTerminalProfileProvider(extensionIdenfifier: string, id: string, profileProvider: ITerminalProfileProvider): IDisposable; showProfileQuickPick(type: 'setDefault' | 'createInstance', cwd?: string | URI): Promise<ITerminalInstance | undefined>; setContainers(panelContainer: HTMLElement, terminalContainer: HTMLElement): void; requestStartExtensionTerminal(proxy: ITerminalProcessExtHostProxy, cols: number, rows: number): Promise<ITerminalLaunchError | undefined>; isAttachedToTerminal(remoteTerm: IRemoteTerminalAttachTarget): boolean; getEditableData(instance: ITerminalInstance): IEditableData | undefined; setEditable(instance: ITerminalInstance, data: IEditableData | null): Promise<void>; safeDisposeTerminal(instance: ITerminalInstance): Promise<void>; getDefaultInstanceHost(): ITerminalInstanceHost; getInstanceHost(target: ITerminalLocationOptions | undefined): ITerminalInstanceHost; getFindHost(instance?: ITerminalInstance): ITerminalFindHost; getDefaultProfileName(): string; resolveLocation(location?: ITerminalLocationOptions): TerminalLocation | undefined } /** * This service is responsible for integrating with the editor service and managing terminal * editors. */ export interface ITerminalEditorService extends ITerminalInstanceHost, ITerminalFindHost { readonly _serviceBrand: undefined; /** Gets all _terminal editor_ instances. 
*/ readonly instances: readonly ITerminalInstance[]; openEditor(instance: ITerminalInstance, editorOptions?: TerminalEditorLocation): Promise<void>; detachActiveEditorInstance(): ITerminalInstance; detachInstance(instance: ITerminalInstance): void; splitInstance(instanceToSplit: ITerminalInstance, shellLaunchConfig?: IShellLaunchConfig): ITerminalInstance; revealActiveEditor(preserveFocus?: boolean): void; resolveResource(instance: ITerminalInstance | URI): URI; reviveInput(deserializedInput: DeserializedTerminalEditorInput): TerminalEditorInput; getInputFromResource(resource: URI): TerminalEditorInput; } export type ITerminalLocationOptions = TerminalLocation | TerminalEditorLocation | { parentTerminal: ITerminalInstance } | { splitActiveTerminal: boolean }; export interface ICreateTerminalOptions { /** * The shell launch config or profile to launch with, when not specified the default terminal * profile will be used. */ config?: IShellLaunchConfig | ITerminalProfile | IExtensionTerminalProfile; /** * The current working directory to start with, this will override IShellLaunchConfig.cwd if * specified. */ cwd?: string | URI; /** * The terminal's resource, passed when the terminal has moved windows. */ resource?: URI; /** * The terminal's location (editor or panel), it's terminal parent (split to the right), or editor group */ location?: ITerminalLocationOptions; } export interface TerminalEditorLocation { viewColumn: EditorGroupColumn, preserveFocus?: boolean } /** * This service is responsible for managing terminal groups, that is the terminals that are hosted * within the terminal panel, not in an editor. */ export interface ITerminalGroupService extends ITerminalInstanceHost, ITerminalFindHost { readonly _serviceBrand: undefined; /** Gets all _terminal view_ instances, ie. instances contained within terminal groups. 
*/ readonly instances: readonly ITerminalInstance[]; readonly groups: readonly ITerminalGroup[]; activeGroup: ITerminalGroup | undefined; readonly activeGroupIndex: number; readonly onDidChangeActiveGroup: Event<ITerminalGroup | undefined>; readonly onDidDisposeGroup: Event<ITerminalGroup>; /** Fires when a group is created, disposed of, or shown (in the case of a background group). */ readonly onDidChangeGroups: Event<void>; readonly onDidChangePanelOrientation: Event<Orientation>; createGroup(shellLaunchConfig?: IShellLaunchConfig): ITerminalGroup; createGroup(instance?: ITerminalInstance): ITerminalGroup; getGroupForInstance(instance: ITerminalInstance): ITerminalGroup | undefined; /** * Moves a terminal instance's group to the target instance group's position. * @param source The source instance to move. * @param target The target instance to move the source instance to. */ moveGroup(source: ITerminalInstance, target: ITerminalInstance): void; moveGroupToEnd(source: ITerminalInstance): void; moveInstance(source: ITerminalInstance, target: ITerminalInstance, side: 'before' | 'after'): void; unsplitInstance(instance: ITerminalInstance): void; joinInstances(instances: ITerminalInstance[]): void; instanceIsSplit(instance: ITerminalInstance): boolean; getGroupLabels(): string[]; setActiveGroupByIndex(index: number): void; setActiveGroupToNext(): void; setActiveGroupToPrevious(): void; setActiveInstanceByIndex(terminalIndex: number): void; setContainer(container: HTMLElement): void; showPanel(focus?: boolean): Promise<void>; hidePanel(): void; focusTabs(): void; showTabs(): void; } /** * An interface that indicates the implementer hosts terminal instances, exposing a common set of * properties and events. 
*/ export interface ITerminalInstanceHost { readonly activeInstance: ITerminalInstance | undefined; readonly instances: readonly ITerminalInstance[]; readonly onDidDisposeInstance: Event<ITerminalInstance>; readonly onDidFocusInstance: Event<ITerminalInstance>; readonly onDidChangeActiveInstance: Event<ITerminalInstance | undefined>; readonly onDidChangeInstances: Event<void>; setActiveInstance(instance: ITerminalInstance): void; /** * Gets an instance from a resource if it exists. This MUST be used instead of getInstanceFromId * when you only know about a terminal's URI. (a URI's instance ID may not be this window's instance ID) */ getInstanceFromResource(resource: URI | undefined): ITerminalInstance | undefined; } export interface ITerminalFindHost { focusFindWidget(): void; hideFindWidget(): void; getFindState(): FindReplaceState; findNext(): void; findPrevious(): void; } export interface IRemoteTerminalService extends IOffProcessTerminalService { createProcess( shellLaunchConfig: IShellLaunchConfig, configuration: ICompleteTerminalConfiguration, activeWorkspaceRootUri: URI | undefined, cols: number, rows: number, unicodeVersion: '6' | '11', shouldPersist: boolean ): Promise<ITerminalChildProcess>; } /** * Similar to xterm.js' ILinkProvider but using promises and hides xterm.js internals (like buffer * positions, decorations, etc.) from the rest of vscode. This is the interface to use for * workbench integrations. */ export interface ITerminalExternalLinkProvider { provideLinks(instance: ITerminalInstance, line: string): Promise<ITerminalLink[] | undefined>; } export interface ITerminalProfileProvider { createContributedTerminalProfile(options: ICreateContributedTerminalProfileOptions): Promise<void>; } export interface ITerminalLink { /** The startIndex of the link in the line. */ startIndex: number; /** The length of the link in the line. */ length: number; /** The descriptive label for what the link does when activated. 
*/ label?: string; /** * Activates the link. * @param text The text of the link. */ activate(text: string): void; } export interface ISearchOptions { /** Whether the find should be done as a regex. */ regex?: boolean; /** Whether only whole words should match. */ wholeWord?: boolean; /** Whether find should pay attention to case. */ caseSensitive?: boolean; /** Whether the search should start at the current search position (not the next row). */ incremental?: boolean; } export interface ITerminalBeforeHandleLinkEvent { terminal?: ITerminalInstance; /** The text of the link */ link: string; /** Call with whether the link was handled by the interceptor */ resolve(wasHandled: boolean): void; } export interface ITerminalInstance { /** * The ID of the terminal instance, this is an arbitrary number only used to uniquely identify * terminal instances within a window. */ readonly instanceId: number; /** * A unique URI for this terminal instance with the following encoding: * path: /<workspace ID>/<instance ID> * fragment: Title * Note that when dragging terminals across windows, this will retain the original workspace ID /instance ID * from the other window. */ readonly resource: URI; readonly cols: number; readonly rows: number; readonly maxCols: number; readonly maxRows: number; readonly icon?: TerminalIcon; readonly color?: string; readonly statusList: ITerminalStatusList; /** * The process ID of the shell process, this is undefined when there is no process associated * with this terminal. */ processId: number | undefined; target?: TerminalLocation; /** * The id of a persistent process. This is defined if this is a terminal created by a pty host * that supports reconnection. */ readonly persistentProcessId: number | undefined; /** * Whether the process should be persisted across reloads. */ readonly shouldPersist: boolean; /** * Whether the process communication channel has been disconnected. 
*/ readonly isDisconnected: boolean; /** * Whether the terminal's pty is hosted on a remote. */ readonly isRemote: boolean; /** * Whether an element within this terminal is focused. */ readonly hasFocus: boolean; /** * An event that fires when the terminal instance's title changes. */ onTitleChanged: Event<ITerminalInstance>; /** * An event that fires when the terminal instance's icon changes. */ onIconChanged: Event<ITerminalInstance>; /** * An event that fires when the terminal instance is disposed. */ onDisposed: Event<ITerminalInstance>; onProcessIdReady: Event<ITerminalInstance>; onLinksReady: Event<ITerminalInstance>; onRequestExtHostProcess: Event<ITerminalInstance>; onDimensionsChanged: Event<void>; onMaximumDimensionsChanged: Event<void>; onDidChangeHasChildProcesses: Event<boolean>; onDidFocus: Event<ITerminalInstance>; onDidBlur: Event<ITerminalInstance>; onDidInputData: Event<ITerminalInstance>; /** * An event that fires when a terminal is dropped on this instance via drag and drop. */ onRequestAddInstanceToGroup: Event<IRequestAddInstanceToGroupEvent>; /** * Attach a listener to the raw data stream coming from the pty, including ANSI escape * sequences. */ onData: Event<string>; /** * Attach a listener to the binary data stream coming from xterm and going to pty */ onBinary: Event<string>; /** * Attach a listener to listen for new lines added to this terminal instance. * * @param listener The listener function which takes new line strings added to the terminal, * excluding ANSI escape sequences. The line event will fire when an LF character is added to * the terminal (ie. the line is not wrapped). Note that this means that the line data will * not fire for the last line, until either the line is ended with a LF character of the process * is exited. The lineData string will contain the fully wrapped line, not containing any LF/CR * characters. */ onLineData: Event<string>; /** * Attach a listener that fires when the terminal's pty process exits. 
The number in the event * is the processes' exit code, an exit code of null means the process was killed as a result of * the ITerminalInstance being disposed. */ onExit: Event<number | undefined>; readonly exitCode: number | undefined; readonly areLinksReady: boolean; /** * Returns an array of data events that have fired within the first 10 seconds. If this is * called 10 seconds after the terminal has existed the result will be undefined. This is useful * when objects that depend on the data events have delayed initialization, like extension * hosts. */ readonly initialDataEvents: string[] | undefined; /** A promise that resolves when the terminal's pty/process have been created. */ readonly processReady: Promise<void>; /** Whether the terminal's process has child processes (ie. is dirty/busy). */ readonly hasChildProcesses: boolean; /** * The title of the terminal. This is either title or the process currently running or an * explicit name given to the terminal instance through the extension API. */ readonly title: string; /** * How the current title was set. */ readonly titleSource: TitleEventSource; /** * The shell type of the terminal. */ readonly shellType: TerminalShellType; /** * The focus state of the terminal before exiting. */ readonly hadFocusOnExit: boolean; /** * False when the title is set by an API or the user. We check this to make sure we * do not override the title when the process title changes in the terminal. */ isTitleSetByProcess: boolean; /** * The shell launch config used to launch the shell. */ readonly shellLaunchConfig: IShellLaunchConfig; /** * Whether to disable layout for the terminal. This is useful when the size of the terminal is * being manipulating (e.g. adding a split pane) and we want the terminal to ignore particular * resize events. */ disableLayout: boolean; /** * An object that tracks when commands are run and enables navigating and selecting between * them. 
*/ readonly commandTracker: ICommandTracker | undefined; readonly navigationMode: INavigationMode | undefined; description: string | undefined; /** * Shows the environment information hover if the widget exists. */ showEnvironmentInfoHover(): void; /** * Dispose the terminal instance, removing it from the panel/service and freeing up resources. * * @param immediate Whether the kill should be immediate or not. Immediate should only be used * when VS Code is shutting down or in cases where the terminal dispose was user initiated. * The immediate===false exists to cover an edge case where the final output of the terminal can * get cut off. If immediate kill any terminal processes immediately. */ dispose(immediate?: boolean): void; /** * Inform the process that the terminal is now detached. */ detachFromProcess(): Promise<void>; /** * Forces the terminal to redraw its viewport. */ forceRedraw(): void; /** * Check if anything is selected in terminal. */ hasSelection(): boolean; /** * Copies the terminal selection to the clipboard. */ copySelection(): Promise<void>; <|fim▁hole|> readonly selection: string | undefined; /** * Clear current selection. */ clearSelection(): void; /** * Select all text in the terminal. */ selectAll(): void; /** * Find the next instance of the term */ findNext(term: string, searchOptions: ISearchOptions): boolean; /** * Find the previous instance of the term */ findPrevious(term: string, searchOptions: ISearchOptions): boolean; /** * Notifies the terminal that the find widget's focus state has been changed. */ notifyFindWidgetFocusChanged(isFocused: boolean): void; /** * Focuses the terminal instance if it's able to (xterm.js instance exists). * * @param focus Force focus even if there is a selection. */ focus(force?: boolean): void; /** * Focuses the terminal instance when it's ready (the xterm.js instance is created). Use this * when the terminal is being shown. * * @param focus Force focus even if there is a selection. 
*/ focusWhenReady(force?: boolean): Promise<void>; /** * Focuses and pastes the contents of the clipboard into the terminal instance. */ paste(): Promise<void>; /** * Focuses and pastes the contents of the selection clipboard into the terminal instance. */ pasteSelection(): Promise<void>; /** * Send text to the terminal instance. The text is written to the stdin of the underlying pty * process (shell) of the terminal instance. * * @param text The text to send. * @param addNewLine Whether to add a new line to the text being sent, this is normally * required to run a command in the terminal. The character(s) added are \n or \r\n * depending on the platform. This defaults to `true`. */ sendText(text: string, addNewLine: boolean): Promise<void>; /** Scroll the terminal buffer down 1 line. */ scrollDownLine(): void; /** Scroll the terminal buffer down 1 page. */ scrollDownPage(): void; /** Scroll the terminal buffer to the bottom. */ scrollToBottom(): void; /** Scroll the terminal buffer up 1 line. */ scrollUpLine(): void; /** Scroll the terminal buffer up 1 page. */ scrollUpPage(): void; /** Scroll the terminal buffer to the top. */ scrollToTop(): void; /** * Clears the terminal buffer, leaving only the prompt line. */ clear(): void; /** * Attaches the terminal instance to an element on the DOM, before this is called the terminal * instance process may run in the background but cannot be displayed on the UI. * * @param container The element to attach the terminal instance to. */ attachToElement(container: HTMLElement): Promise<void> | void; /** * Detaches the terminal instance from the terminal editor DOM element. */ detachFromElement(): void; /** * Configure the dimensions of the terminal instance. * * @param dimension The dimensions of the container. */ layout(dimension: { width: number, height: number }): void; /** * Sets whether the terminal instance's element is visible in the DOM. * * @param visible Whether the element is visible. 
*/ setVisible(visible: boolean): void; /** * Immediately kills the terminal's current pty process and launches a new one to replace it. * * @param shell The new launch configuration. */ reuseTerminal(shell: IShellLaunchConfig): Promise<void>; /** * Relaunches the terminal, killing it and reusing the launch config used initially. Any * environment variable changes will be recalculated when this happens. */ relaunch(): void; /** * Sets the title of the terminal instance. */ setTitle(title: string, eventSource: TitleEventSource): void; waitForTitle(): Promise<string>; setDimensions(dimensions: ITerminalDimensions): void; addDisposable(disposable: IDisposable): void; toggleEscapeSequenceLogging(): void; getInitialCwd(): Promise<string>; getCwd(): Promise<string>; /** * @throws when called before xterm.js is ready. */ registerLinkProvider(provider: ITerminalExternalLinkProvider): IDisposable; /** * Sets the terminal name to the provided title or triggers a quick pick * to take user input. */ rename(title?: string): Promise<void>; /** * Triggers a quick pick to change the icon of this terminal. */ changeIcon(): Promise<void>; /** * Triggers a quick pick to change the color of the associated terminal tab icon. */ changeColor(): Promise<void>; } export interface IRequestAddInstanceToGroupEvent { uri: URI; side: 'before' | 'after' } export const enum LinuxDistro { Unknown = 1, Fedora = 2, Ubuntu = 3, }<|fim▁end|>
/** * Current selection in the terminal. */
<|file_name|>icosphere.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap; use std::mem; use scene::Vertex; fn vertex(pos: [f32; 3]) -> Vertex { use std::f32::consts::{PI}; let u = pos[0].atan2(pos[2]) / (-2.0 * PI); let u = if u < 0. { u + 1. } else { u }; let v = pos[1].asin() / PI + 0.5; Vertex::new(pos, [u, v]) } pub fn generate(recursion: u16) -> (Vec<Vertex>, Vec<u16>) { let face_count = 20 * 4usize.pow(recursion as u32); let edge_count = 3 * face_count / 2; // Euler's formula let vertex_count = 2 + edge_count - face_count; let index_count = face_count * 3; let t = (1.0 + 5.0_f32.sqrt()) / 2.0; let n = (1. + t * t).sqrt(); let u = 1. / n; let v = t / n; let mut vertex_data = Vec::with_capacity(vertex_count); vertex_data.extend_from_slice(&[ vertex([-u, v, 0.0]), vertex([ u, v, 0.0]), vertex([ -u, -v, 0.0]), vertex([ u, -v, 0.0]), vertex([ 0.0, -u, v]), vertex([ 0.0, u, v]), vertex([ 0.0, -u, -v]), vertex([ 0.0, u, -v]), vertex([ v, 0.0, -u]), vertex([ v, 0.0, u]), vertex([ -v, 0.0, -u]), vertex([ -v, 0.0, u]), ]); let mut index_data: Vec<u16> = Vec::with_capacity(index_count); index_data.extend_from_slice(&[ // 5 faces around point 0 0, 11, 5, 0, 5, 1, 0, 1, 7, 0, 7, 10, 0, 10, 11, // 5 adjacent faces 1, 5, 9, 5, 11, 4, 11, 10, 2, 10, 7, 6, 7, 1, 8, // 5 faces around point 3 3, 9, 4, 3, 4, 2, 3, 2, 6, 3, 6, 8, 3, 8, 9, // 5 adjacent faces 4, 9, 5, 2, 4, 11, 6, 2, 10, 8, 6, 7, 9, 8, 1, ]); let mut cache = HashMap::new(); let mut next_indices = Vec::with_capacity(index_count); { let mut middle = |ia, ib| { let key = if ia < ib { (ia, ib) } else { (ib, ia) }; cache.get(&key).cloned().unwrap_or_else(|| { let pa = vertex_data[ia as usize].pos; let pb = vertex_data[ib as usize].pos; let middle = [ (pa[0] + pb[0]) / 2.0, (pa[1] + pb[1]) / 2.0, (pa[2] + pb[2]) / 2.0, ]; let norm = (middle[0] * middle[0] + middle[1] * middle[1] + middle[2] * middle[2]).sqrt(); let index = vertex_data.len() as u16; let v = vertex([middle[0]/norm, middle[1]/norm, 
middle[2]/norm]); vertex_data.push(v); cache.insert(key, index); index }) }; for _ in 0..recursion { for tri in index_data.chunks(3) {<|fim▁hole|> let i2 = tri[1]; let i3 = tri[2]; let a = middle(i1, i2); let b = middle(i2, i3); let c = middle(i3, i1); next_indices.extend_from_slice(&[ i1, a, c, a, i2, b, c, b, i3, a, b, c, ]); } mem::swap(&mut next_indices, &mut index_data); next_indices.clear(); } } debug_assert!(vertex_data.len() == vertex_count); debug_assert!(index_data.len() == index_count); (vertex_data, index_data) }<|fim▁end|>
let i1 = tri[0];
<|file_name|>cmd.py<|end_file_name|><|fim▁begin|># mako/cmd.py # Copyright 2006-2021 the Mako authors and contributors <see AUTHORS file> # # This module is part of Mako and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from argparse import ArgumentParser from os.path import dirname from os.path import isfile import sys from mako import exceptions from mako.lookup import TemplateLookup from mako.template import Template def varsplit(var): if "=" not in var: return (var, "")<|fim▁hole|> def _exit(): sys.stderr.write(exceptions.text_error_template().render()) sys.exit(1) def cmdline(argv=None): parser = ArgumentParser() parser.add_argument( "--var", default=[], action="append", help="variable (can be used multiple times, use name=value)", ) parser.add_argument( "--template-dir", default=[], action="append", help="Directory to use for template lookup (multiple " "directories may be provided). If not given then if the " "template is read from stdin, the value defaults to be " "the current directory, otherwise it defaults to be the " "parent directory of the file provided.", ) parser.add_argument( "--output-encoding", default=None, help="force output encoding" ) parser.add_argument( "--output-file", default=None, help="Write to file upon successful render instead of stdout", ) parser.add_argument("input", nargs="?", default="-") options = parser.parse_args(argv) output_encoding = options.output_encoding output_file = options.output_file if options.input == "-": lookup_dirs = options.template_dir or ["."] lookup = TemplateLookup(lookup_dirs) try: template = Template( sys.stdin.read(), lookup=lookup, output_encoding=output_encoding, ) except: _exit() else: filename = options.input if not isfile(filename): raise SystemExit("error: can't find %s" % filename) lookup_dirs = options.template_dir or [dirname(filename)] lookup = TemplateLookup(lookup_dirs) try: template = Template( filename=filename, lookup=lookup, 
output_encoding=output_encoding, ) except: _exit() kw = dict(varsplit(var) for var in options.var) try: rendered = template.render(**kw) except: _exit() else: if output_file: open(output_file, "wt", encoding=output_encoding).write(rendered) else: sys.stdout.write(rendered) if __name__ == "__main__": cmdline()<|fim▁end|>
return var.split("=", 1)
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'Resource' db.create_table('inventory_resource', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.TextField')()), ('trainable', self.gf('django.db.models.fields.BooleanField')(default=True)), )) db.send_create_signal('inventory', ['Resource']) # Adding model 'Metadata' db.create_table('inventory_metadata', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.TextField')()), ('type', self.gf('django.db.models.fields.IntegerField')()), ('value', self.gf('django.db.models.fields.TextField')()), ('resource', self.gf('django.db.models.fields.related.ForeignKey')(related_name='metadata', to=orm['inventory.Resource'])), )) db.send_create_signal('inventory', ['Metadata']) # Adding model 'TrainingLevel' db.create_table('inventory_traininglevel', ( ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('member', self.gf('django.db.models.fields.related.ForeignKey')(related_name='trainings', to=orm['membership.Member'])), ('resource', self.gf('django.db.models.fields.related.ForeignKey')(related_name='trainings', to=orm['inventory.Resource'])), ('rank', self.gf('django.db.models.fields.IntegerField')()), ('comments', self.gf('django.db.models.fields.TextField')(blank=True)), )) db.send_create_signal('inventory', ['TrainingLevel']) def backwards(self, orm): # Deleting model 'Resource' db.delete_table('inventory_resource') # Deleting model 'Metadata' db.delete_table('inventory_metadata') # Deleting model 'TrainingLevel' db.delete_table('inventory_traininglevel') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], 
{'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),<|fim▁hole|> 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 
'True', 'max_length': '30'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'inventory.metadata': { 'Meta': {'object_name': 'Metadata'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.TextField', [], {}), 'resource': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'metadata'", 'to': "orm['inventory.Resource']"}), 'type': ('django.db.models.fields.IntegerField', [], {}), 'value': ('django.db.models.fields.TextField', [], {}) }, 'inventory.resource': { 'Meta': {'object_name': 'Resource'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.TextField', [], {}), 'trainable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'users': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['membership.Member']", 'through': "orm['inventory.TrainingLevel']", 'symmetrical': 'False'}) }, 'inventory.traininglevel': { 'Meta': {'object_name': 'TrainingLevel'}, 'comments': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'member': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'trainings'", 'to': "orm['membership.Member']"}), 'rank': ('django.db.models.fields.IntegerField', [], {}), 'resource': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'trainings'", 'to': "orm['inventory.Resource']"}) }, 'membership.field': { 'Meta': {'object_name': 'Field'}, 
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) }, 'membership.fieldvalue': { 'Meta': {'object_name': 'FieldValue'}, 'field': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['membership.Field']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'member': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attributes'", 'to': "orm['membership.Member']"}), 'value': ('django.db.models.fields.TextField', [], {}) }, 'membership.member': { 'Meta': {'object_name': 'Member'}, 'birthday': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'fields': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['membership.Field']", 'through': "orm['membership.FieldValue']", 'symmetrical': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'lastSeen': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'profession': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'tagline': ('django.db.models.fields.CharField', [], {'max_length': '255'}), 'user': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True'}) } } complete_apps = ['inventory']<|fim▁end|>
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': {
<|file_name|>markdown.rs<|end_file_name|><|fim▁begin|>use file; use playpen; pub struct Markdown<'a, 'b> { content: String, id: &'a str, prefix: &'b str, } impl<'a, 'b> Markdown<'a, 'b> { pub fn process(number: &[uint], id: &'a str, title: &str, prefix: &'b str) -> Result<(), String> { let mut mkd = try!(Markdown::new(number, id, title, prefix)); try!(mkd.insert_sources()); try!(mkd.insert_outputs()); try!(mkd.insert_playpen_links()); try!(mkd.save()); Ok(()) } fn new(number: &[uint], id: &'a str, title: &str, prefix: &'b str) -> Result<Markdown<'a, 'b>, String> { let path = Path::new(format!("examples/{}/{}/input.md", prefix, id)); let body = try!(file::read(&path)); let version = number.iter().map(|x| { format!("{}", x) }).collect::<Vec<String>>().connect("."); let content = format!("{} {} {}\n\n{}", "#".repeat(number.len()), version, title, body); Ok(Markdown { content: content, id: id, prefix: prefix, }) } fn insert_sources(&mut self) -> Result<(), String> { let id = self.id; let prefix = self.prefix; let re = regex!(r"\{(.*\.rs)\}"); let mut table = Vec::new(); for line in self.content.as_slice().lines() { match re.captures(line) { None => {}, Some(captures) => { let src = captures.at(1); let input = format!("{{{}}}", src); let p = format!("examples/{}/{}/{}", prefix, id, src); let output = match file::read(&Path::new(p.as_slice())) { Err(_) => { return Err(format!("{} not found", p)); }, Ok(string) => { format!("``` rust\n// {}\n{}```", captures.at(1), string) } }; table.push((input, output)) } } } for (input, output) in table.move_iter() { self.content = self.content.replace(input.as_slice(), output.as_slice());<|fim▁hole|> Ok(()) } fn insert_outputs(&mut self) -> Result<(), String> { let id = self.id; let prefix = self.prefix; let r = regex!(r"\{(.*)\.out\}"); let dir = Path::new(format!("bin/{}/{}", prefix, id)); file::mkdir(&dir); let mut table = Vec::new(); for line in self.content.as_slice().lines() { match r.captures(line) { None => {}, Some(captures) 
=> { let src = captures.at(1); let input = format!("{{{}.out}}", src); let s = try!(file::run(prefix, id, src)); let s = format!("```\n$ rustc {0}.rs && ./{0}\n{1}```", src, s); table.push((input, s)); }, } } for (input, output) in table.move_iter() { self.content = self.content.replace(input.as_slice(), output.as_slice()); } Ok(()) } fn insert_playpen_links(&mut self) -> Result<(), String> { let id = self.id; let prefix = self.prefix; let re = regex!(r"\{(.*)\.play\}"); let mut once_ = false; let mut table = Vec::new(); for line in self.content.as_slice().lines() { match re.captures(line) { None => {}, Some(captures) => { if once_ { return Err(format!("more than one editor!")) } else { once_ = true; } let input = format!("{{{}.play}}", captures.at(1)); let src = format!("{}.rs", captures.at(1)); let p = format!("examples/{}/{}/{}", prefix, id, src); let output = match file::read(&Path::new(p.as_slice())) { Err(_) => { return Err(format!("{} not found", p)); }, Ok(source) => { playpen::editor(source.as_slice()) } }; table.push((input, output)) } } } for (input, output) in table.move_iter() { self.content = self.content.replace(input.as_slice(), output.as_slice()); } Ok(()) } fn save(&self) -> Result<(), String> { let path = Path::new(format!("stage/{}/{}.md", self.prefix, self.id)); file::write(&path, self.content.as_slice()) } }<|fim▁end|>
}
<|file_name|>InputStreamSource.java<|end_file_name|><|fim▁begin|>package dk.dbc.kafka.dispatch.sources; import java.io.BufferedReader; import java.io.IOException; import java.io.InputStream; import java.io.InputStreamReader; import java.nio.charset.StandardCharsets; import java.util.Optional; /** * Source for reading InputStreams line-by-line * @author Adam Tulinius */ public class InputStreamSource extends Source<String> { private BufferedReader reader; public InputStreamSource(InputStream inputStream) { this.reader = new BufferedReader(new InputStreamReader(inputStream, StandardCharsets.UTF_8));<|fim▁hole|> @Override public Optional<String> next() throws IOException { String line = reader.readLine(); if (line != null) { return Optional.of(line); } else { return Optional.empty(); } } }<|fim▁end|>
}
<|file_name|>gulpfile.js<|end_file_name|><|fim▁begin|>const gulp = require('gulp'), rename = require('gulp-rename'), sketch = require('gulp-sketch'), iconfont = require('gulp-iconfont'), imagemin = require('gulp-imagemin'), consolidate = require('gulp-consolidate') /** * Font settings */ const // set name of your symbol font fontName = 'floating-top-link', // set class name in your CSS className = 'FloatingTopLink__icon', // you can also choose 'foundation-style' template = 'fontawesome-style', // you can also choose 'symbol-font-16px.sketch' skethcFileName = 'floating-top-link-font-14px.sketch' /** * Recommended to get consistent builds when watching files * See https://github.com/nfroidure/gulp-iconfont */ const timestamp = Math.round(Date.now() / 1000) gulp.task('symbols', () => gulp.src(skethcFileName) .pipe(sketch({ export: 'artboards', formats: 'svg' })) .pipe(imagemin()) .pipe(iconfont({ fontName, formats: ['ttf', 'eot', 'woff', 'woff2', 'svg'], timestamp, log: () => {} // suppress unnecessary logging })) .on('glyphs', (glyphs) => { const options = { className, fontName, fontPath: '../fonts/', // set path to font (from your CSS file if relative) glyphs: glyphs.map(mapGlyphs) } gulp.src(`templates/${ template }.css`) .pipe(consolidate('lodash', options)) .pipe(rename({ basename: fontName })) .pipe(gulp.dest('./css/')) // set path to export your CSS // if you don't need sample.html, remove next 4 lines gulp.src(`templates/${ template }.html`) .pipe(consolidate('lodash', options)) .pipe(rename({ basename: 'sample' })) .pipe(gulp.dest('./')) // set path to export your sample HTML }) .pipe(gulp.dest('../fonts/')) // set path to export your fonts ) gulp.task('watch', () => gulp.watch('*.sketch', ['symbols']))<|fim▁hole|> */ function mapGlyphs(glyph) { return { name: glyph.name, codepoint: glyph.unicode[0].charCodeAt(0) } }<|fim▁end|>
/** * This is needed for mapping glyphs and codepoints.
<|file_name|>macro-inner-attributes.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![feature(custom_attribute)] macro_rules! test { ($nm:ident, #[$a:meta], $i:item) => (mod $nm { #![$a] $i }); } test!(a, #[cfg(qux)], pub fn bar() { }); test!(b,<|fim▁hole|> #[qux] fn main() { a::bar(); //~^ ERROR failed to resolve. Use of undeclared type or module `a` //~^^ ERROR unresolved name `a::bar` b::bar(); }<|fim▁end|>
#[cfg(not(qux))], pub fn bar() { });
<|file_name|>platforms.py<|end_file_name|><|fim▁begin|>import pygame from pygame.colordict import THECOLORS import data class Platform(pygame.sprite.Sprite): def __init__(self, width, height):<|fim▁hole|> self.image = pygame.Surface([width, height]) self.image.fill(THECOLORS["green"]) self.rect = self.image.get_rect() class Trampoline(pygame.sprite.Sprite): def __init__(self): pygame.sprite.Sprite.__init__(self) self.image = data.load_image("trampoline.png") self.rect = self.image.get_rect()<|fim▁end|>
pygame.sprite.Sprite.__init__(self)
<|file_name|>wysiwyg-editor.js<|end_file_name|><|fim▁begin|>/** * Created by Stefan on 24.05.14. */ (function() { tinymce.create('tinymce.plugins.Footnotes', { /** * Initializes the plugin, this will be executed after the plugin has been created. * This call is done before the editor instance has finished its initialization so use the onInit event * of the editor instance to intercept that event. * * @param {tinymce.Editor} ed Editor instance that the plugin is initialized in. * @param {string} url Absolute URL to where the plugin is located. */ init : function(ed, url) { ed.addButton('footnotes', { title : 'footnotes', cmd : 'footnotes', image : url + '/../img/fn-wysiwyg.png' <|fim▁hole|> ed.addCommand('footnotes', function() { jQuery.ajax({ type: 'POST', url: '/wp-admin/admin-ajax.php', data: { action: 'footnotes_getTags' }, success: function(data, textStatus, XMLHttpRequest){ var l_arr_Tags = JSON.parse(data); var return_text = l_arr_Tags['start'] + ed.selection.getContent() + l_arr_Tags['end']; ed.execCommand('insertHTML', true, return_text); }, error: function(MLHttpRequest, textStatus, errorThrown){ console.log("Error: " + errorThrown); } }); }); }, /** * Creates control instances based in the incomming name. This method is normally not * needed since the addButton method of the tinymce.Editor class is a more easy way of adding buttons * but you sometimes need to create more complex controls like listboxes, split buttons etc then this * method can be used to create those. * * @param {String} n Name of the control to create. * @param {tinymce.ControlManager} cm Control manager to use inorder to create new control. * @return {tinymce.ui.Control} New control instance or null if no control was created. */ createControl : function(n, cm) { return null; }, /** * Returns information about the plugin as a name/value array. * The current keys are longname, author, authorurl, infourl and version. 
* * @return {Object} Name/value array containing information about the plugin. */ getInfo : function() { return { longname : 'Inserts the Footnotes short code.', author : 'ManFisher Medien ManuFaktur', authorurl : 'http://http://manfisher.net/', infourl : 'http://wordpress.org/plugins/footnotes/', version : "1.5.0" }; } }); // Register plugin tinymce.PluginManager.add('footnotes', tinymce.plugins.Footnotes); })();<|fim▁end|>
});
<|file_name|>Issue_2149.cpp<|end_file_name|><|fim▁begin|><|fim▁hole|> { EnumValue1 = 1 << 1 }; }<|fim▁end|>
namespace { enum EnumValue
<|file_name|>functions.go<|end_file_name|><|fim▁begin|>// Package misc defines miscellaneous useful functions package misc import ( "reflect" "strconv" "strings" "time" ) // NVL is null value logic func NVL(str string, def string) string { if len(str) == 0 { return def } return str } // ZeroOrNil checks if the argument is zero or null func ZeroOrNil(obj interface{}) bool { value := reflect.ValueOf(obj) if !value.IsValid() { return true } if obj == nil { return true } if value.Kind() == reflect.Slice || value.Kind() == reflect.Array { return value.Len() == 0 } zero := reflect.Zero(reflect.TypeOf(obj)) if obj == zero.Interface() { return true } return false } // Atoi returns casted int func Atoi(candidate string) int {<|fim▁hole|> if i, err := strconv.Atoi(candidate); err == nil { result = i } } return result } // ParseUint16 returns casted uint16 func ParseUint16(candidate string) uint16 { var result uint16 if candidate != "" { if u, err := strconv.ParseUint(candidate, 10, 16); err == nil { result = uint16(u) } } return result } // ParseDuration returns casted time.Duration func ParseDuration(candidate string) time.Duration { var result time.Duration if candidate != "" { if d, err := time.ParseDuration(candidate); err == nil { result = d } } return result } // ParseBool returns casted bool func ParseBool(candidate string) bool { result := false if candidate != "" { if b, err := strconv.ParseBool(candidate); err == nil { result = b } } return result } // ParseCsvLine returns comma splitted strings func ParseCsvLine(data string) []string { splitted := strings.SplitN(data, ",", -1) parsed := make([]string, len(splitted)) for i, val := range splitted { parsed[i] = strings.TrimSpace(val) } return parsed } // TimeToJST changes time.Time to Tokyo time zone func TimeToJST(t time.Time) time.Time { jst, err := time.LoadLocation("Asia/Tokyo") if err != nil { return t } return t.In(jst) } // TimeToString changes time.Time to string func TimeToString(t time.Time) string { 
timeformat := "2006-01-02T15:04:05Z0700" return t.Format(timeformat) } // StringToTime changes string to time.Time func StringToTime(t string) time.Time { timeformat := "2006-01-02T15:04:05Z0700" candidate, _ := time.Parse(timeformat, t) return candidate }<|fim▁end|>
result := 0 if candidate != "" {
<|file_name|>index.js<|end_file_name|><|fim▁begin|>global.SETTINGS = require('./settings'); global.log = require('./lib/log'); let app = require('./lib/server'); app.listen(SETTINGS.PORT, () => {<|fim▁hole|>});<|fim▁end|>
log.info(`#server Listening on port ${SETTINGS.PORT}`);
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*- ################################################################################ # # # Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as published by # # the Free Software Foundation, either version 3 of the License, or # # (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # ################################################################################ <|fim▁hole|><|fim▁end|>
import clv_medicament_template_wkf
<|file_name|>common.rs<|end_file_name|><|fim▁begin|>use num::{Zero, One, Signed, Float}; use math::scalar::{BaseNum, BaseFloat}; use std::ops::{Add, Sub, Mul, Div, Index, Neg}; #[derive(Debug)] pub enum Dimension2 { X = 1, Y = 2, } #[derive(Debug)] pub enum Dimension3 { X = 1, Y = 2, Z = 3, } pub trait ComponentWise where Self: Index<usize>, Self: Index<<Self as ComponentWise>::Dimension> { type Scalar: BaseNum; type Dimension; fn min_component(self) -> Self::Scalar; fn max_component(self) -> Self::Scalar; fn max_dimension(self) -> Self::Dimension; fn min(self, other: Self) -> Self; fn max(self, other: Self) -> Self; } pub trait ComponentWiseSigned: ComponentWise where <Self as ComponentWise>::Scalar: BaseNum + Signed { fn abs(self) -> Self; } pub trait ComponentWiseFloat: ComponentWiseSigned where <Self as ComponentWise>::Scalar: BaseFloat { fn floor(self) -> Self; fn ceil(self) -> Self; } pub trait VectorSpace: Copy + Clone where Self: Zero, Self: Add<Self, Output = Self>, Self: Sub<Self, Output = Self>, Self: Mul<<Self as VectorSpace>::Scalar, Output = Self>, Self: Div<<Self as VectorSpace>::Scalar, Output = Self>, { type Scalar: BaseNum; } pub trait InnerProduct<RHS = Self>: VectorSpace { fn dot(self, other: RHS) -> Self::Scalar; } pub trait CrossProduct<RHS = Self>: VectorSpace { type CrossOutput: VectorSpace; fn cross(self, other: RHS) -> Self::CrossOutput; } pub trait InnerProductSpace: InnerProduct where <Self as VectorSpace>::Scalar: BaseFloat, { fn magnitude(self) -> Self::Scalar { Float::sqrt(self.magnitude_squared()) } fn magnitude_squared(self) -> Self::Scalar { self.dot(self) } fn normalize(self) -> Self { self * (Self::Scalar::one() / self.magnitude()) } } pub trait MetricSpace<RHS = Self>: Copy + Clone { type Scalar: BaseFloat; fn distance(self, other: RHS) -> Self::Scalar { Float::sqrt(self.distance_squared(other)) } fn distance_squared(self, other: RHS) -> Self::Scalar; } pub trait LinearInterpolate: Copy + Clone where Self: Add<Self, Output = 
Self>, Self: Mul<<Self as LinearInterpolate>::Scalar, Output = Self>, { type Scalar: BaseFloat; fn lerp(self, other: Self, t: Self::Scalar) -> Self { self * (Self::Scalar::one() - t) + other * t } } pub fn dot<T: InnerProduct, U: InnerProduct<T>>(v1: U, v2: T) -> U::Scalar { v1.dot(v2) } pub fn abs_dot<T: InnerProduct, U: InnerProduct<T>>(v1: U, v2: T) -> U::Scalar where U::Scalar: Signed { v1.dot(v2).abs() } pub fn cross<T: CrossProduct, U: CrossProduct<T>>(v1: U, v2: T) -> U::CrossOutput { v1.cross(v2) } pub fn min_component<T: ComponentWise>(v: T) -> T::Scalar {<|fim▁hole|>} pub fn max_component<T: ComponentWise>(v: T) -> T::Scalar { v.max_component() } pub fn distance<T: MetricSpace>(v1: T, v2: T) -> T::Scalar { v1.distance(v2) } pub fn distance_squared<T: MetricSpace>(v1: T, v2: T) -> T::Scalar { v1.distance_squared(v2) } pub fn component_wise_min<T: ComponentWise>(v1: T, v2: T) -> T { v1.min(v2) } pub fn component_wise_max<T: ComponentWise>(v1: T, v2: T) -> T { v1.max(v2) } pub fn face_forward<T: InnerProduct, U: InnerProduct<T> + Neg<Output = U>>(v1: U, v2: T) -> U { if dot(v1, v2) < U::Scalar::zero() { -v1 } else { v1 } }<|fim▁end|>
v.min_component()
<|file_name|>problem_04.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 Mitchell Kember. Subject to the MIT License. // Project Euler: Problem 4 // Largest palindrome product fn is_palindrome(n: int) -> bool { let size = (n as f64).log10().ceil() as int; let mut num = n; let mut digits = Vec::with_capacity((size/2) as uint); let mut i = 0; // Push the first half of the digits into the vector. while i < size/2 { digits.push(num % 10); num /= 10; i += 1; } i -= 1; // Skip the middle digit when there are an odd number of digits. if size % 2 != 0 { num /= 10; } // Verify that the second half of the digits matches the first. while i >= 0 { let d = num % 10; if d != *digits.get(i as uint) { return false; } num /= 10; i -= 1; } true } pub fn solve() -> int { let mut largest = 0;<|fim▁hole|> for a in range(100, 1000) { for b in range(100, 1000) { let c = a * b; if c > largest && is_palindrome(c) { largest = c; } } } largest }<|fim▁end|>
<|file_name|>test_parse_uri.py<|end_file_name|><|fim▁begin|>from yoyo.connections import parse_uri, unparse_uri def _test_parse_uri(connection_string, expected_uri_tuple): uri_tuple = parse_uri(connection_string) assert isinstance(uri_tuple, tuple) assert (uri_tuple == expected_uri_tuple) def _test_unparse_uri(uri_tuple, expected_connection_string): connection_string = unparse_uri(uri_tuple) assert isinstance(connection_string, str) assert (connection_string == expected_connection_string) def test_uri_without_db_params():<|fim▁hole|> _test_unparse_uri(uri_tuple, connection_string) def test_parse_uri_with_db_params(): connection_string = 'odbc://user:password@server:7777/database?DSN=dsn' uri_tuple = ('odbc', 'user', 'password', 'server', 7777, 'database', {'DSN': 'dsn'}) _test_parse_uri(connection_string, uri_tuple) _test_unparse_uri(uri_tuple, connection_string)<|fim▁end|>
connection_string = 'postgres://user:password@server:7777/database' uri_tuple = ('postgres', 'user', 'password', 'server', 7777, 'database', None) _test_parse_uri(connection_string, uri_tuple)
<|file_name|>c10t-tk.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # vi:ts=4 sw=4 et import re import os.path import subprocess from mainwindow import MainWindow def default_c10t_executable(): return "./c10t" def default_minecraft_world(): return "~/.minecraft/saves/World1" def default_output_image(): return os.path.abspath("out.png") def quote_arg_if_needed(arg): """Add quotes if the argument has 'weird' characters. This function is extremely simple, and it is not fool-proof. Improvements are quite welcome! WARNING: single-quotes inside the argument will break this!""" if re.search(r'''[^-a-zA-Z0-9_.,/+=]''', arg): return "'%s'" % (arg,) else: return arg def args_to_string(args): """Converts a list of arguments to one string that can be copy-pasted into a terminal and will work (hopefully).""" return " ".join(quote_arg_if_needed(arg) for arg in args) class Program(object): def __init__(self): self.win = MainWindow() self.args = [] # Files self.win.ui.exepath = default_c10t_executable() self.win.ui.world = default_minecraft_world() self.win.ui.output = default_output_image() self.update_ui_commandline() self.win.update_button_callback = self.update_ui_commandline self.win.run_button_callback = self.run_command self.win.load_button_callback = self.load_image def run_command(self):<|fim▁hole|> # Meanwhile... let's just block this program until c10t finishes... # Ugly, but better than nothing. 
proc.communicate() # TODO: Check process returncode self.load_image() def load_image(self): self.win.load_image_from_file(os.path.expanduser(self.win.ui.output)) def update_ui_commandline(self): self.build_commandline() self.win.ui.command = args_to_string(self.args) def build_commandline(self): ui = self.win.ui args = [os.path.expanduser(ui.exepath)] # Filtering if ui.topcheck : args.extend(["--top" , str(ui.top )]) if ui.bottomcheck: args.extend(["--bottom", str(ui.bottom)]) if ui.limitscheck: args.extend([ "--limits", ",".join(str(x) for x in ( ui.limitsnorth, ui.limitssouth, ui.limitseast, ui.limitswest, )) ]) if ui.cavemodecheck: args.append("--cave-mode") if ui.excludecheck: for block in re.split("[ \t,;/]+", ui.exclude): args.extend(["-e", str(block)]) if ui.includecheck: args.append("--hide-all") for block in re.split("[ \t,;/]+", ui.include): args.extend(["-i", str(block)]) # Rendering if ui.obliquecheck : args.append("--oblique") if ui.obliqueanglecheck: args.append("--oblique-angle") if ui.isometriccheck : args.append("--isometric") if ui.nightcheck : args.append("--night") if ui.heightmapcheck : args.append("--heightmap") if ui.rotate : args.extend(["-r", str(ui.rotate)]) if int(ui.threads) != 0: args.extend(["--threads", str(ui.threads)]) # Text and fonts args.extend(["--ttf-size" , str(ui.ttfsize)]) args.extend(["--ttf-color", str(ui.ttfcolor)]) if ui.showplayerscheck: args.append("--show-players") if ui.showsignscheck : args.append("--show-signs") if ui.showcoordscheck : args.append("--show-coordinates") if ui.playercolorcheck: args.extend(["--player-color", str(ui.playercolor)]) if ui.signcolorcheck : args.extend(["--sign-color", str(ui.signcolor)]) if ui.coordcolorcheck : args.extend(["--coordinate-color", str(ui.coordcolor)]) # Adding the "Files" section to the end for readability reasons args.extend([ "-w", os.path.expanduser(ui.world), "-o", os.path.expanduser(ui.output), ]) self.args = args def main(self): self.win.mainloop() if __name__ == 
"__main__": p = Program() p.main()<|fim▁end|>
self.update_ui_commandline() proc = subprocess.Popen(self.args, shell=False) # TODO: Add a progress window/progress bar
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import json try: from urllib.parse import urljoin from urllib.parse import urlencode except ImportError: from urlparse import urljoin from urllib import urlencode import facepy from django.conf import settings from django.utils import timezone from django.conf import settings from django.core import signing from django.core.urlresolvers import reverse from django.utils import encoding from . facepy_wrapper import utils GRAPH_MAX_TRIES = 3 FACEBOOK_TIMEOUT = getattr(settings, 'FACEBOOK_AUTH_BACKEND_FACEBOOK_TIMEOUT', timezone.timedelta(seconds=20).total_seconds()) FACEBOOK_API_VERSION = getattr(settings, 'FACEBOOK_API_VERSION', '2.1') class InvalidNextUrl(Exception): pass class Next(object): salt = 'facebook_auth.urls.Next' def encode(self, data): data = self.dumps(data) return urlencode({'next': data}) def decode(self, data): try: return self.loads(data) except signing.BadSignature: raise InvalidNextUrl() def dumps(self, obj): data = json.dumps( obj, separators=(',', ':'), sort_keys=True).encode('utf-8') base64d = signing.b64_encode(data) return signing.Signer(salt=self.salt).sign(base64d) def loads(self, s): base64d = encoding.force_bytes( signing.Signer(salt=self.salt).unsign(s)) data = signing.b64_decode(base64d) return json.loads(data.decode('utf-8')) <|fim▁hole|> def redirect_uri(next, close): return urljoin( settings.FACEBOOK_CANVAS_URL, reverse('facebook-auth-handler') + "?" 
+ Next().encode({'next': next, 'close': close}) ) def get_from_graph_api(graphAPI, query): for i in range(GRAPH_MAX_TRIES): try: return graphAPI.get(query) except facepy.FacepyError as e: if i == GRAPH_MAX_TRIES - 1 or getattr(e, 'code', None) != 1: raise def get_application_graph(version=None): version = version or FACEBOOK_API_VERSION token = (facepy.utils .get_application_access_token(settings.FACEBOOK_APP_ID, settings.FACEBOOK_APP_SECRET, api_version=version)) return get_graph(token) def get_graph(*args, **kwargs): version = FACEBOOK_API_VERSION return utils.get_graph(*args, version=version, timeout=FACEBOOK_TIMEOUT, **kwargs) def get_long_lived_access_token(access_token): return utils.get_long_lived_access_token( access_token=access_token, client_id=settings.FACEBOOK_APP_ID, client_secret=settings.FACEBOOK_APP_SECRET, ) def get_access_token(code=None, redirect_uri=None): return utils.get_access_token( code=code, redirect_uri=redirect_uri, client_id=settings.FACEBOOK_APP_ID, client_secret=settings.FACEBOOK_APP_SECRET, timeout=FACEBOOK_TIMEOUT, )<|fim▁end|>
<|file_name|>ParserUI.js<|end_file_name|><|fim▁begin|>/** * Created by zacharymartin on 4/20/15. */ ParserUI.PARSING = 0; ParserUI.PLATES = 1; ParserUI.FEATURES = 2; ParserUI.EXPERIMENT = 3; ParserUI.MANUAL_ENTRY = "byManualEntry"; ParserUI.PLATE_LEVEL_FEATURE = "byFeature"; ParserUI.FEATURE_LIST_PLACEHOLDER = "--- features ---"; ParserUI.LABEL_LIST_PLACEHOLDER = "--- labels ---"; function ParserUI(parsingController){ this.parsingController = parsingController; this.parseOnlyModeOn = false; var _self = this; // construct a flash messenger var flashMessenger = new FlashMessenger("userMsgPanel"); // references to all UI elements by Tab // Parsing var parsingNameElement = document.getElementById("parsingName"); var machineNameElement = document.getElementById("machineName"); var wellRowElement = document.getElementById("plateDimensions"); var wellColumnElement = document.getElementById("assayType"); var parsingDescriptionElement = document.getElementById("parsingDescription"); var selectedFileElement = document.getElementById("selectedFile"); var filesInput = document.getElementById("files"); var chooseFileButton = document.getElementById("getFile"); var delimiterList = document.getElementById("delimiterList"); var delimiterOptions = []; // an array of all the options elements in the delimiter // list // ~~~~~~~~~~~~~~~~~~~~~ Plate ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ var firstPlateCellRangeElement = document.getElementById("firstPlateCellRange"); var applyFirstPlateButton = document.getElementById("applyFirstPlate"); // ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Features ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ var newFeatureButton = document.getElementById("newFeature"); var addFeatureButton = document.getElementById("saveFeature"); var deleteFeatureButton = document.getElementById("deleteFeature"); var applyFeaturesButton = document.getElementById("applyFeatures"); var featureCellRangeElement = document.getElementById("featureCellRange"); var 
featureCategoryElement = document.getElementById("featureCategory"); var featureLevelRadioButtonSet = document.getElementById("featureLevel");<|fim▁hole|> var featureListElement = document.getElementById("featureList"); var featureOptions = []; // an array of all the options elements in the feature list var labelListElement = document.getElementById("labelList"); var labelOptions = []; // an array of all the options elements in the label list // ~~~~~~~~~~~~~~~~~~~~~~~~~ Experiment ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ var importAndSaveDataButton = document.getElementById("sendImportDataToServer"); var downloadFileImportButton = document.getElementById("downloadFileImport"); var downloadIntergroupButton = document.getElementById("downloadIntergroupFile"); var byPlateLevelFeatureRadioButton = document.getElementById("byFeature"); var byManualEntryRadioButton = document.getElementById("byManualEntry"); var plateLevelFeatureListElement = document.getElementById("plateLevelFeatureList"); var setPlateIdButton = document.getElementById("setPlateID"); var plateIdentifierList = document.getElementById("plateList"); var plateImportList = document.getElementById("plateImportList"); var experimentSelectizeElement; // an object representing the options in the experiment selectize element. This object // has a property for each option in the selectize element. The property name is the // name of the experiment and the displayed text for the option. 
The property value // id the experiment id and is the value of the selectize option var experimentSelectizeOptions = {}; var plateIdentifierSelectizeElement; var plateIdentifierSelectizeOptionValues = []; var byFeatureMethodDiv = document.getElementById("byFeatureMethod"); var byManualMethodDiv = document.getElementById("byManualEntryMethod"); //~~~~~~~~~~~~~~~~~~~~~~~~ General Elements ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ var $tabsElement = $("#tabs"); var parsingIDElement = document.getElementById("parsingId"); var saveConfigButton = document.getElementById("saveConfigToServer"); var saveAsConfigButton = document.getElementById("saveAsConfigToServer"); var selectedFiles; // ------------------- parsing tab getters and setters ------------------------------ this.getParsingName = function(){ if (_self.parseOnlyModeOn){ return parsingNameElement.innerHTML; } else { return parsingNameElement.value; } }; this.getPlateDimensions = function(){ if (_self.parseOnlyModeOn){ return wellRowElement.innerHTML; } else { return wellRowElement.value; } }; this.setPlateDimensions = function(plateDimensions){ if (_self.parseOnlyModeOn){ wellRowElement.innerHTML = plateDimensions; } else { wellRowElement.value = plateDimensions; } }; this.getAssayType = function(){ if (_self.parseOnlyModeOn){ return wellColumnElement.innerHTML; } else { return wellColumnElement.value; } }; this.setAssayType = function(assayType){ if (_self.parseOnlyModeOn){ wellColumnElement.innerHTML = assayType; } else { wellColumnElement.value = assayType; } }; this.setParsingName = function(parsingName){ if (_self.parseOnlyModeOn){ parsingNameElement.innerHTML = parsingName; } else { parsingNameElement.value = parsingName; } }; this.getMachineName = function(){ if (_self.parseOnlyModeOn){ return machineNameElement.innerHTML; } else { return machineNameElement.value; } }; this.setMachineName = function(machineName){ if (_self.parseOnlyModeOn){ machineNameElement.innerHTML = machineName; } else { 
machineNameElement.value = machineName; } }; this.getParsingDescription = function(){ if (_self.parseOnlyModeOn){ return parsingDescriptionElement.innerHTML; } else { return parsingDescriptionElement.value; } }; this.setParsingDescription = function(parsingDescription){ if (_self.parseOnlyModeOn){ parsingDescriptionElement.innerHTML = parsingDescription; } else { parsingDescriptionElement.value = parsingDescription; } }; this.getSelectedFileName = function(){ return selectedFileElement.innerHTML; }; this.setSelectedFileName = function(selectedFileName){ selectedFileElement.innerHTML = selectedFileName; }; this.getSelectedDelimiter = function(){ if (_self.parseOnlyModeOn){ return delimiterList.innerHTML; } else { return delimiterList.value; } }; this.setDelimiter = function(delimiterName){ if (delimiterName !== null){ if (_self.parseOnlyModeOn){ delimiterList.innerHTML = delimiterName; } else { delimiterList.value = delimiterName; } } else { if (_self.parseOnlyModeOn){ delimiterList.innerHTML = ""; } else { delimiterList.scrollTop = 0; } // deselect all options for (var i=0; i<delimiterOptions.length; i++){ delimiterOptions[i].selected = false; } } }; this.loadDelimiterList = function(delimiterNameArray){ // load the delimiterOptions array delimiterOptions = []; for (var i=0; i<delimiterNameArray.length; i++){ var optionName = delimiterNameArray[i]; var option = document.createElement("option"); delimiterOptions.push(option); option.setAttribute("value", optionName); option.innerHTML = optionName; } if (!_self.parseOnlyModeOn){ delimiterList.innerHTML = ""; // load the delimiter list for (var j=0; j<delimiterOptions.length; j++){ delimiterList.appendChild(delimiterOptions[j]) } } _self.setDelimiter(null); }; this.switchDelimiterListToSpan = function(){ var span = document.createElement("span"); span.innerHTML = _self.getSelectedDelimiter(); $(delimiterList).replaceWith(span); delimiterList = span; }; this.switchDelimiterListToSelect = function(){ var select = 
document.createElement("select"); for (var j=0; j<delimiterOptions.length; j++){ select.appendChild(delimiterOptions[j]); } select.value = _self.getSelectedDelimiter(); $(delimiterList).replaceWith(select); delimiterList = select; addEvent(select, "change", _self.handleDelimiterChange); }; this.getListedDelimiters = function(){ var delimiters = []; for (var i=0; i<delimiterOptions.length; i++){ delimiters.push(delimiterOptions[i].value); } return delimiters; }; // --------------------- parsing tab event handlers -------------------------------- this.handleDelimiterChange = function(event){ var selectedDelimiter = delimiterList.value; try{ _self.parsingController.changeDelimiter(selectedDelimiter); } catch (error){ _self.displayError(error); } }; this.handleFileSelect = function(event) { if (event.target && event.target.files){ // file input case selectedFiles = event.target.files; // FileList object } else if (event.dataTransfer && event.dataTransfer.files) { // drag and drop case selectedFiles = event.dataTransfer.files; } try{ _self.parsingController.loadFiles(selectedFiles); // if the files loaded without errors, display their names on the UI // TODO - display multiple file names _self.setSelectedFileName(selectedFiles[0].name); } catch (error){ _self.displayError(error); } }; // ---------------------- Plate tab getters and setters ------------------------------ this.getFirstPlateCellRange = function(){ var cellRange; try { if (_self.parseOnlyModeOn){ cellRange = ParserUI.convertStringToCellRange( firstPlateCellRangeElement.innerHTML); } else { cellRange = ParserUI.convertStringToCellRange(firstPlateCellRangeElement.value); } } catch (error) { _self.displayError(error); } return cellRange; }; this.setFirstPlateCellRange = function(firstPlateRange){ if (_self.parseOnlyModeOn){ firstPlateCellRangeElement.innerHTML = firstPlateRange.toString(); } else { firstPlateCellRangeElement.value = firstPlateRange.toString(); } }; // ---------------------- Plate tab event 
handlers ----------------------------------- this.handleFirstPlateCellRangeChange = function(){ var selectedCellRange = _self.getFirstPlateCellRange(); try { _self.parsingController.selectCells(selectedCellRange); } catch (error){ _self.displayError(error); } }; this.handleApplyFirstPlate = function(){ var selectedCellRange = _self.getFirstPlateCellRange(); try { _self.parsingController.defineFirstPlate(selectedCellRange); } catch (error){ _self.displayError(error); } }; // ----------------------- Features tab getters and setters -------------------------- this.getFeatureCellRange = function(){ var cellRange; try { if (_self.parseOnlyModeOn){ cellRange = ParserUI.convertStringToCellRange( featureCellRangeElement.innerHTML); } else { cellRange = ParserUI.convertStringToCellRange( featureCellRangeElement.value); } } catch (error){ _self.displayError(error); } return cellRange; }; this.setFeatureCellRange = function(featureRange){ if (_self.parseOnlyModeOn){ featureCellRangeElement.innerHTML = featureRange.toString(); } else { featureCellRangeElement.value = featureRange.toString(); } }; this.getFeatureLevel = function(){ var level = null; if (wellLevelRadioButton.checked){ level = WELL_LEVEL; } else if (document.getElementById("plateLevel").checked){ level = PLATE_LEVEL; } else if (document.getElementById("experimentLevel").checked){ level = EXPERIMENT_LEVEL; } return level; }; this.setFeatureLevel = function(level){ if (level === WELL_LEVEL){ wellLevelRadioButton.checked = true; plateLevelRadioButton.checked = false; experimentLevelRadioButton.checked = false; } else if (level === PLATE_LEVEL) { wellLevelRadioButton.checked = false; plateLevelRadioButton.checked = true; experimentLevelRadioButton.checked = false; } else if (level === EXPERIMENT_LEVEL){ wellLevelRadioButton.checked = false; plateLevelRadioButton.checked = false; experimentLevelRadioButton.checked = true; } else { wellLevelRadioButton.checked = false; plateLevelRadioButton.checked = false; 
experimentLevelRadioButton.checked = false; } }; this.getFeatureCategory = function(){ if (_self.parseOnlyModeOn){ return featureCategoryElement.innerHTML; } else { return featureCategoryElement.value; } }; this.setFeatureCategory = function(categoryName){ if (_self.parseOnlyModeOn){ featureCategoryElement.innerHTML = categoryName; } else { featureCategoryElement.value = categoryName; } }; this.getSelectedFeature = function(){ return featureListElement.value; }; this.loadFeatureList = function(featureNameArray){ featureListElement.innerHTML = ""; featureListElement.scrollTop = 0; featureOptions = []; if (featureNameArray && featureNameArray.length){ loadSelectElement(featureListElement, featureNameArray, featureOptions); } else { // load the label selector with a place holder option = document.createElement("option"); option.setAttribute("value", ParserUI.FEATURE_LIST_PLACEHOLDER); option.innerHTML = ParserUI.FEATURE_LIST_PLACEHOLDER; featureOptions.push(option); featureListElement.appendChild(option); } }; this.setFeature = function(featureName){ if (featureName !== null){ featureListElement.value = featureName; } else { // deselect all options var featureOptionsArray = featureListElement.options; for (var i=0; i<featureOptionsArray.length; i++){ featureOptionsArray[i].selected = false; } } }; this.getListedFeatures = function(){ var listedFeatureNames = []; for (var i=0; i<featureOptions.length; i++){ listedFeatureNames.push(featureOptions[i].value); } return listedFeatureNames; }; this.loadLabelList = function(labelDescriptors){ // clear the label list selector labelListElement.innerHTML = ""; labelListElement.scrollTop = 0; labelOptions = []; var option; if (labelDescriptors){ // load the label selector with the label descriptors for (var i=0; i<labelDescriptors.length; i++){ var value = labelDescriptors[i].cell; var descriptor = labelDescriptors[i].descriptor; option = document.createElement("option"); labelOptions.push(option); option.setAttribute("value", 
value); option.innerHTML = descriptor; labelListElement.appendChild(option); } } else { // load the label selector with a place holder option = document.createElement("option"); option.setAttribute("value", ParserUI.FEATURE_LIST_PLACEHOLDER); option.innerHTML = ParserUI.LABEL_LIST_PLACEHOLDER; labelOptions.push(option); labelListElement.appendChild(option); } }; this.setLabel = function(labelCell){ if (labelCell !== null){ labelListElement.value = labelCell; } else { // deselect all options var labelOptionsArray = labelListElement.options; for (var i=0; i<labelOptionsArray.length; i++){ labelOptionsArray[i].selected = false; } } }; this.getListedLabels = function(){ var listedLabels = []; for (var i=0; i<labelOptions.length; i++){ listedLabels.push(labelOptions[i].innerHTML); } return listedLabels; }; this.getListedLabelCells = function(){ var listedLabelCells = []; for (var i=0; i<labelOptions.length; i++){ listedLabelCellss.push(labelOptions[i].value); } return listedLabelCells; }; this.clearFeatureInfo = function(){ _self.setFeatureCellRange(""); _self.setFeatureLevel(null); _self.setFeatureCategory(""); _self.setFeature(null); _self.loadLabelList(null); }; this.enableDeleteFeatureButton = function(){ deleteFeatureButton.disabled = false; }; this.disableDeleteFeatureButton = function(){ deleteFeatureButton.disabled = true; }; // ------------------- Features tab event handlers ---------------------------------- this.handleFeatureCellRangeChange = function(){ var selectedCellRange = _self.getFeatureCellRange(); try { _self.parsingController.selectCells(selectedCellRange); } catch (error){ _self.displayError(error); } }; this.handleFeatureSelection = function(event){ var selectedFeature = _self.getSelectedFeature(); if (selectedFeature !== ParserUI.FEATURE_LIST_PLACEHOLDER){ try { _self.parsingController.displayFeature(selectedFeature); _self.enableDeleteFeatureButton(); } catch(error){ _self.displayError(error); } } else { event.preventDefault(); } }; 
this.handleNewFeature = function(){ try { _self.parsingController.prepareUIForNewFeature(); } catch (error){ _self.displayError(error); } }; this.handleAddFeature = function(){ try{ _self.parsingController.defineFeature(); } catch(error){ _self.displayError(error); } }; this.handleDeleteFeature = function(){ var nameOfFeatureToRemove = _self.getSelectedFeature(); try{ _self.parsingController.removeFeature(nameOfFeatureToRemove); } catch(error){ _self.displayError(error); } }; this.handleApplyFeatures = function(){ try { _self.parsingController.applyAllFeaturesToGrid(); } catch(error){ _self.displayError(error); } }; // ------------------------- Experiment tab getters and setters ---------------------- this.getPlateIDSelectMethod = function(){ var method = null; if (byPlateLevelFeatureRadioButton.checked){ method = ParserUI.PLATE_LEVEL_FEATURE; } else if (byManualEntryRadioButton.checked){ method = ParserUI.MANUAL_ENTRY; } return method; }; this.setPlateIDSelectMethod = function(method){ if (method === ParserUI.MANUAL_ENTRY){ byManualEntryRadioButton.checked = true; byPlateLevelFeatureRadioButton.checked = false; } else if (method === ParserUI.PLATE_LEVEL_FEATURE){ byManualEntryRadioButton.checked = false; byPlateLevelFeatureRadioButton.checked = true; } else { byManualEntryRadioButton.checked = false; byPlateLevelFeatureRadioButton.checked = false; } }; this.loadPlateWithIDList = function(plateIDArray){ // clear the select element plateIdentifierList.innerHTML = ""; plateIdentifierList.scrollTop = 0; plateImportList.innerHTML = ""; // load the plate ID select element for (var i=0; i<plateIDArray.length; i++){ var optionContents = "plate " + (i+1) + ": " +plateIDArray[i]; var importOptionContents = "plate "+ (i+1); var option = document.createElement("option"); var importOption = document.createElement("option"); option.setAttribute("value", i.toString()); importOption.setAttribute("value", i.toString()); option.innerHTML = optionContents; importOption.innerHTML = 
importOptionContents; plateIdentifierList.appendChild(option); plateImportList.appendChild(importOption); } $('#plateImportList').multiSelect({ selectableHeader: "<div class='custom-header'>Plates to Import</div>", selectionHeader: "<div class='custom-header'>Plates to Skip</div>", afterSelect: this.plateImportListSelection, afterDeselect: this.plateImportListSelection }); this.plateImportListSelection(); }; this.plateImportListSelection = function(values) { _self.parsingController.platesToImport.length = 0; var options = plateImportList.options, cnt = 0, bImp; for (var idx = 0; idx < options.length; ++idx) { bImp = options[idx].selected; _self.parsingController.platesToImport.push(!bImp); if (bImp) cnt++; } importAndSaveDataButton.disabled = (cnt == options.length) ? true : false; }; this.getListedPlatesWithIDs = function(){ var result = []; var options = plateIdentifierList.options; for (var i=0; i<options.length; i++){ var opitonContents = options[i].innerHTML; var splitContents = option.split(" "); var plateID = splitContents[splitContents.length - 1]; result.push(plateID); } return result; }; this.getSelectedPlateWithIDIndex = function(){ var selectedIndex = plateIdentifierList.value; var parsedSelectedIndex = parseInt(selectedIndex); if (selectedIndex === "" || isNaN(parsedSelectedIndex)){ return null; } else { return parsedSelectedIndex; } }; this.setSelectedPlateWithIDIndex = function(plateIndex){ var plateWithIDOptionsArray = plateIdentifierList.options; if (plateIndex !== null && plateWithIDOptionsArray.length !== 0){ plateIdentifierList.value = plateIndex; } else { plateIdentifierList.scrollTop = 0; // deselect all options for (var i=0; i<plateWithIDOptionsArray.length; i++){ plateWithIDOptionsArray[i].selected = false; } } }; this.loadPlateLevelFeatureList = function(plateLevelFeatureNameArray){ // clear the select element and scroll back to the top plateLevelFeatureListElement.innerHTML = ""; plateLevelFeatureListElement.scrollTop = 0; // load the 
select element for (var i=0; i<plateLevelFeatureNameArray.length; i++){ var optionName = plateLevelFeatureNameArray[i]; var option = document.createElement("option"); option.setAttribute("value", optionName); option.innerHTML = optionName; plateLevelFeatureListElement.appendChild(option); } }; this.getListedPlateLevelFeatures = function(){ var result = []; var options = plateLevelFeatureListElement.options; for (var i=0; i<options.length; i++){ var opitonContents = options[i].innerHTML; result.push(optionContents); } return result; }; this.getSelectedPlateLevelFeature = function(){ var selectedPlateLevelFeature = plateLevelFeatureListElement.value; if (!selectedPlateLevelFeature){ return null; } else { return selectedPlateLevelFeature; } }; this.setSelectedPlateLevelFeature = function(featureName){ if (featureName !== null){ plateLevelFeatureListElement.value = featureName; } else { plateIdentifierList.scrollTop = 0; var plateFeatureOptions = plateLevelFeatureListElement.options; // deselect all options for (var i=0; i<plateFeatureOptions.length; i++){ plateFeatureOptions[i].selected = false; } } }; /** * * @param experimentNameIDObjectArray - an array of objects, one for each experiment * in which the experiment name is listed under the * property "name" and the experiment id is listed * under the property "id" */ this.loadExperimentSelectize = function(experimentNameIDObjectArray){ experimentSelectizeElement.clearOptions(); experimentSelectizeOptions = {}; for (var i=0; i<experimentNameIDObjectArray.length; i++){ var name = experimentNameIDObjectArray[i].name; var id = experimentNameIDObjectArray[i].id; experimentSelectizeElement.addOption(experimentNameIDObjectArray[i]); experimentSelectizeOptions[name] = id; } experimentSelectizeElement.refreshOptions(true); }; this.getExperimentOptionNames = function(){ var result = []; for (var experimentName in experimentSelectizeOptions){ result.push(experimentName); } return result; }; this.getExperimentOptionIDs = 
function(){
        var result = [];
        for (var experimentName in experimentSelectizeOptions){
            var experimentID = experimentSelectizeOptions[experimentName];
            result.push(experimentID);
        }
        return result;
    };

    // Display name of the currently selected experiment (looked up by its ID).
    this.getSelectedExperimentName = function(){
        var selectedExperimentID = this.getSelectedExperimentID();
        return experimentSelectizeElement.getOption(selectedExperimentID).html();
    };

    this.getSelectedExperimentID = function(){
        return experimentSelectizeElement.getValue();
    };

    this.setSelectedExperimentByID = function(experimentID){
        experimentSelectizeElement.setValue(experimentID)
    };

    // Select an experiment by name via the name -> id map.
    this.setSelectedExperimentByName = function(experimentName){
        var experimentID = experimentSelectizeOptions[experimentName];
        return experimentSelectizeElement.setValue(experimentID);
    };

    /**
     *
     * @param plateIDArray - an array of plate identifiers to load into the plate id
     *                       selectize element
     */
    this.loadPlateIDSelectize = function(plateIDArray){
        plateIdentifierSelectizeElement.clearOptions();
        plateIdentifierSelectizeOptionValues = [];
        for (var i=0; i<plateIDArray.length; i++){
            plateIdentifierSelectizeElement.addOption({
                plateID: plateIDArray[i]
            });
            plateIdentifierSelectizeOptionValues.push(plateIDArray[i]);
        }
        plateIdentifierSelectizeElement.refreshOptions(true);
    };

    // Copy of the tracked plate identifier option values.
    this.getPlateIDSelectizeOptionValues = function(){
        var result = [];
        for (var i=0; i<plateIdentifierSelectizeOptionValues.length; i++){
            result.push(plateIdentifierSelectizeOptionValues[i]);
        }
        return result;
    };

    this.getSelectedPlateIdentifier = function(){
        return plateIdentifierSelectizeElement.getValue();
    };

    // Select a plate identifier; null clears the selection silently.
    this.setSelectedPlateIdentifier = function(plateIdentifier){
        if (plateIdentifier === null){
            plateIdentifierSelectizeElement.clear(true);
        } else {
            plateIdentifierSelectizeElement.setValue(plateIdentifier);
        }
    };

    // Advance both the plate list and the identifier selectize to their next
    // entries (wrapping around), to speed up sequential manual assignment.
    this.incrementPlateAndIdentifierSelections = function(){
        // move the selected plate with id selection down by 1
        var selectedPlateIndex = _self.getSelectedPlateWithIDIndex();
        var numPlates = plateIdentifierList.options.length;
        var
newSelectedPlateIndex = (selectedPlateIndex + 1) % numPlates;
        _self.setSelectedPlateWithIDIndex(newSelectedPlateIndex);

        // move the selected plate identifier down by 1
        var selectedPlateIdentifier = _self.getSelectedPlateIdentifier();
        for(var i=0; i<plateIdentifierSelectizeOptionValues.length; i++){
            if (plateIdentifierSelectizeOptionValues[i] === selectedPlateIdentifier){
                var newSelectedPlateIdentifierIndex
                        = (i+1)%plateIdentifierSelectizeOptionValues.length;
                var newSelectedPlateIdentifier
                        = plateIdentifierSelectizeOptionValues[newSelectedPlateIdentifierIndex];
                _self.setSelectedPlateIdentifier(newSelectedPlateIdentifier);
            }
        }
    };

    // ------------------ Experiment tab event handlers ----------------------------------

    // Persist the parsed data to the server.
    this.handleDataImport = function(){
        try {
            _self.parsingController.saveImportDataToServer();
        } catch (error){
            _self.displayError(error);
        }
    };

    // Download the parsed data as a file.
    this.handleDownloadFileImport = function(){
        try {
            _self.parsingController.downloadImportData();
        } catch (error){
            _self.displayError(error);
        }
    };

    this.handleIntergroupDownload = function(){
        try {
            _self.parsingController.downloadIntergroupData() ;
        } catch (error) {
            _self.displayError(error);
        }
    };

    // Switch plate-ID assignment to the plate-level-feature method and show the
    // matching panel (the panels toggle even if the controller call fails).
    this.handleByPlateLevelFeatureMethod = function(){
        try {
            _self.parsingController.assignPlateIDsByFeature();
        } catch(error){
            _self.displayError(error);
        }
        byFeatureMethodDiv.style.display = "block";
        byManualMethodDiv.style.display = "none";
    };

    // Switch plate-ID assignment to manual entry and show the matching panel.
    this.handleByManualMethod = function(){
        try {
            _self.parsingController.assignPlateIDsByManualMethod();
        } catch(error){
            _self.displayError(error);
        }
        byFeatureMethodDiv.style.display = "none";
        byManualMethodDiv.style.display = "block";
    };

    // Use the chosen plate-level feature as the source of plate IDs.
    this.handlePlateLevelFeatureSelection = function(){
        var selectedFeature = _self.getSelectedPlateLevelFeature();
        try{
            _self.parsingController.setPlateLevelFeatureAsPlateID(selectedFeature);
        } catch (error){
            _self.displayError(error);
        }
    };

    // Assign the chosen identifier to the chosen plate.
    this.handlePlateIdSetButtonClick = function(){
        var selectedIdentifier = _self.getSelectedPlateIdentifier();
        var
selectedPlateIndex = _self.getSelectedPlateWithIDIndex(); try{ _self.parsingController.setPlateID(selectedPlateIndex, selectedIdentifier); } catch (error) { _self.displayError(error); } }; this.handlePlateWithIdListSelection = function(){ var selectedPlateIndex = _self.getSelectedPlateWithIDIndex(); try{ _self.parsingController.showPlate(selectedPlateIndex); } catch(error){ _self.displayError(error); } }; this.handleExperimentSelection = function(){ var selectedExperimentID = _self.getSelectedExperimentID(); try{ _self.parsingController.fillOutExperimentPlateIDs(selectedExperimentID); } catch(error){ _self.displayError(error); } }; this.handlePlateIdSelection = function(selectedPlateID){ var selectedFeatureName = _self.getSelectedPlateLevelFeature(); try{ //_self.parsingController.showPlateLevelFeature(selectedFeatureName); } catch(error){ _self.displayError(error); } }; // ---------------------- tabs setters and getters ----------------------------------- this.getActiveTab = function(){ return $tabsElement.tabs( "option", "active" ); }; this.setActiveTab = function(tab){ $tabsElement.tabs("option", "active", tab); }; // ---------------------- tabs event handler ----------------------------------------- this.handleClickCell = function(event, ui){ if (e.shiftKey) { alert("shift+click") } if (e.ctrlKey) { alert("control+click") } var newTab = ui.keyCode var oldTab = ui.oldTab.index(); try { _self.parsingController.changeStage(newTab, oldTab); } catch (error) { event.preventDefault(); _self.displayError(error); } }; this.handleTabChange = function(event, ui){ var newTab = ui.newTab.index(); var oldTab = ui.oldTab.index(); try { _self.parsingController.changeStage(newTab, oldTab); } catch (error) { event.preventDefault(); _self.displayError(error); } }; // ------------------- General getters and setters ---------------------------------- this.getParsingID = function(){ return parsingIDElement.value; }; this.setParsingID = function(id){ parsingIDElement.value = id; }; 
this.displayError = function(error){ console.log(error); this.displayErrorMessage(error.getMessage()); }; this.displayErrorMessage = function(message){ flashMessenger.showUserMsg(FlashMessenger.ERROR, message); }; this.displayMessage = function(message){ flashMessenger.showUserMsg(FlashMessenger.HIGHLIGHT, message) }; this.enableSaveButton = function(){ saveConfigButton.disabled = false; }; this.disableSaveButton = function(){ saveConfigButton.disabled = true; }; this.enableSaveAsButton = function(){ saveAsConfigButton.disabled = false; }; this.disableSaveAsButton = function(){ saveAsConfigButton.disabled = true; }; // -------------------- General event handlers --------------------------------------- this.handleSaveConfig = function(){ try { _self.parsingController.saveParsingConfigToServer(); } catch (error){ _self.displayError(error); } }; this.handleSaveAsConfig = function(){ if (_self.parseOnlyModeOn){ _self.switchOutOfParseOnlyMode(); } else { try { _self.parsingController.saveAsParsingConfigToServer(); } catch (error){ _self.displayError(error); } } }; this.switchSaveAsButtonToModifyAsNewParsingConfig = function(){ saveAsConfigButton.innerHTML = "Modify as new Parsing Configuration"; }; this.switchSaveAsButtonBackToSaveAs = function(){ saveAsConfigButton.innerHTML = "Save As"; }; this.switchToParseOnlyMode = function(){ parsingNameElement = switchTextInputToSpan(parsingNameElement); machineNameElement = switchTextInputToSpan(machineNameElement); parsingDescriptionElement = switchTextAreaToP(parsingDescriptionElement); _self.switchDelimiterListToSpan(); firstPlateCellRangeElement = switchTextInputToSpan(firstPlateCellRangeElement); featureCellRangeElement = switchTextInputToSpan(featureCellRangeElement); featureCategoryElement = switchTextInputToSpan(featureCategoryElement); applyFirstPlateButton.style.display = "none"; newFeatureButton.style.display = "none"; addFeatureButton.style.display = "none"; deleteFeatureButton.style.display = "none"; 
_self.switchSaveAsButtonToModifyAsNewParsingConfig(); _self.parseOnlyModeOn = true; }; this.switchOutOfParseOnlyMode = function(){ parsingNameElement = switchSpanToTextInput(parsingNameElement); machineNameElement = switchSpanToTextInput(machineNameElement); parsingDescriptionElement = switchPToTextArea(parsingDescriptionElement); _self.switchDelimiterListToSelect(); firstPlateCellRangeElement = switchSpanToTextInput(firstPlateCellRangeElement); featureCellRangeElement = switchSpanToTextInput(featureCellRangeElement); featureCategoryElement = switchSpanToTextInput(featureCategoryElement); applyFirstPlateButton.style.display = "inline"; newFeatureButton.style.display = "inline"; addFeatureButton.style.display = "inline"; deleteFeatureButton.style.display = "inline"; _self.switchSaveAsButtonBackToSaveAs(); _self.parseOnlyModeOn = false; }; function switchTextInputToSpan(element){ var span = document.createElement("span"); span.innerHTML = element.value; span.id = element.id; $(element).replaceWith(span); return span; } function switchSpanToTextInput(element){ var textInput = document.createElement("input"); textInput.value = element.innerHTML; textInput.id = element.id; $(element).replaceWith(textInput); return textInput; } function switchTextAreaToP(element){ var p = document.createElement("p"); p.innerHTML = element.value; p.id = element.id; $(element).replaceWith(p); return p; } function switchPToTextArea(element){ var textArea = document.createElement("textarea"); textArea.value = element.innerHTML; textArea.id = element.id; $(element).replaceWith(textArea); return textArea; } function loadSelectElement(selectElement, optionNamesArray, optionsArray){ // clear the select element and scroll back to the top selectElement.innerHTML = ""; selectElement.scrollTop = 0; optionsArray = []; // load the select element for (var i=0; i<optionNamesArray.length; i++){ var optionName = optionNamesArray[i]; var option = document.createElement("option"); optionsArray.push(option); 
option.setAttribute("value", optionName); option.innerHTML = optionName; selectElement.appendChild(option); } } /** * getTargetElement - This function get a reference to the HTML element that * triggered an event * @param event - the event for which we wish the element that triggered it * @returns - the HTML element that triggered the event. */ function getTargetElement(event){ 'use strict'; var target; // make sure we have the event, depending on different browser // capabilities event = event || window.event; // get a reference to the element that triggered the event, depending on // different browser capabilities target = event.target || event.srcElement; return target; } // end of function getTargetElement /** * addEvent - This function adds an event handler to an html element in * a way that covers many browser types. * @param elementId - the string id of the element to attach the handler to * or a reference to the element itself. * @param eventType - a string representation of the event to be handled * without the "on" prefix * @param handlerFunction - the function to handle the event */ function addEvent(elementId, eventType, handlerFunction) { 'use strict'; var element; if (typeof(elementId) === "string"){ element = document.getElementById(elementId); } else { element = elementId; } if (element.addEventListener) { element.addEventListener(eventType, handlerFunction, false); } else if (window.attachEvent) { element.attachEvent("on" + eventType, handlerFunction); } } // end of function addEvent function init (){ // +++++++++++++++++++++ parsing tab events +++++++++++++++++++++++++++++++++++++ // when a file is selected using the files input, trigger the file select handler addEvent(filesInput, "change", _self.handleFileSelect); // if the choose file button is clicked, trigger a click event on the hidden // files input, to open a file system browser for selecting files addEvent(chooseFileButton, "click", function(){ filesInput.click(); }); // Attach listener 
for when a file is first dragged onto the screen addEvent(document, "dragenter", function(e){ e.stopPropagation(); e.preventDefault(); // Show an overlay so it is clear what the user needs to do document.body.classList.add('show-overlay'); }); // Attach a listener for while the file is over the browser window addEvent(document, "dragover", function(e) { e.stopPropagation(); e.preventDefault(); }); // Attach a listener for when the file is actually dropped, and trigger the // file select handler addEvent(document, "drop", function(e) { e.stopPropagation(); e.preventDefault(); // Hides the overlay document.body.classList.remove('show-overlay'); // Process the files _self.handleFileSelect(e); }); addEvent(delimiterList, "change", _self.handleDelimiterChange); // ++++++++++++++++++ plate tab events +++++++++++++++++++++++++++++++++++++++++++ addEvent(firstPlateCellRangeElement, "change", _self.handleFirstPlateCellRangeChange); addEvent(applyFirstPlateButton, "click", _self.handleApplyFirstPlate); // ++++++++++++++++++++ features tab events ++++++++++++++++++++++++++++++++++++++ addEvent(newFeatureButton, "click", _self.handleNewFeature); addEvent(addFeatureButton, "click", _self.handleAddFeature); addEvent(deleteFeatureButton, "click", _self.handleDeleteFeature); addEvent(applyFeaturesButton, "click", _self.handleApplyFeatures); addEvent(featureListElement, "change", _self.handleFeatureSelection); addEvent(featureCellRangeElement, "change", _self.handleFeatureCellRangeChange); // ++++++++++++++++++++ Experiment tab events and selectize setup ++++++++++++++++ addEvent(importAndSaveDataButton, "click", _self.handleDataImport); addEvent(downloadFileImportButton, "click", _self.handleDownloadFileImport); addEvent(downloadIntergroupButton, "click", _self.handleIntergroupDownload); addEvent(byPlateLevelFeatureRadioButton, "click", _self.handleByPlateLevelFeatureMethod); addEvent(byManualEntryRadioButton, "click", _self.handleByManualMethod); 
addEvent(plateLevelFeatureListElement, "change", _self.handlePlateLevelFeatureSelection); addEvent(setPlateIdButton, "click", _self.handlePlateIdSetButtonClick); addEvent(plateIdentifierList, "change", _self.handlePlateWithIdListSelection); var $select1 = $("#experiment").selectize({ labelField: "name", valueField: "id", onChange: _self.handleExperimentSelection, create: false }); experimentSelectizeElement = $select1[0].selectize; var $select2 = $("#plateID").selectize({ labelField: "plateID", valueField: "plateID", onChange: _self.handlePlateIdSelection, create: true }); plateIdentifierSelectizeElement = $select2[0].selectize; // ++++++++++++++++++++++++ tabs setup and events ++++++++++++++++++++++++++++++++ // to get jQuery-UI tab functionality working $tabsElement.tabs({ beforeActivate: _self.handleTabChange }); // ++++++++++++++++++++++++++ General events +++++++++++++++++++++++++++++++++++++ addEvent(saveConfigButton, "click", _self.handleSaveConfig); addEvent(saveAsConfigButton, "click", _self.handleSaveAsConfig); // +++++++++++++++++++++++++++ start the Parsing Controller ++++++++++++++++++++++ } // set up all of the event handlers init(); } ParserUI.convertStringToCellRange = function(string){ if (!string){ return null; } var range = string.trim(); var rangeSplit = range.split(":"); if (!rangeSplit || !(rangeSplit[0] && typeof rangeSplit[0] === "string") || !(rangeSplit[1] && typeof rangeSplit[0] === "string")){ return null; } var startCoords = Grid.getCellCoordinates(rangeSplit[0].trim()); var endCoords = Grid.getCellCoordinates(rangeSplit[1].trim()); if (!startCoords || !startCoords[0] || !startCoords[1] || typeof startCoords[0] !== "number" || typeof startCoords[1] !== "number"){ return null } if (!endCoords || !endCoords[0] || !endCoords[1] || typeof endCoords[0] !== "number" || typeof endCoords[1] !== "number"){ return null } var cellRange = new CellRange(startCoords[0], startCoords[1], endCoords[0], endCoords[1]); return cellRange; };<|fim▁end|>
// Cached references to the feature-level radio buttons (see get/setFeatureLevel).
var wellLevelRadioButton = document.getElementById("wellLevel");
var plateLevelRadioButton = document.getElementById("plateLevel");
var experimentLevelRadioButton = document.getElementById("experimentLevel");
<|file_name|>ConnectorLogic.java<|end_file_name|><|fim▁begin|>/* carsharing-ds: car sharing datasource for the integreen cloud Copyright (C) 2015 TIS Innovation Park - Bolzano/Bozen - Italy This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. */ package it.bz.tis.integreen.carsharingbzit; import it.bz.idm.bdp.dto.DataTypeDto; import it.bz.idm.bdp.dto.SimpleRecordDto; import it.bz.idm.bdp.dto.TypeMapDto; import it.bz.idm.bdp.dto.carsharing.CarsharingStationDto; import it.bz.idm.bdp.dto.carsharing.CarsharingVehicleDto; import it.bz.tis.integreen.carsharingbzit.api.ApiClient; import it.bz.tis.integreen.carsharingbzit.api.BoundingBox; import it.bz.tis.integreen.carsharingbzit.api.GetStationRequest; import it.bz.tis.integreen.carsharingbzit.api.GetStationResponse; import it.bz.tis.integreen.carsharingbzit.api.GetVehicleRequest; import it.bz.tis.integreen.carsharingbzit.api.GetVehicleResponse; import it.bz.tis.integreen.carsharingbzit.api.ListStationByBoundingBoxRequest; import it.bz.tis.integreen.carsharingbzit.api.ListStationsByCityRequest; import it.bz.tis.integreen.carsharingbzit.api.ListStationsByCityResponse; import it.bz.tis.integreen.carsharingbzit.api.ListVehicleOccupancyByStationRequest; import it.bz.tis.integreen.carsharingbzit.api.ListVehicleOccupancyByStationResponse; import it.bz.tis.integreen.carsharingbzit.api.ListVehicleOccupancyByStationResponse.VehicleAndOccupancies; import 
it.bz.tis.integreen.carsharingbzit.api.ListVehiclesByStationsRequest; import it.bz.tis.integreen.carsharingbzit.api.ListVehiclesByStationsResponse; import it.bz.tis.integreen.carsharingbzit.api.ListVehiclesByStationsResponse.StationAndVehicles; import it.bz.tis.integreen.carsharingbzit.tis.IXMLRPCPusher; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.ArrayList; import java.util.Arrays; import java.util.Date; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Set; import it.bz.idm.bdp.util.IntegreenException; /** * * @author Davide Montesin <[email protected]> */ public class ConnectorLogic { final static long INTERVALL = 10L * 60L * 1000L; public static final String CARSHARINGSTATION_DATASOURCE = "Carsharingstation"; public static final String CARSHARINGCAR_DATASOURCE = "Carsharingcar"; static final SimpleDateFormat SIMPLE_DATE_FORMAT = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ssXXX"); // 2014-09-15T12:00:00 static HashMap<String, String[]> process(ApiClient apiClient, String[] cityUIDs, IXMLRPCPusher xmlrpcPusher, HashMap<String, String[]> vehicleIdsByStationIds, long updateTime, ActivityLog activityLog, ArrayList<ActivityLog> lock) throws IOException { if (vehicleIdsByStationIds == null) // Do a full sync { vehicleIdsByStationIds = processSyncStations(apiClient, cityUIDs, xmlrpcPusher, activityLog, lock); } processPusDatas(apiClient, xmlrpcPusher, vehicleIdsByStationIds, updateTime, activityLog, lock); return vehicleIdsByStationIds; } static HashMap<String, String[]> processSyncStations(ApiClient apiClient, String[] cityUIDs, IXMLRPCPusher xmlrpcPusher, ActivityLog activityLog, ArrayList<ActivityLog> lock) throws IOException { /////////////////////////////////////////////////////////////// // Stations by city<|fim▁hole|>// ListStationsByCityRequest request = new ListStationsByCityRequest(cityUIDs); // ListStationsByCityResponse response = apiClient.callWebService(request, 
ListStationsByCityResponse.class); // CarsharingStationDto[] stations = response.getCityAndStations()[0].getStation(); /////////////////////////////////////////////////////////////// // Stations by Bounding Box /////////////////////////////////////////////////////////////// List<BoundingBox> boxes = new ArrayList<BoundingBox>(); boxes.add(new BoundingBox(10.375214,46.459147,11.059799,46.86113)); boxes.add(new BoundingBox(11.015081,46.450277,11.555557,46.765265)); boxes.add(new BoundingBox(11.458354,46.533418,11.99883,46.847924)); boxes.add(new BoundingBox(11.166573,46.218327,11.521568,46.455303)); boxes.add(new BoundingBox(11.092758,46.794448,11.797256,47.018653)); boxes.add(new BoundingBox(11.959305,46.598506,12.423477,47.098175)); Set<String> stationSet = new HashSet<String>(); for (BoundingBox box:boxes){ ListStationByBoundingBoxRequest request = new ListStationByBoundingBoxRequest(box); ListStationsByBoundingBoxResponse response = apiClient.callWebService(request, ListStationsByBoundingBoxResponse.class); if (response != null){ CarsharingStationDto[] stations = response.getStation(); for (int i = 0; i < stations.length; i++) { stationSet.add(stations[i].getId()); } } } /////////////////////////////////////////////////////////////// // Stations details /////////////////////////////////////////////////////////////// String[] stationIds = stationSet.toArray(new String[stationSet.size()]); GetStationRequest requestGetStation = new GetStationRequest(stationIds); GetStationResponse responseGetStation = apiClient.callWebService(requestGetStation, GetStationResponse.class); /////////////////////////////////////////////////////////////// // Vehicles by stations /////////////////////////////////////////////////////////////// ListVehiclesByStationsRequest vehicles = new ListVehiclesByStationsRequest(stationIds); ListVehiclesByStationsResponse responseVehicles = apiClient.callWebService(vehicles, ListVehiclesByStationsResponse.class); 
/////////////////////////////////////////////////////////////// // Vehicles details /////////////////////////////////////////////////////////////// HashMap<String, String[]> vehicleIdsByStationIds = new HashMap<>(); ArrayList<String> veichleIds = new ArrayList<String>(); for (StationAndVehicles stationVehicles : responseVehicles.getStationAndVehicles()) { String[] vehicleIds = new String[stationVehicles.getVehicle().length]; vehicleIdsByStationIds.put(stationVehicles.getStation().getId(), vehicleIds); for (int i = 0; i < stationVehicles.getVehicle().length; i++) { CarsharingVehicleDto carsharingVehicleDto = stationVehicles.getVehicle()[i]; veichleIds.add(carsharingVehicleDto.getId()); vehicleIds[i] = carsharingVehicleDto.getId(); } } GetVehicleRequest requestVehicleDetails = new GetVehicleRequest(veichleIds.toArray(new String[0])); GetVehicleResponse responseVehicleDetails = apiClient.callWebService(requestVehicleDetails, GetVehicleResponse.class); /////////////////////////////////////////////////////////////// // Write data to integreen /////////////////////////////////////////////////////////////// Object result = xmlrpcPusher.syncStations(CARSHARINGSTATION_DATASOURCE, responseGetStation.getStation()); if (result instanceof IntegreenException) { throw new IOException("IntegreenException"); } synchronized (lock) { activityLog.report += "syncStations(" + CARSHARINGSTATION_DATASOURCE + "): " + responseGetStation.getStation().length + "\n"; } result = xmlrpcPusher.syncStations(CARSHARINGCAR_DATASOURCE, responseVehicleDetails.getVehicle()); if (result instanceof IntegreenException) { throw new IOException("IntegreenException"); } synchronized (lock) { activityLog.report += "syncStations(" + CARSHARINGCAR_DATASOURCE + "): " + responseVehicleDetails.getVehicle().length + "\n"; } return vehicleIdsByStationIds; } static void processPusDatas(ApiClient apiClient, IXMLRPCPusher xmlrpcPusher, HashMap<String, String[]> vehicleIdsByStationIds, long updateTime, ActivityLog 
activityLog, ArrayList<ActivityLog> lock) throws IOException { /////////////////////////////////////////////////////////////// // Read vehicles occupancy and calculate summaries /////////////////////////////////////////////////////////////// String created = String.valueOf(updateTime); // Current and forecast for (long forecast : new long[] { 0, 30L * 60L * 1000L }) { String begin = String.valueOf(updateTime + forecast); // TODO begin buffer depends on car type String begin_carsharing = SIMPLE_DATE_FORMAT.format(new Date(updateTime - 30L * 60L * 1000L + forecast)); String end = SIMPLE_DATE_FORMAT.format(new Date(updateTime + INTERVALL + forecast)); String[] stationIds = vehicleIdsByStationIds.keySet().toArray(new String[0]); Arrays.sort(stationIds); HashMap<String, TypeMapDto> stationData = new HashMap<String, TypeMapDto>(); HashMap<String, TypeMapDto> vehicleData = new HashMap<String, TypeMapDto>(); for (String stationId : stationIds) { String[] vehicleIds = vehicleIdsByStationIds.get(stationId); ListVehicleOccupancyByStationRequest occupancyByStationRequest = new ListVehicleOccupancyByStationRequest(begin_carsharing, end, stationId, vehicleIds); ListVehicleOccupancyByStationResponse responseOccupancy = apiClient.callWebService(occupancyByStationRequest, ListVehicleOccupancyByStationResponse.class); VehicleAndOccupancies[] occupancies = responseOccupancy.getVehicleAndOccupancies(); if (occupancies== null) continue; if (occupancies.length != vehicleIds.length) // Same number of responses as the number to requests { throw new IllegalStateException(); } int free = 0; for (VehicleAndOccupancies vehicleOccupancy : occupancies) { if (vehicleOccupancy.getOccupancy().length > 1) { throw new IllegalStateException("Why???"); } int state = 0; // free if (vehicleOccupancy.getOccupancy().length == 1) { state = 1; } else { free++; } TypeMapDto typeMap = new TypeMapDto(); vehicleData.put(vehicleOccupancy.getVehicle().getId(), typeMap); String type = "unknown"; if 
(begin.equals(created)) type = DataTypeDto.AVAILABILITY; else type = DataTypeDto.FUTURE_AVAILABILITY; Set<SimpleRecordDto> dtos = typeMap.getRecordsByType().get(type); if (dtos == null){ dtos = new HashSet<SimpleRecordDto>(); typeMap.getRecordsByType().put(type, dtos); } dtos.add(new SimpleRecordDto(updateTime + forecast,state+0.,600)); } Set<SimpleRecordDto> dtos = new HashSet<SimpleRecordDto>(); TypeMapDto typeMap = new TypeMapDto(); typeMap.getRecordsByType().put(DataTypeDto.NUMBER_AVAILABE, dtos); if (begin.equals(created)) dtos.add(new SimpleRecordDto(updateTime + forecast, free+0.,600)); stationData.put(stationId, typeMap ); } /////////////////////////////////////////////////////////////// // Write data to integreen /////////////////////////////////////////////////////////////// Object result = xmlrpcPusher.pushData(CARSHARINGSTATION_DATASOURCE, new Object[]{stationData}); if (result instanceof IntegreenException) { throw new IOException("IntegreenException"); } synchronized (lock) { activityLog.report += "pushData(" + CARSHARINGSTATION_DATASOURCE + "): " + stationData.size()+ "\n"; } result = xmlrpcPusher.pushData(CARSHARINGCAR_DATASOURCE, new Object[]{vehicleData}); if (result instanceof IntegreenException) { throw new IOException("IntegreenException"); } synchronized (lock) { activityLog.report += "pushData(" + CARSHARINGCAR_DATASOURCE + "): " + vehicleData.size() + "\n"; } } } }<|fim▁end|>
///////////////////////////////////////////////////////////////
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate docopt; #[macro_use] extern crate serde_derive; use docopt::Docopt; #[macro_use] extern crate log; use log::{Level, LevelFilter, Metadata, Record}; use std::collections::{HashMap, HashSet}; use std::fs::File; use std::io::{BufRead, BufReader, Seek, SeekFrom}; use std::path::Path; static MY_LOGGER: SimpleLogger = SimpleLogger; struct SimpleLogger; impl log::Log for SimpleLogger { fn enabled(&self, metadata: &Metadata) -> bool { metadata.level() <= Level::Info } fn log(&self, record: &Record) { if self.enabled(record.metadata()) { println!("{} - {}", record.level(), record.args()); } } fn flush(&self) {} } struct CsvDesc<'a> { file_path: &'a Path, delimiter: char, quote: Option<char>, } impl<'a> std::fmt::Display for CsvDesc<'a> { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!( f, "{} {} {:?}", self.file_path.display(), self.delimiter, self.quote ) }<|fim▁hole|> path_arg: &'a String, delimiter_arg: &'a String, quote_arg: &'a String, ) -> Result<CsvDesc<'a>, &'static str> { let csv_file_path = Path::new(path_arg); let csv_delimiter = match delimiter_arg.chars().next() { Some(result) => result, None => return Err("incorrect delimiter"), }; let csv_quote = quote_arg.chars().next(); Ok(CsvDesc { file_path: &csv_file_path, delimiter: csv_delimiter, quote: csv_quote, }) } fn get_csv_cols(csv_desc: &CsvDesc) -> Result<Vec<String>, String> { let csv_file = match File::open(csv_desc.file_path) { Err(why) => panic!( "couldn't open csv @ {}: {}", csv_desc.file_path.display(), why ), Ok(file) => file, }; let csv_reader = BufReader::new(csv_file); let mut csv_line_iter = csv_reader.lines(); let csv_header: String = match csv_line_iter.next() { Some(result) => match result { Err(why) => return Err(format!("error getting csv header: {}", why)), Ok(header) => header, }, None => return Err("csv header reading failed".to_string()), }; let csv_cols: Vec<String> = { let cols_iter = 
csv_header.split(csv_desc.delimiter); match csv_desc.quote { Some(q) => cols_iter.map(|s| s.trim_matches(q).to_string()).collect(), None => cols_iter.map(|s| s.to_string()).collect(), } }; Ok(csv_cols) } fn build_index(csv_desc: &CsvDesc) -> Result<HashMap<String, u64>, String> { let mut csv_index = HashMap::new(); let csv_file = match File::open(csv_desc.file_path) { Err(why) => panic!( "couldn't open csv @ {}: {}", csv_desc.file_path.display(), why ), Ok(file) => file, }; let csv_reader = BufReader::new(csv_file); let mut csv_line_iter = csv_reader.lines(); let mut offset_in_file: u64 = 0; let mut expected_col_count = 0; let mut row_idx = 0; loop { let csv_row: String = match csv_line_iter.next() { Some(result) => match result { Err(why) => return Err(format!("error getting csv row: {}", why)), Ok(header) => header, }, None => break, }; let csv_cols: Vec<String> = { let cols_iter = csv_row.split(csv_desc.delimiter); match csv_desc.quote { Some(q) => cols_iter.map(|s| s.trim_matches(q).to_string()).collect(), None => cols_iter.map(|s| s.to_string()).collect(), } }; let curr_col_count = csv_cols.len(); if curr_col_count == 0 { return Err(format!("zero columns in row{}", row_idx)); } if expected_col_count != 0 && expected_col_count != curr_col_count { return Err(format!( "{} columns in row #{}, {} expected", curr_col_count, row_idx, expected_col_count )); } expected_col_count = curr_col_count; row_idx += 1; let key = format!("{}{}", csv_cols[0], csv_cols[1]); csv_index.insert(key, offset_in_file); offset_in_file += (csv_row.len() + 1) as u64; } Ok(csv_index) } fn get_csv_row(csv_desc: &CsvDesc, line_offset: u64) -> Result<Vec<String>, String> { let mut csv_file = match File::open(csv_desc.file_path) { Err(why) => panic!( "couldn't open csv @ {}: {}", csv_desc.file_path.display(), why ), Ok(file) => file, }; if let Err(e) = csv_file.seek(SeekFrom::Start(line_offset)) { return Err(format!("error seeking offset: {}", e)); } let mut csv_reader = 
BufReader::new(csv_file); let mut row_buff = String::new(); match csv_reader.read_line(&mut row_buff) { Ok(_n) => { if row_buff.ends_with("\n") { row_buff.pop(); } } Err(e) => return Err(format!("error gettig csv row: {}", e)), }; let result: Vec<String> = { let cols_iter = row_buff.split(csv_desc.delimiter); match csv_desc.quote { Some(q) => cols_iter.map(|s| s.trim_matches(q).to_string()).collect(), None => cols_iter.map(|s| s.to_string()).collect(), } }; Ok(result) } static USAGE: &'static str = " Usage: rusty-csv-diff <csv1> <delim1> <quote1> <csv2> <delim2> <quote2> rusty-csv-diff --help Options: -h, --help Show this message. "; #[derive(Debug, Deserialize)] struct Args { arg_csv1: String, arg_delim1: String, arg_quote1: String, arg_csv2: String, arg_delim2: String, arg_quote2: String, } fn main() { /* 1. Parse arguments 2. Open CSV files 3. Get columns (cols_N) 4. Get intersection of those two sets of columns(cols_to_compare) 5. Create {column name : column index in cols_N} dicts 6. Create {CSV_col_value : CSV row index in file} dicts, where CSV_col_value is a unique key made of the value of several CSV columns. For example, {Alex38 : 76}. Here the name and age form a unique key for the 76th CSV row. 7. Get intersection of key sets of dicts from step 6 (row_keys_to_compare) 8. 
Loop through row_keys_to_compare, use dicts from step 6 to get line numbers for CSV files 8.1 Loop through cols_to_compare, use dicts from step 5 to extract column values from CSV rows 8.2 Compare values Input parameters: CSV paths, delimiters, quotes For example, ./main file_1.csv "," "'" file_2.csv " " "" */ /*** 0 ***/ log::set_logger(&MY_LOGGER).unwrap(); log::set_max_level(LevelFilter::Error); /*** 1 ***/ let args: Args = Docopt::new(USAGE) .and_then(|d| d.deserialize()) .unwrap_or_else(|e| e.exit()); let csv_desc_1: CsvDesc = match parse_args(&args.arg_csv1, &args.arg_delim1, &args.arg_quote1) { Err(why) => panic!("error parsing arguments for CSV #1: {}", why), Ok(result) => result, }; let csv_desc_2: CsvDesc = match parse_args(&args.arg_csv2, &args.arg_delim2, &args.arg_quote2) { Err(why) => panic!("error parsing arguments for CSV #2: {}", why), Ok(result) => result, }; /*** 2&3 ***/ let csv_cols_1: Vec<String> = match get_csv_cols(&csv_desc_1) { Err(why) => panic!("couldn't get columns: {}", why), Ok(cols) => cols, }; let csv_cols_2: Vec<String> = match get_csv_cols(&csv_desc_2) { Err(why) => panic!("couldn't get columns: {}", why), Ok(cols) => cols, }; /*** 5 ***/ let mut csv_col_index_1 = HashMap::new(); for i in 0..csv_cols_1.len() { let key = csv_cols_1[i].clone(); if csv_col_index_1.contains_key(&key) { panic!("duplicate column found in CSV #1: {}", key); }; csv_col_index_1.insert(key, i); } info!("{:?}", csv_col_index_1); let mut csv_col_index_2 = HashMap::new(); for i in 0..csv_cols_2.len() { let key = csv_cols_2[i].clone(); if csv_col_index_2.contains_key(&key) { panic!("duplicate column found in CSV #1: {}", key); }; csv_col_index_2.insert(key, i); } info!("{:?}", csv_col_index_2); /*** 4 ***/ let mut cols_to_compare = HashSet::new(); for col_1 in csv_col_index_1.keys() { if csv_col_index_2.contains_key(col_1) { cols_to_compare.insert(col_1); }; } info!("{:?}", cols_to_compare); /*** 6 ***/ // let's assume that the unique key is (col_0 + col_1) let 
csv_index_1 = match build_index(&csv_desc_1) { Err(why) => panic!("failed building index #1: {}", why), Ok(index) => index, }; let csv_index_2 = match build_index(&csv_desc_2) { Err(why) => panic!("failed building index #2: {}", why), Ok(index) => index, }; /*** 7 ***/ let mut row_keys_to_compare = HashSet::new(); for key_1 in csv_index_1.keys() { if csv_index_2.contains_key(key_1) { row_keys_to_compare.insert(key_1); }; } info!("{:?}", row_keys_to_compare); /*** 8 ***/ for row_key in row_keys_to_compare { let index_1 = *csv_index_1.get(row_key).unwrap(); let index_2 = *csv_index_2.get(row_key).unwrap(); let row_1 = match get_csv_row(&csv_desc_1, index_1) { Ok(row) => row, Err(e) => panic!("failed getting csv row #1: {}", e), }; let row_2 = match get_csv_row(&csv_desc_2, index_2) { Ok(row) => row, Err(e) => panic!("failed getting csv row #2: {}", e), }; info!("comparing {}:", row_key); info!("line #1: {:?}", row_1); info!("line #2: {:?}", row_2); for col in &cols_to_compare { let col_index_1 = *csv_col_index_1.get(*col).unwrap(); let col_index_2 = *csv_col_index_2.get(*col).unwrap(); info!( "column {}, index_1={}, index_2={}", col, col_index_1, col_index_2 ); if row_1[col_index_1] != row_2[col_index_2] { println!( "found a difference for {}, {}: {} / {}", row_key, col, row_1[col_index_1], row_2[col_index_2] ); } } } }<|fim▁end|>
} fn parse_args<'a>(
<|file_name|>pstypes.py<|end_file_name|><|fim▁begin|>import ptypes, pecoff from ptypes import * from . import error, ldrtypes, rtltypes, umtypes, ketypes, Ntddk, heaptypes, sdkddkver from .datatypes import * class PEB_FREE_BLOCK(pstruct.type): pass class PPEB_FREE_BLOCK(P(PEB_FREE_BLOCK)): pass PEB_FREE_BLOCK._fields_ = [(PPEB_FREE_BLOCK, 'Next'), (ULONG, 'Size')] class _Win32kCallbackTable(pstruct.type, versioned): _fields_ = [ (PVOID, 'fnCOPYDATA'), (PVOID, 'fnCOPYGLOBALDATA'), (PVOID, 'fnDWORD'), (PVOID, 'fnNCDESTROY'), (PVOID, 'fnDWORDOPTINLPMSG'), (PVOID, 'fnINOUTDRAG'), (PVOID, 'fnGETTEXTLENGTHS'), (PVOID, 'fnINCNTOUTSTRING'), (PVOID, 'fnPOUTLPINT'), (PVOID, 'fnINLPCOMPAREITEMSTRUCT'), (PVOID, 'fnINLPCREATESTRUCT'), (PVOID, 'fnINLPDELETEITEMSTRUCT'), (PVOID, 'fnINLPDRAWITEMSTRUCT'), (PVOID, 'fnPOPTINLPUINT'), (PVOID, 'fnPOPTINLPUINT2'), (PVOID, 'fnINLPMDICREATESTRUCT'), (PVOID, 'fnINOUTLPMEASUREITEMSTRUCT'), (PVOID, 'fnINLPWINDOWPOS'), (PVOID, 'fnINOUTLPPOINT5'), (PVOID, 'fnINOUTLPSCROLLINFO'), (PVOID, 'fnINOUTLPRECT'), (PVOID, 'fnINOUTNCCALCSIZE'), (PVOID, 'fnINOUTLPPOINT5_'), (PVOID, 'fnINPAINTCLIPBRD'), (PVOID, 'fnINSIZECLIPBRD'), (PVOID, 'fnINDESTROYCLIPBRD'), (PVOID, 'fnINSTRING'), (PVOID, 'fnINSTRINGNULL'), (PVOID, 'fnINDEVICECHANGE'), (PVOID, 'fnPOWERBROADCAST'), (PVOID, 'fnINLPUAHDRAWMENU'), (PVOID, 'fnOPTOUTLPDWORDOPTOUTLPDWORD'), (PVOID, 'fnOPTOUTLPDWORDOPTOUTLPDWORD_'), (PVOID, 'fnOUTDWORDINDWORD'), (PVOID, 'fnOUTLPRECT'), (PVOID, 'fnOUTSTRING'), (PVOID, 'fnPOPTINLPUINT3'), (PVOID, 'fnPOUTLPINT2'), (PVOID, 'fnSENTDDEMSG'), (PVOID, 'fnINOUTSTYLECHANGE'), (PVOID, 'fnHkINDWORD'), (PVOID, 'fnHkINLPCBTACTIVATESTRUCT'), (PVOID, 'fnHkINLPCBTCREATESTRUCT'), (PVOID, 'fnHkINLPDEBUGHOOKSTRUCT'), (PVOID, 'fnHkINLPMOUSEHOOKSTRUCTEX'), (PVOID, 'fnHkINLPKBDLLHOOKSTRUCT'), (PVOID, 'fnHkINLPMSLLHOOKSTRUCT'), (PVOID, 'fnHkINLPMSG'), (PVOID, 'fnHkINLPRECT'), (PVOID, 'fnHkOPTINLPEVENTMSG'),<|fim▁hole|> (PVOID, 'ClientCallDummyCallback'), (PVOID, 
'fnKEYBOARDCORRECTIONCALLOUT'), (PVOID, 'fnOUTLPCOMBOBOXINFO'), (PVOID, 'fnINLPCOMPAREITEMSTRUCT2'), (PVOID, 'xxxClientCallDevCallbackCapture'), (PVOID, 'xxxClientCallDitThread'), (PVOID, 'xxxClientEnableMMCSS'), (PVOID, 'xxxClientUpdateDpi'), (PVOID, 'xxxClientExpandStringW'), (PVOID, 'ClientCopyDDEIn1'), (PVOID, 'ClientCopyDDEIn2'), (PVOID, 'ClientCopyDDEOut1'), (PVOID, 'ClientCopyDDEOut2'), (PVOID, 'ClientCopyImage'), (PVOID, 'ClientEventCallback'), (PVOID, 'ClientFindMnemChar'), (PVOID, 'ClientFreeDDEHandle'), (PVOID, 'ClientFreeLibrary'), (PVOID, 'ClientGetCharsetInfo'), (PVOID, 'ClientGetDDEFlags'), (PVOID, 'ClientGetDDEHookData'), (PVOID, 'ClientGetListboxString'), (PVOID, 'ClientGetMessageMPH'), (PVOID, 'ClientLoadImage'), (PVOID, 'ClientLoadLibrary'), (PVOID, 'ClientLoadMenu'), (PVOID, 'ClientLoadLocalT1Fonts'), (PVOID, 'ClientPSMTextOut'), (PVOID, 'ClientLpkDrawTextEx'), (PVOID, 'ClientExtTextOutW'), (PVOID, 'ClientGetTextExtentPointW'), (PVOID, 'ClientCharToWchar'), (PVOID, 'ClientAddFontResourceW'), (PVOID, 'ClientThreadSetup'), (PVOID, 'ClientDeliverUserApc'), (PVOID, 'ClientNoMemoryPopup'), (PVOID, 'ClientMonitorEnumProc'), (PVOID, 'ClientCallWinEventProc'), (PVOID, 'ClientWaitMessageExMPH'), (PVOID, 'ClientWOWGetProcModule'), (PVOID, 'ClientWOWTask16SchedNotify'), (PVOID, 'ClientImmLoadLayout'), (PVOID, 'ClientImmProcessKey'), (PVOID, 'fnIMECONTROL'), (PVOID, 'fnINWPARAMDBCSCHAR'), (PVOID, 'fnGETTEXTLENGTHS2'), (PVOID, 'fnINLPKDRAWSWITCHWND'), (PVOID, 'ClientLoadStringW'), (PVOID, 'ClientLoadOLE'), (PVOID, 'ClientRegisterDragDrop'), (PVOID, 'ClientRevokeDragDrop'), (PVOID, 'fnINOUTMENUGETOBJECT'), (PVOID, 'ClientPrinterThunk'), (PVOID, 'fnOUTLPCOMBOBOXINFO2'), (PVOID, 'fnOUTLPSCROLLBARINFO'), (PVOID, 'fnINLPUAHDRAWMENU2'), (PVOID, 'fnINLPUAHDRAWMENUITEM'), (PVOID, 'fnINLPUAHDRAWMENU3'), (PVOID, 'fnINOUTLPUAHMEASUREMENUITEM'), (PVOID, 'fnINLPUAHDRAWMENU4'), (PVOID, 'fnOUTLPTITLEBARINFOEX'), (PVOID, 'fnTOUCH'), (PVOID, 'fnGESTURE'), (PVOID, 
'fnPOPTINLPUINT4'), (PVOID, 'fnPOPTINLPUINT5'), (PVOID, 'xxxClientCallDefaultInputHandler'), (PVOID, 'fnEMPTY'), (PVOID, 'ClientRimDevCallback'), (PVOID, 'xxxClientCallMinTouchHitTestingCallback'), (PVOID, 'ClientCallLocalMouseHooks'), (PVOID, 'xxxClientBroadcastThemeChange'), (PVOID, 'xxxClientCallDevCallbackSimple'), (PVOID, 'xxxClientAllocWindowClassExtraBytes'), (PVOID, 'xxxClientFreeWindowClassExtraBytes'), (PVOID, 'fnGETWINDOWDATA'), (PVOID, 'fnINOUTSTYLECHANGE2'), (PVOID, 'fnHkINLPMOUSEHOOKSTRUCTEX2'), ] class PEB(pstruct.type, versioned): ''' 0x0098 NT 3.51 0x0150 NT 4.0 0x01E8 Win2k 0x020C XP 0x0230 WS03 0x0238 Vista 0x0240 Win7_BETA 0x0248 Win6 0x0250 Win8 0x045C Win10 ''' class BitField(pbinary.flags): _fields_ = [ (1, 'ImageUsesLargePages'), (1, 'IsProtectedProcess'), (1, 'IsLegacyProcess'), (1, 'IsImageDynamicallyRelocated'), (1, 'SkipPatchingUser32Forwarders'), (1, 'SpareBits'), ] class CrossProcessFlags(pbinary.flags): _fields_ = [ (1, 'ProcessInJob'), (1, 'ProcessInitializing'), (1, 'ProcessUsingVEH'), (1, 'ProcessUsingVCH'), (1, 'ProcessUsingFTH'), (27, 'ReservedBits0'), ] class NtGlobalFlag(pbinary.flags): def __init__(self, **attrs): super(PEB.NtGlobalFlag, self).__init__(**attrs) f = [] f.extend([ (1, 'FLG_STOP_ON_EXCEPTION'), # 0x00000001 (1, 'FLG_SHOW_LDR_SNAPS'), # 0x00000002 (1, 'FLG_DEBUG_INITIAL_COMMAND'), # 0x00000004 (1, 'FLG_STOP_ON_HUNG_GUI'), # 0x00000008 (1, 'FLG_HEAP_ENABLE_TAIL_CHECK'), # 0x00000010 (1, 'FLG_HEAP_ENABLE_FREE_CHECK'), # 0x00000020 (1, 'FLG_HEAP_VALIDATE_PARAMETERS'), # 0x00000040 (1, 'FLG_HEAP_VALIDATE_ALL'), # 0x00000080 (1, 'FLG_POOL_ENABLE_TAIL_CHECK'), # 0x00000100 (1, 'FLG_POOL_ENABLE_FREE_CHECK'), # 0x00000200 (1, 'FLG_POOL_ENABLE_TAGGING'), # 0x00000400 (1, 'FLG_HEAP_ENABLE_TAGGING'), # 0x00000800 (1, 'FLG_USER_STACK_TRACE_DB'), # 0x00001000 (1, 'FLG_KERNEL_STACK_TRACE_DB'), # 0x00002000 (1, 'FLG_MAINTAIN_OBJECT_TYPELIST'), # 0x00004000 (1, 'FLG_HEAP_ENABLE_TAG_BY_DLL'), # 0x00008000 (1, 
'FLG_IGNORE_DEBUG_PRIV'), # 0x00010000 (1, 'FLG_ENABLE_CSRDEBUG'), # 0x00020000 (1, 'FLG_ENABLE_KDEBUG_SYMBOL_LOAD'), # 0x00040000 (1, 'FLG_DISABLE_PAGE_KERNEL_STACKS'), # 0x00080000 ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) < sdkddkver.NTDDI_WINXP: f.append((1, 'FLG_HEAP_ENABLE_CALL_TRACING')) #0x00100000 else: f.append((1, 'FLG_ENABLE_SYSTEM_CRIT_BREAKS')) #0x00100000 f.extend([ (1, 'FLG_HEAP_DISABLE_COALESCING'), # 0x00200000 (1, 'FLG_ENABLE_CLOSE_EXCEPTIONS'), # 0x00400000 (1, 'FLG_ENABLE_EXCEPTION_LOGGING'), # 0x00800000 (1, 'FLG_ENABLE_HANDLE_TYPE_TAGGING'), # 0x01000000 (1, 'FLG_HEAP_PAGE_ALLOCS'), # 0x02000000 (1, 'FLG_DEBUG_INITIAL_COMMAND_EX'), # 0x04000000 ]) f.append((1+1+1+1+1, 'FLG_RESERVED')) self._fields_ = list(reversed(f)) def __repr__(self): ofs = '[{:x}]'.format(self.getoffset()) names = '|'.join((k for k, v in self.items() if v)) return ' '.join([ofs, self.name(), names, '{!r}'.format(self.serialize())]) class TracingFlags(pbinary.flags): _fields_ = [ (1, 'HeapTracingEnabled'), (1, 'CritSecTracingEnabled'), (1, 'LibLoaderTracingEnabled'), (29, 'SpareTracingBits'), ] def __init__(self, **attrs): super(PEB, self).__init__(**attrs) self._fields_ = f = [] aligned = dyn.align(8 if getattr(self, 'WIN64', False) else 4) f.extend([ (UCHAR, 'InheritedAddressSpace'), (UCHAR, 'ReadImageFileExecOptions'), (UCHAR, 'BeingDebugged'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_VISTA: f.append( (pbinary.littleendian(PEB.BitField), 'BitField') ) else: raise error.NdkUnsupportedVersion(self) f.append( (BOOLEAN, 'SpareBool') ) f.extend([ (aligned, 'align(Mutant)'), (HANDLE, 'Mutant'), (P(pecoff.Executable.File), 'ImageBaseAddress'), (ldrtypes.PPEB_LDR_DATA, 'Ldr'), (P(rtltypes.RTL_USER_PROCESS_PARAMETERS), 'ProcessParameters'), (PVOID, 'SubSystemData'), (P(heaptypes.HEAP), 'ProcessHeap'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WIN7: f.extend([ (P(rtltypes.RTL_CRITICAL_SECTION), 'FastPebLock'), (PVOID, 
'AltThunkSListPtr'), (PVOID, 'IFEOKey'), (pbinary.littleendian(PEB.CrossProcessFlags), 'CrossProcessFlags'), (aligned, 'align(UserSharedInfoPtr)'), (P(_Win32kCallbackTable), 'UserSharedInfoPtr'), (ULONG, 'SystemReserved'), (ULONG, 'AtlThunkSListPtr32') if getattr(self, 'WIN64', False) else (ULONG, 'SpareUlong'), (P(API_SET_MAP), 'ApiSetMap'), ]) elif sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_VISTA: raise error.NdkUnsupportedVersion(self) f.extend([ (P(rtltypes.RTL_CRITICAL_SECTION), 'FastPebLock'), (PVOID, 'AltThunkSListPtr'), (PVOID, 'IFEOKey'), (ULONG, 'CrossProcessFlags'), (P(_Win32kCallbackTable), 'UserSharedInfoPtr'), (ULONG, 'SystemReserved'), (ULONG, 'SpareUlong'), (PVOID, 'SparePebPtr0'), ]) else: raise error.NdkUnsupportedVersion(self) f.extend([ (P(rtltypes.RTL_CRITICAL_SECTION), 'FastPebLock'), (PVOID, 'FastPebLockRoutine'), (PVOID, 'FastPebUnlockRoutine'), (ULONG, 'EnvironmentUpdateCount'), (P(_Win32kCallbackTable), 'KernelCallbackTable'), (PVOID, 'EventLogSection'), (PVOID, 'EventLog'), (PPEB_FREE_BLOCK, 'FreeList'), ]) f.extend([ (ULONG, 'TlsExpansionCounter'), (aligned, 'align(TlsBitmap)'), (PVOID, 'TlsBitmap'), # FIXME: Does TlsBitmapBits represent the number of bytes that are in use? 
(dyn.clone(BitmapBitsUlong, _object_=ULONG, length=2), 'TlsBitmapBits'), (PVOID, 'ReadOnlySharedMemoryBase'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_VISTA: f.append((PVOID, 'HotpatchInformation')) else: f.append((PVOID, 'ReadOnlySharedMemoryHeap')) f.extend([ (P(PVOID), 'ReadOnlyStaticServerData'), (PVOID, 'AnsiCodePageData'), (PVOID, 'OemCodePageData'), (PVOID, 'UnicodeCaseTableData'), (ULONG, 'NumberOfProcessors'), (pbinary.littleendian(PEB.NtGlobalFlag), 'NtGlobalFlag'), (dyn.align(8), 'Reserved'), (LARGE_INTEGER, 'CriticalSectionTimeout'), (ULONGLONG if getattr(self, 'WIN64', False) else ULONG, 'HeapSegmentReserve'), (ULONGLONG if getattr(self, 'WIN64', False) else ULONG, 'HeapSegmentCommit'), (ULONGLONG if getattr(self, 'WIN64', False) else ULONG, 'HeapDeCommitTotalFreeThreshold'), (ULONGLONG if getattr(self, 'WIN64', False) else ULONG, 'HeapDeCommitFreeBlockThreshold'), (ULONG, 'NumberOfHeaps'), (ULONG, 'MaximumNumberOfHeaps'), (lambda s: P(dyn.clone(heaptypes.ProcessHeapEntries, length=s['NumberOfHeaps'].li.int())), 'ProcessHeaps'), # (P(win32k.GDI_HANDLE_TABLE), 'GdiSharedHandleTable'), (PVOID, 'GdiSharedHandleTable'), (PVOID, 'ProcessStarterHelper'), (ULONG, 'GdiDCAttributeList'), ]) f.extend([ (aligned, 'align(LoaderLock)'), (P(rtltypes.RTL_CRITICAL_SECTION), 'LoaderLock') ]) f.extend([ (ULONG, 'OSMajorVersion'), (ULONG, 'OSMinorVersion'), (USHORT, 'OSBuildNumber'), (USHORT, 'OSCSDVersion'), (ULONG, 'OSPlatformId'), (ULONG, 'ImageSubSystem'), (ULONG, 'ImageSubSystemMajorVersion'), (ULONG, 'ImageSubSystemMinorVersion'), (aligned, 'align(ActiveProcessAffinityMask)'), (ULONG, 'ActiveProcessAffinityMask'), (aligned, 'align(GdiHandleBuffer)'), (dyn.array(ULONG, 0x3c if getattr(self, 'WIN64', False) else 0x22), 'GdiHandleBuffer'), (PVOID, 'PostProcessInitRoutine'), (PVOID, 'TlsExpansionBitmap'), (dyn.clone(BitmapBitsUlong, _object_=ULONG, length=0x20), 'TlsExpansionBitmapBits'), (ULONG, 'SessionId'), ]) if 
sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WINBLUE: f.extend([ (dyn.block(4 if getattr(self, 'WIN64', False) else 0), 'Padding5'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WINXP: f.extend([ (aligned, 'align(AppCompatFlags)'), (ULARGE_INTEGER, 'AppCompatFlags'), (ULARGE_INTEGER, 'AppCompatFlagsUser'), (PVOID, 'pShimData'), (PVOID, 'AppCompatInfo'), (umtypes.UNICODE_STRING, 'CSDVersion'), (PVOID, 'ActivationContextData'), # FIXME: P(_ACTIVATION_CONTEXT_DATA) (PVOID, 'ProcessAssemblyStorageMap'), # FIXME: P(_ASSEMBLY_STORAGE_MAP) (PVOID, 'SystemDefaultActivationContextData'), # FIXME: P(_ACTIVATION_CONTEXT_DATA) (PVOID, 'SystemAssemblyStorageMap'), # FIXME: P(_ASSEMBLY_STORAGE_MAP) (ULONGLONG if getattr(self, 'WIN64', False) else ULONG, 'MinimumStackCommit'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WS03: f.extend([ (PVOID, 'FlsCallback'), # FIXME: P(_FLS_CALLBACK_INFO) (LIST_ENTRY, 'FlsListHead'), (PVOID, 'FlsBitmap'), (dyn.clone(BitmapBitsUlong, _object_=ULONG, length=4), 'FlsBitmapBits'), (ULONG, 'FlsHighIndex'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_VISTA: f.extend([ (aligned, 'align(WerRegistrationData)'), (PVOID, 'WerRegistrationData'), (PVOID, 'WerShipAssertPtr'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WIN7: f.extend([ (PVOID, 'pContextData'), (PVOID, 'pImageHeaderHash'), (pbinary.littleendian(PEB.TracingFlags), 'TracingFlags') ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WINBLUE: f.extend([ (dyn.block(4 if getattr(self, 'WIN64', False) else 0), 'Padding6'), (ULONGLONG, 'CsrServerReadOnlySharedMemoryBase') ]) elif sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WIN8: f.extend([ (ULONGLONG, 'CsrServerReadOnlySharedMemoryBase') ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WIN10_TH2: f.extend([ (ULONG, 'TppWorkerpListLock'), (LIST_ENTRY, 'TppWorkerpList'), (dyn.array(PVOID, 128), 
'WaitOnAddressHashTable'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WIN10_RS3: f.extend([ (PVOID, 'TelemetryCoverageHeader'), (ULONG, 'CloudFileFlags'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WIN10_RS4: f.extend([ (ULONG, 'CloudFileDiagFlags'), (CHAR, 'PlaceHolderCompatibilityMode'), (dyn.block(7), 'PlaceHolderCompatibilityModeReserved'), ]) # FIXME: Some fields added for windows 10 RS5 # See https://www.geoffchappell.com/studies/windows/win32/ntdll/structs/peb/index.htm return def getmodulebyname(self, name): ldr = self['Ldr'].d.l for m in ldr.walk(): if m['BaseDllName'].str() == name: return m continue raise KeyError(name) def getmodulebyaddress(self, address): ldr = self['Ldr'].d.l for m in ldr.walk(): start, size = m['DllBase'].int(), m['SizeOfImage'].int() left, right = start, start+size if address >= left and address <= right: return m continue raise KeyError(name) def getmodulebyfullname(self, name): ldr = self['Ldr'].d.l name = name.lower().replace('\\', '/') for m in ldr.walk(): if m['FullDllName'].str().lower().replace('\\', '/') == name: return m continue raise KeyError(name) class TEB_ACTIVE_FRAME_CONTEXT(pstruct.type): _fields_ = [ (ULONG, 'Flags'), (P(umtypes.PSTR), 'FrameName'), ] class TEB_ACTIVE_FRAME(pstruct.type): _fields_ = [ (ULONG, 'Flags'), (lambda s: P(TEB_ACTIVE_FRAME), 'Previous'), (P(TEB_ACTIVE_FRAME_CONTEXT), 'Context'), ] class PTEB_ACTIVE_FRAME(P(TEB_ACTIVE_FRAME)): pass class GDI_TEB_BATCH(pstruct.type): _fields_ = [ (ULONG, 'Offset'), (HANDLE, 'HDC'), (dyn.array(ULONG, 0x136), 'Buffer'), ] class TEB(pstruct.type, versioned): ''' 0x0F28 NT 3.51 0x0F88 NT 4.0 0x0FA4 Win2k 0x0FB4 prior to XP SP2 0x0FB8 XP SP2/WS03+ 0x0FBC WS03 SP1+ 0x0FF8 Vista/WS08 0x0FE4 Win7/WS08 R2 0x0FE8 Win8-Win8.1/WS12 0x1000 Win10 ''' @pbinary.littleendian class _SameTebFlags(pbinary.flags): _fields_ = [ (1, 'SafeThunkCall'), (1, 'InDebugPrint'), (1, 'HasFiberData'), (1, 'SkipThreadAttach'), (1, 
'WerInShipAssertCode'), (1, 'RanProcessInit'), (1, 'ClonedThread'), (1, 'SuppressDebugMsg'), (1, 'DisableUserStackWalk'), (1, 'RtlExceptionAttached'), (1, 'InitialThread'), (1, 'SessionAware'), (1, 'LoadOwner'), (1, 'LoaderWorker'), (2, 'SpareSameTebBits'), ] def __init__(self, **attrs): super(TEB, self).__init__(**attrs) self._fields_ = f = [] aligned = dyn.align(8 if getattr(self, 'WIN64', False) else 4) f.extend([ (NT_TIB, 'Tib'), (PVOID, 'EnvironmentPointer'), (umtypes.CLIENT_ID, 'ClientId'), (PVOID, 'ActiveRpcHandle'), (PVOID, 'ThreadLocalStoragePointer'), (P(PEB), 'ProcessEnvironmentBlock'), (ULONG, 'LastErrorValue'), (ULONG, 'CountOfOwnedCriticalSections'), (PVOID, 'CsrClientThread'), (P(Ntddk.W32THREAD), 'Win32ThreadInfo'), (dyn.array(ULONG, 0x1a), 'User32Reserved'), (dyn.array(ULONG, 5), 'UserReserved'), (aligned, 'align(WOW32Reserved)'), (PVOID, 'WOW32Reserved'), (LCID, 'CurrentLocale'), (ULONG, 'FpSoftwareStatusRegister'), (dyn.array(PVOID, 0x36), 'SystemReserved1'), (LONG, 'ExceptionCode'), (aligned, 'align(ActivationContextStackPointer)'), (P(Ntddk.ACTIVATION_CONTEXT_STACK), 'ActivationContextStackPointer'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) < sdkddkver.NTDDI_WS03: f.append((dyn.block(28 if getattr(self, 'WIN64', False) else 24), 'SpareBytes1')) elif sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) == sdkddkver.NTDDI_WS03: f.append((dyn.block(28 if getattr(self, 'WIN64', False) else 0x28), 'SpareBytes1')) else: f.append((dyn.block(24 if getattr(self, 'WIN64', False) else 0x24), 'SpareBytes1')) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_VISTA: f.append((ULONG, 'TxFsContext')) f.extend([ (aligned, 'align(GdiTebBatch)'), (GDI_TEB_BATCH, 'GdiTebBatch'), (aligned, 'align(RealClientId)'), (umtypes.CLIENT_ID, 'RealClientId'), (PVOID, 'GdiCachedProcessHandle'), (ULONG, 'GdiClientPID'), (ULONG, 'GdiClientTID'), (PVOID, 'GdiThreadLocalInfo'), (dyn.array(PVOID, 62), 'Win32ClientInfo'), (dyn.array(PVOID, 0xe9), 'glDispatchTable'), 
(dyn.array(PVOID, 0x1d), 'glReserved1'), (PVOID, 'glReserved2'), (PVOID, 'glSectionInfo'), (PVOID, 'glSection'), (PVOID, 'glTable'), (PVOID, 'glCurrentRC'), (PVOID, 'glContext'), (aligned, 'align(LastStatusValue)'), (umtypes.NTSTATUS, 'LastStatusValue'), (aligned, 'align(StaticUnicodeString)'), (umtypes.UNICODE_STRING, 'StaticUnicodeString'), (dyn.clone(pstr.wstring, length=0x106), 'StaticUnicodeBuffer'), (aligned, 'align(DeallocationStack)'), (PVOID, 'DeallocationStack'), (dyn.array(PVOID, 0x40), 'TlsSlots'), (LIST_ENTRY, 'TlsLinks'), (PVOID, 'Vdm'), (PVOID, 'ReservedForNtRpc'), (dyn.array(PVOID, 0x2), 'DbgSsReserved'), (ULONG, 'HardErrorMode'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) < sdkddkver.NTDDI_VISTA: f.extend([ (aligned, 'align(Instrumentation)'), (dyn.array(PVOID, 14 if getattr(self, 'WIN64', False) else 16), 'Instrumentation') ]) else: f.extend([ (aligned, 'align(Instrumentation)'), (dyn.array(PVOID, 11 if getattr(self, 'WIN64', False) else 9), 'Instrumentation') ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) <= sdkddkver.NTDDI_WS03: f.extend([ (PVOID, 'SubProcessTag'), (PVOID, 'EtwTraceData'), ]) elif sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_VISTA: f.extend([ (GUID, 'ActivityId'), (PVOID, 'SubProcessTag'), (PVOID, 'EtwLocalData'), (PVOID, 'EtwTraceData'), ]) f.extend([ (PVOID, 'WinSockData'), (ULONG, 'GdiBatchCount'), ]) f.extend([ (UCHAR, 'InDbgPrint'), (UCHAR, 'FreeStackOnTermination'), (UCHAR, 'HasFiberData'), (UCHAR, 'IdealProcessor'), ]) f.extend([ (ULONG, 'GuaranteedStackBytes'), (aligned, 'align(ReservedForPerf)'), (PVOID, 'ReservedForPerf'), (aligned, 'align(ReservedForOle)'), (PVOID, 'ReservedForOle'), (ULONG, 'WaitingOnLoaderLock'), (dyn.block(4 if getattr(self, 'WIN64', False) else 0), 'padding(WaitingOnLoaderLock)'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) <= sdkddkver.NTDDI_WS03: f.extend([ (ULONGLONG, 'SparePointer1'), (ULONGLONG, 'SoftPatchPtr1'), (ULONGLONG, 'SoftPatchPtr2'), ]) else: f.extend([ 
(aligned, 'align(SavedPriorityState)'), (PVOID, 'SavedPriorityState'), (ULONGLONG if getattr(self, 'WIN64', False) else ULONG, 'SoftPatchPtr1'), (PVOID, 'ThreadPoolData'), ]) f.extend([ (PVOID, 'TlsExpansionSlots'), ]) if getattr(self, 'WIN64', False): f.extend([ (PVOID, 'DeallocationBStore'), (PVOID, 'BStoreLimit'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) < sdkddkver.NTDDI_WIN7: f.append((ULONG, 'ImpersonationLocale')) else: f.append((ULONG, 'MuiGeneration')) f.extend([ (ULONG, 'IsImpersonating'), (PVOID, 'NlsCache'), (PVOID, 'pShimData'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) <= sdkddkver.NTDDI_WIN7: f.append((ULONG, 'HeapVirtualAffinity')) else: f.extend([ (USHORT, 'HeapVirtualAffinity'), (USHORT, 'LowFragHeapDataSlot'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WINBLUE: f.extend([ (dyn.block(4 if getattr(self, 'WIN64', False) else 0), 'Padding7'), ]) f.extend([ (aligned, 'align(CurrentTransactionHandle)'), (PVOID, 'CurrentTransactionHandle'), (PTEB_ACTIVE_FRAME, 'ActiveFrame'), (PVOID, 'FlsData'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) <= sdkddkver.NTDDI_WS03: f.extend([ (UCHAR, 'SafeThunkCall'), (dyn.array(UCHAR, 3), 'BooleanSpare'), ]) return elif sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_VISTA: f.extend([ (PVOID, 'PreferredLangauges'), (PVOID, 'UserPrefLanguages'), (PVOID, 'MergedPrefLanguages'), (ULONG, 'MuiImpersonation'), (USHORT, 'CrossTebFlags'), (TEB._SameTebFlags, 'SameTebFlags'), (PVOID, 'TxnScopeEnterCallback'), (PVOID, 'TxnScopeExitCallback'), (PVOID, 'TxnScopeContext'), (ULONG, 'LockCount'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) <= sdkddkver.NTDDI_VISTA: f.extend([ (ULONG, 'ProcessRundown'), (ULONGLONG, 'LastSwitchTime'), (ULONGLONG, 'TotalSwitchOutTime'), (LARGE_INTEGER, 'WaitReasonBitmap'), ]) return elif sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) < sdkddkver.NTDDI_WIN10: f.extend([ (ULONG, 'SpareUlong0'), (PVOID, 'ResourceRetValue'), ]) else: f.extend([ (ULONG, 
'WowTebOffset'), (PVOID, 'ResourceRetValue'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WIN8: f.extend([ (PVOID, 'ReservedForWdf'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WIN10: f.extend([ (ULONGLONG, 'ReservedForCrt'), (GUID, 'EffectiveContainerId'), ]) return class THREAD_INFORMATION_CLASS(pint.enum): _values_ = [(name, value) for value, name in [ (0, 'ThreadBasicInformation'), (1, 'ThreadTimes'), (2, 'ThreadPriority'), (3, 'ThreadBasePriority'), (4, 'ThreadAffinityMask'), (5, 'ThreadImpersonationToken'), (6, 'ThreadDescriptorTableEntry'), (7, 'ThreadEnableAlignmentFaultFixup'), (8, 'ThreadEventPair'), (9, 'ThreadQuerySetWin32StartAddress'), (10, 'ThreadZeroTlsCell'), (11, 'ThreadPerformanceCount'), (12, 'ThreadAmILastThread'), (13, 'ThreadIdealProcessor'), (14, 'ThreadPriorityBoost'), (15, 'ThreadSetTlsArrayAddress'), (16, 'ThreadIsIoPending'), (17, 'ThreadHideFromDebugger'), (18, 'ThreadBreakOnTermination'), (19, 'ThreadSwitchLegacyState'), (20, 'ThreadIsTerminated'), (21, 'ThreadLastSystemCall'), (22, 'ThreadIoPriority'), (23, 'ThreadCycleTime'), (24, 'ThreadPagePriority'), (25, 'ThreadActualBasePriority'), (26, 'ThreadTebInformation'), (27, 'ThreadCSwitchMon'), (28, 'ThreadCSwitchPmu'), (29, 'ThreadWow64Context'), (30, 'ThreadGroupInformation'), (31, 'ThreadUmsInformation'), (32, 'ThreadCounterProfiling'), (33, 'ThreadIdealProcessorEx'), (34, 'ThreadCpuAccountingInformation'), (35, 'ThreadSuspendCount'), (36, 'ThreadHeterogeneousCpuPolicy'), (37, 'ThreadContainerId'), (38, 'ThreadNameInformation'), (39, 'ThreadProperty'), ]] class THREAD_BASIC_INFORMATION(pstruct.type, versioned): type = THREAD_INFORMATION_CLASS.byname('ThreadBasicInformation') def __init__(self, **attrs): super(THREAD_BASIC_INFORMATION, self).__init__(**attrs) self._fields_ = [ (umtypes.NTSTATUS, 'ExitStatus'), (PVOID, 'TebBaseAddress'), (umtypes.CLIENT_ID, 'ClientId'), (umtypes.KAFFINITY, 'AffinityMask'), (umtypes.KPRIORITY, 'Priority'), 
(umtypes.KPRIORITY, 'BasePriority'), ] class THREAD_PROPERTY_INFORMATION(pstruct.type): type = THREAD_INFORMATION_CLASS.byname('ThreadProperty') _fields_ = [ (ULONGLONG, 'Key'), (PVOID, 'Object'), (PVOID, 'Thread'), (ULONG, 'Flags'), ] class PROCESS_INFORMATION_CLASS(pint.enum): _values_ = [(name, value) for value, name in [ (0, 'ProcessBasicInformation'), (1, 'ProcessQuotaLimits'), (2, 'ProcessIoCounters'), (3, 'ProcessVmCounters'), (4, 'ProcessTimes'), (5, 'ProcessBasePriority'), (6, 'ProcessRaisePriority'), (7, 'ProcessDebugPort'), (8, 'ProcessExceptionPort'), (9, 'ProcessAccessToken'), (10, 'ProcessLdtInformation'), (11, 'ProcessLdtSize'), (12, 'ProcessDefaultHardErrorMode'), (13, 'ProcessIoPortHandlers'), (14, 'ProcessPooledUsageAndLimits'), (15, 'ProcessWorkingSetWatch'), (16, 'ProcessUserModeIOPL'), (17, 'ProcessEnableAlignmentFaultFixup'), (18, 'ProcessPriorityClass'), (19, 'ProcessWx86Information'), (20, 'ProcessHandleCount'), (21, 'ProcessAffinityMask'), (22, 'ProcessPriorityBoost'), (23, 'ProcessDeviceMap'), (24, 'ProcessSessionInformation'), (25, 'ProcessForegroundInformation'), (26, 'ProcessWow64Information'), (27, 'ProcessImageFileName'), (28, 'ProcessLUIDDeviceMapsEnabled'), (29, 'ProcessBreakOnTermination'), (30, 'ProcessDebugObjectHandle'), (31, 'ProcessDebugFlags'), (32, 'ProcessHandleTracing'), (33, 'ProcessIoPriority'), (34, 'ProcessExecuteFlags'), (35, 'ProcessResourceManagement'), (36, 'ProcessCookie'), (37, 'ProcessImageInformation'), (38, 'ProcessCycleTime'), (39, 'ProcessPagePriority'), (40, 'ProcessInstrumentationCallback'), (41, 'ProcessThreadStackAllocation'), (42, 'ProcessWorkingSetWatchEx'), (43, 'ProcessImageFileNameWin32'), (44, 'ProcessImageFileMapping'), (45, 'ProcessAffinityUpdateMode'), (46, 'ProcessMemoryAllocationMode'), (47, 'ProcessGroupInformation'), (48, 'ProcessTokenVirtualizationEnabled'), (49, 'ProcessConsoleHostProcess'), (50, 'ProcessWindowInformation'), (51, 'ProcessHandleInformation'), (52, 
'ProcessMitigationPolicy'), (53, 'ProcessDynamicFunctionTableInformation'), (54, 'ProcessHandleCheckingMode'), (55, 'ProcessKeepAliveCount'), (56, 'ProcessRevokeFileHandles'), (57, 'ProcessWorkingSetControl'), (58, 'ProcessHandleTable'), (59, 'ProcessCheckStackExtentsMode'), (60, 'ProcessCommandLineInformation'), (61, 'ProcessProtectionInformation'), (62, 'ProcessMemoryExhaustion'), (63, 'ProcessFaultInformation'), (64, 'ProcessTelemetryIdInformation'), (65, 'ProcessCommitReleaseInformation'), (66, 'ProcessDefaultCpuSetsInformation'), (67, 'ProcessAllowedCpuSetsInformation'), (68, 'ProcessSubsystemProcess'), (69, 'ProcessJobMemoryInformation'), (70, 'ProcessInPrivate'), (71, 'ProcessRaiseUMExceptionOnInvalidHandleClose'), (72, 'ProcessIumChallengeResponse'), (73, 'ProcessChildProcessInformation'), (74, 'ProcessHighGraphicsPriorityInformation'), (75, 'ProcessSubsystemInformation'), (76, 'ProcessEnergyValues'), (77, 'ProcessActivityThrottleState'), (78, 'ProcessActivityThrottlePolicy'), (79, 'ProcessWin32kSyscallFilterInformation'), (80, 'ProcessDisableSystemAllowedCpuSets'), (81, 'ProcessWakeInformation'), (82, 'ProcessEnergyTrackingState'), (83, 'ProcessManageWritesToExecutableMemory'), (84, 'ProcessCaptureTrustletLiveDump'), (85, 'ProcessTelemetryCoverage'), (86, 'ProcessEnclaveInformation'), (87, 'ProcessEnableReadWriteVmLogging'), (88, 'ProcessUptimeInformation'), (89, 'ProcessImageSection'), (90, 'ProcessDebugAuthInformation'), (91, 'ProcessSystemResourceManagement'), (92, 'ProcessSequenceNumber'), (93, 'ProcessLoaderDetour'), (94, 'ProcessSecurityDomainInformation'), (95, 'ProcessCombineSecurityDomainsInformation'), (96, 'ProcessEnableLogging'), (97, 'ProcessLeapSecondInformation'), (98, 'ProcessFiberShadowStackAllocation'), (99, 'ProcessFreeFiberShadowStackAllocation'), ]] class PROCESS_BASIC_INFORMATION(pstruct.type, versioned): # XXX: there's 2 versions of this structure on server 2016 # 32-bit -> 24, 32 # 64-bit -> 48, 64 _fields_ = [ (umtypes.NTSTATUS, 
'ExitStatus'), (lambda self: dyn.block(4 if getattr(self, 'WIN64', False) else 0), 'padding(ExitStatus)'), (P(PEB), 'PebBaseAddress'), (ULONG_PTR, 'AffinityMask'), (umtypes.KPRIORITY, 'BasePriority'), (lambda self: dyn.block(4 if getattr(self, 'WIN64', False) else 0), 'padding(BasePriority)'), (HANDLE, 'UniqueProcessId'), (HANDLE, 'InheritedFromUniqueProcessId'), ] class PROCESS_MEMORY_EXHAUSTION_TYPE(pint.enum, ULONG): _values_ = [(n, v) for v, n in [ (0, 'PMETypeFaultFastOnCommitFailure'), ]] class PROCESS_MEMORY_EXHAUSTION_INFO(pstruct.type): type = PROCESS_INFORMATION_CLASS.byname('ProcessMemoryExhaustion') _fields_ = [ (USHORT, 'Version'), (USHORT, 'Reserved'), (PROCESS_MEMORY_EXHAUSTION_TYPE, 'Value'), (ULONGLONG, 'Value'), ] class PROCESS_FAULT_INFORMATION(pstruct.type): type = PROCESS_INFORMATION_CLASS.byname('ProcessFaultInformation') _fields_ = [ (ULONG, 'FaultFlags'), (ULONG, 'AdditionalInfo'), ] class PROCESS_TELEMETRY_ID_INFORMATION(pstruct.type): type = PROCESS_INFORMATION_CLASS.byname('ProcessTelemetryIdInformation') _fields_ = [ (ULONG, 'HeaderSize'), (ULONG, 'ProcessId'), (ULONGLONG, 'ProcessStartKey'), (ULONGLONG, 'CreateTime'), (ULONGLONG, 'CreateInterruptTime'), (ULONGLONG, 'ProcessSequenceNumber'), (ULONGLONG, 'SessionCreateTime'), (ULONG, 'SessionId'), (ULONG, 'BootId'), (ULONG, 'ImageChecksum'), (ULONG, 'ImageTimeDateStamp'), (ULONG, 'UserIdOffset'), (ULONG, 'ImagePathOffset'), (ULONG, 'PackageNameOffset'), (ULONG, 'RelativeAppNameOffset'), (ULONG, 'CommandLineOffset'), ] @pbinary.littleendian class API_SET_SCHEMA_FLAGS_(pbinary.flags): _fields_ = [ (30, 'unused'), (1, 'HOST_EXTENSION'), (1, 'SEALED'), ] class API_SET_HEADER(pstruct.type): def __init__(self, **attrs): super(API_SET_HEADER, self).__init__(**attrs) self._fields_ = f = [] # https://www.geoffchappell.com/studies/windows/win32/apisetschema/index.htm if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) < sdkddkver.NTDDI_WIN8: f.extend([ (ULONG, 'Version'), (ULONG, 'Count'), ]) elif 
sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) >= sdkddkver.NTDDI_WIN8: f.extend([ (ULONG, 'Version'), (ULONG, 'Size'), (API_SET_SCHEMA_FLAGS_, 'Flags'), (ULONG, 'Count'), ]) else: raise error.NdkUnsupportedVersion(self) return def summary(self): res = [] for fld in self: res.append("{:s}={:s}".format(fld, "{:#x}".format(self[fld].int()) if isinstance(self[fld], pint.type) else self[fld].summary())) return ' '.join(res) class API_SET_VALUE_ENTRY(pstruct.type): class _Value(rpointer_t): _value_ = ULONG def summary(self): res = super(API_SET_VALUE_ENTRY._Value, self).summary() return '{:s} -> {!r}'.format(res, self.d.l.str()) class _object_(pstr.wstring): def blocksize(self): try: parent = self.getparent(API_SET_VALUE_ENTRY) result = parent['Size'].li.int() except (ptypes.error.ItemNotFoundError, ptypes.error.InitializationError): result = 0 return result def __Value(self): def _object_(self, parent=self): parent = self.getparent(API_SET_VALUE_ENTRY) res = parent['Size'].li.int() return dyn.clone(pstr.wstring, blocksize=lambda s, sz=res: sz) return dyn.clone(API_SET_VALUE_ENTRY._Value, _baseobject_=self._baseobject_, _object_=_object_) _fields_ = [ (lambda s: dyn.clone(s._Value, _baseobject_=s._baseobject_), 'Value'), (ULONG, 'Size'), ] class API_SET_VALUE(pstruct.type): _fields_ = [ (ULONG, 'Count'), (ULONG, 'EndOfEntriesOffset'), (ULONG, 'Hash'), (lambda s: API_SET_VALUE_ENTRY if s['Count'].li.int() > 1 else ptype.undefined, 'OriginalRedirect'), (lambda s: dyn.array(API_SET_VALUE_ENTRY, s['Count'].li.int()), 'Entry'), ] class API_SET_ENTRY(pstruct.type): _baseobject_ = None class _NameOffset(rpointer_t): _value_ = ULONG def summary(self): res = super(API_SET_ENTRY._NameOffset, self).summary() return '{:s} -> {!r}'.format(res, self.d.li.str()) class _object_(pstr.wstring): def blocksize(self): try: parent = self.getparent(API_SET_ENTRY) result = parent['NameLength'].li.int() except (ptypes.error.ItemNotFoundError, ptypes.error.InitializationError): result = 0 return 
result class _ValueOffset(rpointer_t): _value_ = ULONG _object_ = API_SET_VALUE _fields_ = [ (lambda s: dyn.clone(s._NameOffset, _baseobject_=s._baseobject_), 'NameOffset'), (ULONG, 'NameLength'), (lambda s: dyn.clone(s._ValueOffset, _baseobject_=s._baseobject_), 'ValueOffset'), ] class API_SET_MAP(pstruct.type, versioned): def __Entry(self): res = self['Header'].li return dyn.array(API_SET_ENTRY, res['Count'].int(), recurse={'_baseobject_':self}) _fields_ = [ (API_SET_HEADER, 'Header'), (__Entry, 'Entry'), ] class KSYSTEM_TIME(pstruct.type): _fields_ = [ (ULONG, 'LowPart'), (LONG, 'High1Time'), (LONG, 'High2Time'), ] def summary(self): return "LowPart={:#x} High1Time={:#x} High2Time={:#x}".format(self['LowPart'].int(), self['High1Time'].int(), self['High2Time'].int()) class WOW64_SHARED_INFORMATION(pint.enum): _values_ = [ ('SharedNtdll32LdrInitializeThunk', 0), ('SharedNtdll32KiUserExceptionDispatcher', 1), ('SharedNtdll32KiUserApcDispatcher', 2), ('SharedNtdll32KiUserCallbackDispatcher', 3), ('SharedNtdll32LdrHotPatchRoutine', 4), ('SharedNtdll32ExpInterlockedPopEntrySListFault', 5), ('SharedNtdll32ExpInterlockedPopEntrySListResume', 6), ('SharedNtdll32ExpInterlockedPopEntrySListEnd', 7), ('SharedNtdll32RtlUserThreadStart', 8), ('SharedNtdll32pQueryProcessDebugInformationRemote', 9), ('SharedNtdll32EtwpNotificationThread', 10), ('SharedNtdll32BaseAddress', 11), ('Wow64SharedPageEntriesCount', 12), ] class NT_PRODUCT_TYPE(pint.enum, ULONG): _values_ = [ ('NtProductWinNt', 1), ('NtProductLanManNt', 2), ('NtProductServer', 3), ] class ALTERNATIVE_ARCHITECTURE_TYPE(pint.enum, ULONG): _values_ = [ ('StandardDesign', 0), ('NEC98x86', 1), ('EndAlternatives', 2), ] class XSTATE_CONFIGURATION(pstruct.type): class FEATURE(pstruct.type): _fields_ = [(ULONG, 'Offset'), (ULONG, 'Size')] _fields_ = [ (ULONGLONG, 'EnabledFeatures'), (ULONG, 'Size'), (ULONG, 'OptimizedSave'), (dyn.array(FEATURE, 64), 'Features'), ] class SHARED_GLOBAL_FLAGS_(pbinary.flags): _fields_ = [ (21, 
'SpareBits'), (1, 'STATE_SEPARATION_ENABLED'), # 0x00000400 (1, 'MULTIUSERS_IN_SESSION_SKU'), # 0x00000200 (1, 'MULTI_SESSION_SKU'), # 0x00000100 (1, 'SECURE_BOOT_ENABLED'), # 0x00000080 (1, 'CONSOLE_BROKER_ENABLED'), # 0x00000040 # (1, 'SEH_VALIDATION_ENABLED'), # 0x00000040 (W7) (1, 'DYNAMIC_PROC_ENABLED'), # 0x00000020 (1, 'LKG_ENABLED'), # 0x00000010 # (1, 'SPARE'), # 0x00000010 (W7) (1, 'INSTALLER_DETECT_ENABLED'), # 0x00000008 (1, 'VIRT_ENABLED'), # 0x00000004 (1, 'ELEVATION_ENABLED'), # 0x00000002 (1, 'ERROR_PORT'), # 0x00000001 ] PROCESSOR_MAX_FEATURES = 64 class PF_(parray.type): _object_, length = BOOLEAN, PROCESSOR_MAX_FEATURES _aliases_ = [ ('FLOATING_POINT_PRECISION_ERRATA', 0), # 4.0 and higher (x86) ('FLOATING_POINT_EMULATED', 1), # 4.0 and higher (x86) ('COMPARE_EXCHANGE_DOUBLE', 2), # 4.0 and higher ('MMX_INSTRUCTIONS_AVAILABLE', 3), # 4.0 and higher ('PPC_MOVEMEM_64BIT_OK', 4), # none ('ALPHA_BYTE_INSTRUCTIONS', 5), # none ('XMMI_INSTRUCTIONS_AVAILABLE', 6), # 5.0 and higher ('3DNOW_INSTRUCTIONS_AVAILABLE', 7), # 5.0 and higher ('RDTSC_INSTRUCTION_AVAILABLE', 8), # 5.0 and higher ('PAE_ENABLED', 9), # 5.0 and higher ('XMMI64_INSTRUCTIONS_AVAILABLE', 10), # 5.1 and higher ('SSE_DAZ_MODE_AVAILABLE', 11), # none ('NX_ENABLED', 12), # late 5.1; late 5.2 and higher ('SSE3_INSTRUCTIONS_AVAILABLE', 13), # 6.0 and higher ('COMPARE_EXCHANGE128', 14), # 6.0 and higher (x64) ('COMPARE64_EXCHANGE128', 15), # none ('CHANNELS_ENABLED', 16), # 6.0 only ('XSAVE_ENABLED', 17), # 6.1 and higher ('ARM_VFP_32_REGISTERS_AVAILABLE', 18), # none ('ARM_NEON_INSTRUCTIONS_AVAILABLE', 19), # none ('SECOND_LEVEL_ADDRESS_TRANSLATION', 20), # 6.2 and higher ('VIRT_FIRMWARE_ENABLED', 21), # 6.2 and higher ('RDWRFSGSBASE_AVAILABLE', 22), # 6.2 and higher (x64) ('FASTFAIL_AVAILABLE', 23), # 6.2 and higher ('ARM_DIVIDE_INSTRUCTION_AVAILABLE', 24), # none ('ARM_64BIT_LOADSTORE_ATOMIC', 25), # none ('ARM_EXTERNAL_CACHE_AVAILABLE', 26), # none ('ARM_FMAC_INSTRUCTIONS_AVAILABLE', 27), 
# none ('RDRAND_INSTRUCTION_AVAILABLE', 28), # 6.3 and higher ('ARM_V8_INSTRUCTIONS_AVAILABLE', 29), # none ('ARM_V8_CRYPTO_INSTRUCTIONS_AVAILABLE', 30), # none ('ARM_V8_CRC32_INSTRUCTIONS_AVAILABLE', 31), # none ('RDTSCP_INSTRUCTION_AVAILABLE', 32), # 10.0 and higher ] class KUSER_SHARED_DATA(pstruct.type, versioned): # FIXME: https://www.geoffchappell.com/studies/windows/km/ntoskrnl/inc/api/ntexapi_x/kuser_shared_data/index.htm class TscQpc(pbinary.struct): _fields_ = [ (16, 'Pad'), (6, 'Shift'), (1, 'SpareFlag'), (1, 'Enabled'), ] class SharedDataFlags(pbinary.flags): _fields_ = [ (25, 'SpareBits'), (0, 'DbgStateSeparationEnabled'), # 1709 (0, 'DbgMultiUsersInSessionSKU'), # 1607 (0, 'DbgMultiSessionSKU'), # 10.0 (0, 'DbgSecureBootEnabled'), # 6.2 (1, 'DbgSEHValidationEnabled'), # 6.1 (0, 'DbgConsoleBrokerEnabled'), # 6.2 (1, 'DbgDynProcessorEnabled'), # 6.1 (1, 'DbgSystemDllRelocated'), # 6.0 (0, 'DbgLkgEnabled'), # 6.2 (1, 'DbgInstallerDetectEnabled'), # 6.0 (1, 'DbgVirtEnabled'), # 6.0 (1, 'DbgElevationEnabled'), # 6.0 (1, 'DbgErrorPortPresent'), # 6.0 ] def __init__(self, **attrs): super(KUSER_SHARED_DATA, self).__init__(**attrs) self._fields_ = f = [] PROCESSOR_MAX_FEATURES = 64 f.extend([ (ULONG, 'TickCountLowDeprecated'), (ULONG, 'TickCountMultiplier'), (KSYSTEM_TIME, 'InterruptTime'), (KSYSTEM_TIME, 'SystemTime'), (KSYSTEM_TIME, 'TimeZoneBias'), (USHORT, 'ImageNumberLow'), (USHORT, 'ImageNumberHigh'), (dyn.clone(pstr.wstring, length=260), 'NtSystemRoot'), (ULONG, 'MaxStackTraceDepth'), (ULONG, 'CryptoExponent'), (ULONG, 'TimeZoneId'), (ULONG, 'LargePageMinimum'), (dyn.array(ULONG, 7), 'Reserved2'), (NT_PRODUCT_TYPE, 'NtProductType'), (BOOLEAN, 'ProductTypeIsValid'), (dyn.align(4), 'ProductTypeIsValidAlignment'), (ULONG, 'NtMajorVersion'), (ULONG, 'NtMinorVersion'), (dyn.array(BOOLEAN, PROCESSOR_MAX_FEATURES), 'ProcessorFeatures'), # PF_ (ULONG, 'Reserved1'), (ULONG, 'Reserved3'), (ULONG, 'TimeSlip'), (ALTERNATIVE_ARCHITECTURE_TYPE, 
'AlternativeArchitecture'), (ULONG, 'AltArchitecturePad'), (LARGE_INTEGER, 'SystemExpirationDate'), (ULONG, 'SuiteMask'), (BOOLEAN, 'KdDebuggerEnabled'), (UCHAR, 'NXSupportPolicy'), (dyn.align(4), 'ActiveConsoleAlignment'), (ULONG, 'ActiveConsoleId'), (ULONG, 'DismountCount'), (ULONG, 'ComPlusPackage'), (ULONG, 'LastSystemRITEventTickCount'), (ULONG, 'NumberOfPhysicalPages'), (BOOLEAN, 'SafeBootMode'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) == sdkddkver.NTDDI_WIN7: f.append((self.TscQpc, 'TscQpc')) elif sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) > sdkddkver.NTDDI_WIN7: f.append((dyn.array(pint.uint8_t, 4), 'Reserved12')) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) == sdkddkver.NTDDI_WINXP: f.append((ULONG, 'TraceLogging')) elif sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) == sdkddkver.NTDDI_WIN7: f.extend([ (self.SharedDataFlags, 'SharedDataFlags'), (dyn.array(ULONG, 1), 'DataFlagsPad'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) in {sdkddkver.NTDDI_WINXP, sdkddkver.NTDDI_WIN7}: f.extend([ (ULONGLONG, 'TestRetInstruction'), (ULONG, 'SystemCall'), (ULONG, 'SystemCallReturn'), (dyn.array(ULONGLONG, 3), 'SystemCallPad'), ]) f.extend([ (KSYSTEM_TIME, 'TickCount'), (dyn.array(LONG, 1), 'TickCountPad'), (ULONG, 'Cookie'), ]) if sdkddkver.NTDDI_MAJOR(self.NTDDI_VERSION) == sdkddkver.NTDDI_WIN7: f.extend([ (dyn.array(ULONG, 1), 'CookiePad'), # pad to what, a ULONGLONG? (LONGLONG, 'ConsoleSessionForegroundProcessId'), (dyn.array(ULONG, 16), 'Wow64SharedInformation'), (dyn.array(USHORT, 16), 'UserModeGlobalLogger'), (ULONG, 'ImageFileExecutionOptions'), (ULONG, 'LangGenerationCount'), (ULONGLONG, 'Reserved5'), (ULONGLONG, 'InterruptTimeBias'), (ULONGLONG, 'TscQpcBias'), (ULONG, 'ActiveProcessorCount'), (USHORT, 'ActiveGroupCount'), (USHORT, 'Reserved4'), (ULONG, 'AitSamplingValue'), (ULONG, 'AppCompatFlag'), (ULONGLONG, 'SystemDllNativeRelocation'), (ULONGLONG, 'SystemDllWowRelocation'), # (dyn.array(LONG, 1), 'XStatePad'), # (dyn.align(0x10), 'XStatePad'), # ??? 
(XSTATE_CONFIGURATION, 'XState'), ]) return class ETHREAD(pstruct.type, versioned): _fields_ = [ (ketypes.KTHREAD, 'Tcb'), (LARGE_INTEGER, 'CreateTime'), (LIST_ENTRY, 'KeyedWaitChain'), # XXX: union (LONG, 'ExitStatus'), # XXX: union (LIST_ENTRY, 'PostBlockList'), # XXX: union (PVOID, 'KeyedWaitValue'), # XXX: union (ULONG, 'ActiveTimerListLock'), (LIST_ENTRY, 'ActiveTimerListHead'), (umtypes.CLIENT_ID, 'Cid'), (ketypes.KSEMAPHORE, 'KeyedWaitSemaphore'), # XXX: union # (PS_CLIENT_SECURITY_CONTEXT, 'ClientSecurity'), (dyn.block(4), 'ClientSecurity'), (LIST_ENTRY, 'IrpList'), (ULONG, 'TopLevelIrp'), # (PDEVICE_OBJECT, 'DeviceToVerify'), (P(dyn.block(0xb8)), 'DeviceToVerify'), # (_PSP_RATE_APC *, 'RateControlApc'), (dyn.block(4), 'RateControlApc'), (PVOID, 'Win32StartAddress'), (PVOID, 'SparePtr0'), (LIST_ENTRY, 'ThreadListEntry'), # (EX_RUNDOWN_REF, 'RundownProtect'), # (EX_PUSH_LOCK, 'ThreadLock'), (dyn.block(4), 'RundownProtect'), (dyn.block(4), 'ThreadLock'), (ULONG, 'ReadClusterSize'), (LONG, 'MmLockOrdering'), (ULONG, 'CrossThreadFlags'), # XXX: union (ULONG, 'SameThreadPassiveFlags'), # XXX: union (ULONG, 'SameThreadApcFlags'), # XXX (UCHAR, 'CacheManagerActive'), (UCHAR, 'DisablePageFaultClustering'), (UCHAR, 'ActiveFaultCount'), (ULONG, 'AlpcMessageId'), (PVOID, 'AlpcMessage'), # XXX: union (LIST_ENTRY, 'AlpcWaitListEntry'), (ULONG, 'CacheManagerCount'), ] class PROCESS_BASIC_INFORMATION(pstruct.type): _fields_ = [ (NTSTATUS, 'ExitStatus'), (P(PEB), 'PebBaseAddress'), (ULONG_PTR, 'AffinityMask'), (umtypes.KPRIORITY, 'BasePriority'), (ULONG_PTR, 'UniqueProcessId'), (ULONG_PTR, 'InheritedFromUniqueProcessId'), ] class PROCESS_EXTENDED_BASIC_INFORMATION(pstruct.type): @pbinary.littleendian class _Flags(pbinary.flags): 'ULONG' _fields_ = [ (28, 'SpareBits'), (1, 'IsCrossSectionCreate'), (1, 'IsProcessDeleting'), (1, 'IsWow64Process'), (1, 'IsProtectedProcess'), ] _fields_ = [ (SIZE_T, 'Size'), (PROCESS_BASIC_INFORMATION, 'BasicInfo'), (_Flags, 'Flags'), 
(ptype.undefined, 'undefined'), ] def alloc(self, **fields): res = super(PROCESS_EXTENDED_BASIC_INFORMATION, self).alloc(**fields) return res if 'Size' in fields else res.set(Size=res.size()) class COPYDATASTRUCT(pstruct.type): _fields_ = [ (ULONG_PTR, 'dwData'), (DWORD, 'cbData'), (lambda self: P(dyn.block(self['cbData'].li.int())), 'lpData'), ] def alloc(self, **fields): res = super(COPYDATASTRUCT, self).alloc(**fields) if res['lpData'].d.initializedQ(): return res if 'cbData' in fields else res.set(cbData=res['lpData'].d.size()) return res class STARTUPINFO(pstruct.type): _fields_ = [ (DWORD, 'cb'), (lambda self: getattr(self, '__string__', umtypes.PSTR), 'lpReserved'), (lambda self: getattr(self, '__string__', umtypes.PSTR), 'lpDesktop'), (lambda self: getattr(self, '__string__', umtypes.PSTR), 'lpTitle'), (DWORD, 'dwX'), (DWORD, 'dwY'), (DWORD, 'dwXSize'), (DWORD, 'dwYSize'), (DWORD, 'dwXCountChars'), (DWORD, 'dwYCountChars'), (DWORD, 'dwFillAttribute'), (DWORD, 'dwFlags'), (WORD, 'wShowWindow'), (WORD, 'cbReserved2'), (lambda self: P(dyn.block(self['cbReserved2'].li.int())), 'lpReserved2'), (HANDLE, 'hStdInput'), (HANDLE, 'hStdOutput'), (HANDLE, 'hStdError'), (ptype.undefined, 'undefined'), ] def alloc(self, **fields): res = super(STARTUPINFO, self).alloc(**fields) return res if 'cb' in fields else res.set(cb=res.size()) class STARTUPINFOA(STARTUPINFO): pass class STARTUPINFOW(STARTUPINFO): __string__ = umtypes.PWSTR if __name__ == '__main__': import ctypes def openprocess (pid): k32 = ctypes.WinDLL('kernel32.dll') res = k32.OpenProcess(0x30 | 0x0400, False, pid) return res def getcurrentprocess (): k32 = ctypes.WinDLL('kernel32.dll') return k32.GetCurrentProcess() def getPBIObj (handle): nt = ctypes.WinDLL('ntdll.dll') class ProcessBasicInformation(ctypes.Structure): _fields_ = [('Reserved1', ctypes.c_uint32), ('PebBaseAddress', ctypes.c_uint32), ('Reserved2', ctypes.c_uint32 * 2), ('UniqueProcessId', ctypes.c_uint32), ('Reserved3', ctypes.c_uint32)] pbi = 
ProcessBasicInformation() res = nt.NtQueryInformationProcess(handle, 0, ctypes.byref(pbi), ctypes.sizeof(pbi), None) return pbi handle = getcurrentprocess() pebaddress = getPBIObj(handle).PebBaseAddress import ptypes, pstypes Peb = pstypes.PEB() Peb.setoffset(pebaddress) Peb.load() Ldr = Peb['Ldr'].d.l for x in Ldr['InLoadOrderModuleList'].walk(): print(x['BaseDllName'].str(), x['FullDllName'].str()) print(hex(x['DllBase'].int()), hex(x['SizeOfImage'].int()))<|fim▁end|>
(PVOID, 'xxxClientCallDelegateThread'),
<|file_name|>test.py<|end_file_name|><|fim▁begin|>import luhn def test_checksum_len1(): assert luhn.checksum('7') == 7 def test_checksum_len2(): assert luhn.checksum('13') == 5 def test_checksum_len3(): assert luhn.checksum('383') == 3 def test_checksum_len4(): assert luhn.checksum('2827') == 3 def test_checksum_len13(): assert luhn.checksum('4346537657597') == 9 def test_checksum_len14(): assert luhn.checksum('27184931073326') == 1 def test_valid(): assert luhn.verify('356938035643809') def test_invalid(): assert not luhn.verify('4222222222222222') def test_generate(): assert luhn.generate('7992739871') == 3 <|fim▁hole|><|fim▁end|>
def test_append(): assert luhn.append('53461861341123') =='534618613411234'
<|file_name|>FooBaseH.py<|end_file_name|><|fim▁begin|>""" Example of module documentation which can be multiple-lined """ from sqlalchemy import Column, Integer, String from wopmars.Base import Base class FooBaseH(Base): """ Documentation for the class """ __tablename__ = "FooBaseH" id = Column(Integer, primary_key=True, autoincrement=True) name = Column(String(255)) state = Column(String) __mapper_args__ = {<|fim▁hole|> }<|fim▁end|>
'polymorphic_on': state, 'polymorphic_identity': "1"
<|file_name|>timepicker-section.ts<|end_file_name|><|fim▁begin|>/// <reference path="../../tsd.d.ts" /> import { Component, View, } from 'angular2/core'; import { CORE_DIRECTIVES } from 'angular2/common'; import {tabs} from '../../ng2-bootstrap'; import {TimepickerDemo} from './timepicker/timepicker-demo'; let name = 'Timepicker'; let src = 'https://github.com/valor-software/ng2-bootstrap/blob/master/components/timepicker/timepicker.ts'; // webpack html imports let doc = require('../../components/timepicker/readme.md'); let titleDoc = require('../../components/timepicker/title.md'); <|fim▁hole|> @Component({ selector: 'timepicker-section' }) @View({ template: ` <br> <section id="${name.toLowerCase()}"> <div class="row"><h1>${name}<small>(<a href="${src}">src</a>)</small></h1></div> <hr> <div class="row"><div class="col-md-12">${titleDoc}</div></div> <div class="row"> <h2>Example</h2> <div class="card card-block panel panel-default panel-body"> <timepicker-demo></timepicker-demo> </div> </div> <br> <div class="row"> <tabset> <tab heading="Markup"> <div class="card card-block panel panel-default panel-body"> <pre class="language-html"><code class="language-html" ng-non-bindable>${html}</code></pre> </div> </tab> <tab heading="TypeScript"> <div class="card card-block panel panel-default panel-body"> <pre class="language-typescript"><code class="language-typescript" ng-non-bindable>${ts}</code></pre> </div> </tab> </tabset> </div> <br> <div class="row"> <h2>API</h2> <div class="card card-block panel panel-default panel-body">${doc}</div> </div> </section> `, directives: [TimepickerDemo, tabs, CORE_DIRECTIVES] }) export class TimepickerSection { }<|fim▁end|>
let ts = require('!!prismjs?lang=typescript!./timepicker/timepicker-demo.ts'); let html = require('!!prismjs?lang=markup!./timepicker/timepicker-demo.html');
<|file_name|>TGPrdWidget.js<|end_file_name|><|fim▁begin|>define([ "dojo/_base/declare", "dojo/_base/fx", "dojo/_base/lang", "dojo/dom-style", "dojo/mouse", "dojo/on", "dijit/_WidgetBase", "dijit/_TemplatedMixin", "dojo/text!./templates/TGPrdItem.html", "dijit/_OnDijitClickMixin", "dijit/_WidgetsInTemplateMixin", "dijit/form/Button" ], function(declare, baseFx, lang, domStyle, mouse, on, _WidgetBase, _TemplatedMixin, template,_OnDijitClickMixin,_WidgetsInTemplateMixin,Button){ return declare([_WidgetBase, _OnDijitClickMixin,_TemplatedMixin,_WidgetsInTemplateMixin], { // Some default values for our author // These typically map to whatever you're passing to the constructor // 产品名称 rtzzhmc: "No Name", // Using require.toUrl, we can get a path to our AuthorWidget's space // and we want to have a default avatar, just in case // 产品默认图片 <|fim▁hole|> rimg: require.toUrl("./images/defaultAvatar.png"), // //起点金额 // code5:"", // //投资风格 // code7: "", // //收益率 // code8:"", // //产品经理 // code3:"", // //产品ID // rprid:"", // Our template - important! 
templateString: template, // A class to be applied to the root node in our template baseClass: "TGPrdWidget", // A reference to our background animation mouseAnim: null, // Colors for our background animation baseBackgroundColor: "#fff", // mouseBackgroundColor: "#def", postCreate: function(){ // Get a DOM node reference for the root of our widget var domNode = this.domNode; // Run any parent postCreate processes - can be done at any point this.inherited(arguments); // Set our DOM node's background color to white - // smoothes out the mouseenter/leave event animations domStyle.set(domNode, "backgroundColor", this.baseBackgroundColor); }, //设置属性 _setRimgAttr: function(imagePath) { // We only want to set it if it's a non-empty string if (imagePath != "") { // Save it on our widget instance - note that // we're using _set, to support anyone using // our widget's Watch functionality, to watch values change // this._set("avatar", imagePath); // Using our avatarNode attach point, set its src value this.imgNode.src = HTTP+imagePath; } } }); });<|fim▁end|>
<|file_name|>bundle.js<|end_file_name|><|fim▁begin|>(function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){ (function (Buffer){ var pbkdf2Sync = require('pbkdf2').pbkdf2Sync var MAX_VALUE = 0x7fffffff // N = Cpu cost, r = Memory cost, p = parallelization cost function scrypt (key, salt, N, r, p, dkLen, progressCallback) { if (N === 0 || (N & (N - 1)) !== 0) throw Error('N must be > 0 and a power of 2') if (N > MAX_VALUE / 128 / r) throw Error('Parameter N is too large') if (r > MAX_VALUE / 128 / p) throw Error('Parameter r is too large') var XY = new Buffer(256 * r) var V = new Buffer(128 * r * N) // pseudo global var B32 = new Int32Array(16) // salsa20_8 var x = new Int32Array(16) // salsa20_8 var _X = new Buffer(64) // blockmix_salsa8 // pseudo global var B = pbkdf2Sync(key, salt, 1, p * 128 * r, 'sha256') var tickCallback if (progressCallback) { var totalOps = p * N * 2 var currentOp = 0 tickCallback = function () { ++currentOp // send progress notifications once every 1,000 ops if (currentOp % 1000 === 0) { progressCallback({ current: currentOp, total: totalOps, percent: (currentOp / totalOps) * 100.0 }) } } } for (var i = 0; i < p; i++) { smix(B, i * 128 * r, r, N, V, XY) } return pbkdf2Sync(key, B, 1, dkLen, 'sha256') // all of these functions are actually moved to the top // due to function hoisting function smix (B, Bi, r, N, V, XY) { var Xi = 0 var Yi = 128 * r var i B.copy(XY, Xi, Bi, Bi + Yi) for (i = 0; i < N; i++) { XY.copy(V, i * Yi, Xi, Xi + Yi) blockmix_salsa8(XY, Xi, Yi, r) if (tickCallback) tickCallback() } for (i = 0; i < N; i++) 
{ var offset = Xi + (2 * r - 1) * 64 var j = XY.readUInt32LE(offset) & (N - 1) blockxor(V, j * Yi, XY, Xi, Yi) blockmix_salsa8(XY, Xi, Yi, r) if (tickCallback) tickCallback() } XY.copy(B, Bi, Xi, Xi + Yi) } function blockmix_salsa8 (BY, Bi, Yi, r) { var i arraycopy(BY, Bi + (2 * r - 1) * 64, _X, 0, 64) for (i = 0; i < 2 * r; i++) { blockxor(BY, i * 64, _X, 0, 64) salsa20_8(_X) arraycopy(_X, 0, BY, Yi + (i * 64), 64) } for (i = 0; i < r; i++) { arraycopy(BY, Yi + (i * 2) * 64, BY, Bi + (i * 64), 64) } for (i = 0; i < r; i++) { arraycopy(BY, Yi + (i * 2 + 1) * 64, BY, Bi + (i + r) * 64, 64) } } function R (a, b) { return (a << b) | (a >>> (32 - b)) } function salsa20_8 (B) { var i for (i = 0; i < 16; i++) { B32[i] = (B[i * 4 + 0] & 0xff) << 0 B32[i] |= (B[i * 4 + 1] & 0xff) << 8 B32[i] |= (B[i * 4 + 2] & 0xff) << 16 B32[i] |= (B[i * 4 + 3] & 0xff) << 24 // B32[i] = B.readUInt32LE(i*4) <--- this is signficantly slower even in Node.js } arraycopy(B32, 0, x, 0, 16) for (i = 8; i > 0; i -= 2) { x[ 4] ^= R(x[ 0] + x[12], 7) x[ 8] ^= R(x[ 4] + x[ 0], 9) x[12] ^= R(x[ 8] + x[ 4], 13) x[ 0] ^= R(x[12] + x[ 8], 18) x[ 9] ^= R(x[ 5] + x[ 1], 7) x[13] ^= R(x[ 9] + x[ 5], 9) x[ 1] ^= R(x[13] + x[ 9], 13) x[ 5] ^= R(x[ 1] + x[13], 18) x[14] ^= R(x[10] + x[ 6], 7) x[ 2] ^= R(x[14] + x[10], 9) x[ 6] ^= R(x[ 2] + x[14], 13) x[10] ^= R(x[ 6] + x[ 2], 18) x[ 3] ^= R(x[15] + x[11], 7) x[ 7] ^= R(x[ 3] + x[15], 9) x[11] ^= R(x[ 7] + x[ 3], 13) x[15] ^= R(x[11] + x[ 7], 18) x[ 1] ^= R(x[ 0] + x[ 3], 7) x[ 2] ^= R(x[ 1] + x[ 0], 9) x[ 3] ^= R(x[ 2] + x[ 1], 13) x[ 0] ^= R(x[ 3] + x[ 2], 18) x[ 6] ^= R(x[ 5] + x[ 4], 7) x[ 7] ^= R(x[ 6] + x[ 5], 9) x[ 4] ^= R(x[ 7] + x[ 6], 13) x[ 5] ^= R(x[ 4] + x[ 7], 18) x[11] ^= R(x[10] + x[ 9], 7) x[ 8] ^= R(x[11] + x[10], 9) x[ 9] ^= R(x[ 8] + x[11], 13) x[10] ^= R(x[ 9] + x[ 8], 18) x[12] ^= R(x[15] + x[14], 7) x[13] ^= R(x[12] + x[15], 9) x[14] ^= R(x[13] + x[12], 13) x[15] ^= R(x[14] + x[13], 18) } for (i = 0; i < 16; ++i) B32[i] = x[i] + B32[i] 
for (i = 0; i < 16; i++) { var bi = i * 4 B[bi + 0] = (B32[i] >> 0 & 0xff) B[bi + 1] = (B32[i] >> 8 & 0xff) B[bi + 2] = (B32[i] >> 16 & 0xff) B[bi + 3] = (B32[i] >> 24 & 0xff) // B.writeInt32LE(B32[i], i*4) //<--- this is signficantly slower even in Node.js } } // naive approach... going back to loop unrolling may yield additional performance function blockxor (S, Si, D, Di, len) { for (var i = 0; i < len; i++) { D[Di + i] ^= S[Si + i] } } } function arraycopy (src, srcPos, dest, destPos, length) { if (Buffer.isBuffer(src) && Buffer.isBuffer(dest)) { src.copy(dest, destPos, srcPos, srcPos + length) } else { while (length--) { dest[destPos++] = src[srcPos++] } } } module.exports = scrypt }).call(this,require("buffer").Buffer) },{"buffer":18,"pbkdf2":2}],2:[function(require,module,exports){ (function (Buffer){ var createHmac = require('create-hmac') var MAX_ALLOC = Math.pow(2, 30) - 1 // default in iojs exports.pbkdf2 = pbkdf2 function pbkdf2 (password, salt, iterations, keylen, digest, callback) { if (typeof digest === 'function') { callback = digest digest = undefined } if (typeof callback !== 'function') { throw new Error('No callback provided to pbkdf2') } var result = pbkdf2Sync(password, salt, iterations, keylen, digest) setTimeout(function () { callback(undefined, result) }) } exports.pbkdf2Sync = pbkdf2Sync function pbkdf2Sync (password, salt, iterations, keylen, digest) { if (typeof iterations !== 'number') { throw new TypeError('Iterations not a number') } if (iterations < 0) { throw new TypeError('Bad iterations') } if (typeof keylen !== 'number') { throw new TypeError('Key length not a number') } if (keylen < 0 || keylen > MAX_ALLOC) { throw new TypeError('Bad key length') } digest = digest || 'sha1' if (!Buffer.isBuffer(password)) password = new Buffer(password, 'binary') if (!Buffer.isBuffer(salt)) salt = new Buffer(salt, 'binary') var hLen var l = 1 var DK = new Buffer(keylen) var block1 = new Buffer(salt.length + 4) salt.copy(block1, 0, 0, 
salt.length) var r var T for (var i = 1; i <= l; i++) { block1.writeUInt32BE(i, salt.length) var U = createHmac(digest, password).update(block1).digest() if (!hLen) { hLen = U.length T = new Buffer(hLen) l = Math.ceil(keylen / hLen) r = keylen - (l - 1) * hLen } U.copy(T, 0, 0, hLen) for (var j = 1; j < iterations; j++) { U = createHmac(digest, password).update(U).digest() for (var k = 0; k < hLen; k++) { T[k] ^= U[k] } } var destPos = (i - 1) * hLen var len = (i === l ? r : hLen) T.copy(DK, destPos, 0, len) } return DK } }).call(this,require("buffer").Buffer) },{"buffer":18,"create-hmac":3}],3:[function(require,module,exports){ (function (Buffer){ 'use strict'; var createHash = require('create-hash/browser'); var inherits = require('inherits') var Transform = require('stream').Transform var ZEROS = new Buffer(128) ZEROS.fill(0) function Hmac(alg, key) { Transform.call(this) if (typeof key === 'string') { key = new Buffer(key) } var blocksize = (alg === 'sha512' || alg === 'sha384') ? 128 : 64 this._alg = alg this._key = key if (key.length > blocksize) { key = createHash(alg).update(key).digest() } else if (key.length < blocksize) { key = Buffer.concat([key, ZEROS], blocksize) } var ipad = this._ipad = new Buffer(blocksize) var opad = this._opad = new Buffer(blocksize) for (var i = 0; i < blocksize; i++) { ipad[i] = key[i] ^ 0x36 opad[i] = key[i] ^ 0x5C } this._hash = createHash(alg).update(ipad) } inherits(Hmac, Transform) Hmac.prototype.update = function (data, enc) { this._hash.update(data, enc) return this } Hmac.prototype._transform = function (data, _, next) { this._hash.update(data) next() } Hmac.prototype._flush = function (next) { this.push(this.digest()) next() } Hmac.prototype.digest = function (enc) { var h = this._hash.digest() return createHash(this._alg).update(this._opad).update(h).digest(enc) } module.exports = function createHmac(alg, key) { return new Hmac(alg, key) } }).call(this,require("buffer").Buffer) 
},{"buffer":18,"create-hash/browser":4,"inherits":16,"stream":37}],4:[function(require,module,exports){ (function (Buffer){ 'use strict'; var inherits = require('inherits') var md5 = require('./md5') var rmd160 = require('ripemd160') var sha = require('sha.js') var Transform = require('stream').Transform function HashNoConstructor(hash) { Transform.call(this) this._hash = hash this.buffers = [] } inherits(HashNoConstructor, Transform) HashNoConstructor.prototype._transform = function (data, _, next) { this.buffers.push(data) next() } HashNoConstructor.prototype._flush = function (next) { this.push(this.digest()) next() } HashNoConstructor.prototype.update = function (data, enc) { if (typeof data === 'string') { data = new Buffer(data, enc) } this.buffers.push(data) return this } HashNoConstructor.prototype.digest = function (enc) { var buf = Buffer.concat(this.buffers) var r = this._hash(buf) this.buffers = null return enc ? r.toString(enc) : r } function Hash(hash) { Transform.call(this) this._hash = hash } inherits(Hash, Transform) Hash.prototype._transform = function (data, enc, next) { if (enc) data = new Buffer(data, enc) this._hash.update(data) next() } Hash.prototype._flush = function (next) { this.push(this._hash.digest()) this._hash = null next() } Hash.prototype.update = function (data, enc) { if (typeof data === 'string') { data = new Buffer(data, enc) } this._hash.update(data) return this } Hash.prototype.digest = function (enc) { var outData = this._hash.digest() return enc ? 
outData.toString(enc) : outData } module.exports = function createHash (alg) { if ('md5' === alg) return new HashNoConstructor(md5) if ('rmd160' === alg) return new HashNoConstructor(rmd160) return new Hash(sha(alg)) } }).call(this,require("buffer").Buffer) },{"./md5":6,"buffer":18,"inherits":16,"ripemd160":7,"sha.js":9,"stream":37}],5:[function(require,module,exports){ (function (Buffer){ 'use strict'; var intSize = 4; var zeroBuffer = new Buffer(intSize); zeroBuffer.fill(0); var chrsz = 8; function toArray(buf, bigEndian) { if ((buf.length % intSize) !== 0) { var len = buf.length + (intSize - (buf.length % intSize)); buf = Buffer.concat([buf, zeroBuffer], len); } var arr = []; var fn = bigEndian ? buf.readInt32BE : buf.readInt32LE; for (var i = 0; i < buf.length; i += intSize) { arr.push(fn.call(buf, i)); } return arr; } function toBuffer(arr, size, bigEndian) { var buf = new Buffer(size); var fn = bigEndian ? buf.writeInt32BE : buf.writeInt32LE; for (var i = 0; i < arr.length; i++) { fn.call(buf, arr[i], i * 4, true); } return buf; } function hash(buf, fn, hashSize, bigEndian) { if (!Buffer.isBuffer(buf)) buf = new Buffer(buf); var arr = fn(toArray(buf, bigEndian), buf.length * chrsz); return toBuffer(arr, hashSize, bigEndian); } exports.hash = hash; }).call(this,require("buffer").Buffer) },{"buffer":18}],6:[function(require,module,exports){ 'use strict'; /* * A JavaScript implementation of the RSA Data Security, Inc. MD5 Message * Digest Algorithm, as defined in RFC 1321. * Version 2.1 Copyright (C) Paul Johnston 1999 - 2002. * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet * Distributed under the BSD License * See http://pajhome.org.uk/crypt/md5 for more info. 
*/ var helpers = require('./helpers'); /* * Calculate the MD5 of an array of little-endian words, and a bit length */ function core_md5(x, len) { /* append padding */ x[len >> 5] |= 0x80 << ((len) % 32); x[(((len + 64) >>> 9) << 4) + 14] = len; var a = 1732584193; var b = -271733879; var c = -1732584194; var d = 271733878; for(var i = 0; i < x.length; i += 16) { var olda = a; var oldb = b; var oldc = c; var oldd = d; a = md5_ff(a, b, c, d, x[i+ 0], 7 , -680876936); d = md5_ff(d, a, b, c, x[i+ 1], 12, -389564586); c = md5_ff(c, d, a, b, x[i+ 2], 17, 606105819); b = md5_ff(b, c, d, a, x[i+ 3], 22, -1044525330); a = md5_ff(a, b, c, d, x[i+ 4], 7 , -176418897); d = md5_ff(d, a, b, c, x[i+ 5], 12, 1200080426); c = md5_ff(c, d, a, b, x[i+ 6], 17, -1473231341); b = md5_ff(b, c, d, a, x[i+ 7], 22, -45705983); a = md5_ff(a, b, c, d, x[i+ 8], 7 , 1770035416); d = md5_ff(d, a, b, c, x[i+ 9], 12, -1958414417); c = md5_ff(c, d, a, b, x[i+10], 17, -42063); b = md5_ff(b, c, d, a, x[i+11], 22, -1990404162); a = md5_ff(a, b, c, d, x[i+12], 7 , 1804603682); d = md5_ff(d, a, b, c, x[i+13], 12, -40341101); c = md5_ff(c, d, a, b, x[i+14], 17, -1502002290); b = md5_ff(b, c, d, a, x[i+15], 22, 1236535329); a = md5_gg(a, b, c, d, x[i+ 1], 5 , -165796510); d = md5_gg(d, a, b, c, x[i+ 6], 9 , -1069501632); c = md5_gg(c, d, a, b, x[i+11], 14, 643717713); b = md5_gg(b, c, d, a, x[i+ 0], 20, -373897302); a = md5_gg(a, b, c, d, x[i+ 5], 5 , -701558691); d = md5_gg(d, a, b, c, x[i+10], 9 , 38016083); c = md5_gg(c, d, a, b, x[i+15], 14, -660478335); b = md5_gg(b, c, d, a, x[i+ 4], 20, -405537848); a = md5_gg(a, b, c, d, x[i+ 9], 5 , 568446438); d = md5_gg(d, a, b, c, x[i+14], 9 , -1019803690); c = md5_gg(c, d, a, b, x[i+ 3], 14, -187363961); b = md5_gg(b, c, d, a, x[i+ 8], 20, 1163531501); a = md5_gg(a, b, c, d, x[i+13], 5 , -1444681467); d = md5_gg(d, a, b, c, x[i+ 2], 9 , -51403784); c = md5_gg(c, d, a, b, x[i+ 7], 14, 1735328473); b = md5_gg(b, c, d, a, x[i+12], 20, -1926607734); a = md5_hh(a, 
b, c, d, x[i+ 5], 4 , -378558); d = md5_hh(d, a, b, c, x[i+ 8], 11, -2022574463); c = md5_hh(c, d, a, b, x[i+11], 16, 1839030562); b = md5_hh(b, c, d, a, x[i+14], 23, -35309556); a = md5_hh(a, b, c, d, x[i+ 1], 4 , -1530992060); d = md5_hh(d, a, b, c, x[i+ 4], 11, 1272893353); c = md5_hh(c, d, a, b, x[i+ 7], 16, -155497632); b = md5_hh(b, c, d, a, x[i+10], 23, -1094730640); a = md5_hh(a, b, c, d, x[i+13], 4 , 681279174); d = md5_hh(d, a, b, c, x[i+ 0], 11, -358537222); c = md5_hh(c, d, a, b, x[i+ 3], 16, -722521979); b = md5_hh(b, c, d, a, x[i+ 6], 23, 76029189); a = md5_hh(a, b, c, d, x[i+ 9], 4 , -640364487); d = md5_hh(d, a, b, c, x[i+12], 11, -421815835); c = md5_hh(c, d, a, b, x[i+15], 16, 530742520); b = md5_hh(b, c, d, a, x[i+ 2], 23, -995338651); a = md5_ii(a, b, c, d, x[i+ 0], 6 , -198630844); d = md5_ii(d, a, b, c, x[i+ 7], 10, 1126891415); c = md5_ii(c, d, a, b, x[i+14], 15, -1416354905); b = md5_ii(b, c, d, a, x[i+ 5], 21, -57434055); a = md5_ii(a, b, c, d, x[i+12], 6 , 1700485571); d = md5_ii(d, a, b, c, x[i+ 3], 10, -1894986606); c = md5_ii(c, d, a, b, x[i+10], 15, -1051523); b = md5_ii(b, c, d, a, x[i+ 1], 21, -2054922799); a = md5_ii(a, b, c, d, x[i+ 8], 6 , 1873313359); d = md5_ii(d, a, b, c, x[i+15], 10, -30611744); c = md5_ii(c, d, a, b, x[i+ 6], 15, -1560198380); b = md5_ii(b, c, d, a, x[i+13], 21, 1309151649); a = md5_ii(a, b, c, d, x[i+ 4], 6 , -145523070); d = md5_ii(d, a, b, c, x[i+11], 10, -1120210379); c = md5_ii(c, d, a, b, x[i+ 2], 15, 718787259); b = md5_ii(b, c, d, a, x[i+ 9], 21, -343485551); a = safe_add(a, olda); b = safe_add(b, oldb); c = safe_add(c, oldc); d = safe_add(d, oldd); } return Array(a, b, c, d); } /* * These functions implement the four basic operations the algorithm uses. 
*/ function md5_cmn(q, a, b, x, s, t) { return safe_add(bit_rol(safe_add(safe_add(a, q), safe_add(x, t)), s),b); } function md5_ff(a, b, c, d, x, s, t) { return md5_cmn((b & c) | ((~b) & d), a, b, x, s, t); } function md5_gg(a, b, c, d, x, s, t) { return md5_cmn((b & d) | (c & (~d)), a, b, x, s, t); } function md5_hh(a, b, c, d, x, s, t) { return md5_cmn(b ^ c ^ d, a, b, x, s, t); } function md5_ii(a, b, c, d, x, s, t) { return md5_cmn(c ^ (b | (~d)), a, b, x, s, t); } /* * Add integers, wrapping at 2^32. This uses 16-bit operations internally * to work around bugs in some JS interpreters. */ function safe_add(x, y) { var lsw = (x & 0xFFFF) + (y & 0xFFFF); var msw = (x >> 16) + (y >> 16) + (lsw >> 16); return (msw << 16) | (lsw & 0xFFFF); } /* * Bitwise rotate a 32-bit number to the left. */ function bit_rol(num, cnt) { return (num << cnt) | (num >>> (32 - cnt)); } module.exports = function md5(buf) { return helpers.hash(buf, core_md5, 16); }; },{"./helpers":5}],7:[function(require,module,exports){ (function (Buffer){ /* CryptoJS v3.1.2 code.google.com/p/crypto-js (c) 2009-2013 by Jeff Mott. All rights reserved. code.google.com/p/crypto-js/wiki/License */ /** @preserve (c) 2012 by Cédric Mesnil. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: - Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. - Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ // constants table var zl = [ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 7, 4, 13, 1, 10, 6, 15, 3, 12, 0, 9, 5, 2, 14, 11, 8, 3, 10, 14, 4, 9, 15, 8, 1, 2, 7, 0, 6, 13, 11, 5, 12, 1, 9, 11, 10, 0, 8, 12, 4, 13, 3, 7, 15, 14, 5, 6, 2, 4, 0, 5, 9, 7, 12, 2, 10, 14, 1, 3, 8, 11, 6, 15, 13 ] var zr = [ 5, 14, 7, 0, 9, 2, 11, 4, 13, 6, 15, 8, 1, 10, 3, 12, 6, 11, 3, 7, 0, 13, 5, 10, 14, 15, 8, 12, 4, 9, 1, 2, 15, 5, 1, 3, 7, 14, 6, 9, 11, 8, 12, 2, 10, 0, 4, 13, 8, 6, 4, 1, 3, 11, 15, 0, 5, 12, 2, 13, 9, 7, 10, 14, 12, 15, 10, 4, 1, 5, 8, 7, 6, 2, 13, 14, 0, 3, 9, 11 ] var sl = [ 11, 14, 15, 12, 5, 8, 7, 9, 11, 13, 14, 15, 6, 7, 9, 8, 7, 6, 8, 13, 11, 9, 7, 15, 7, 12, 15, 9, 11, 7, 13, 12, 11, 13, 6, 7, 14, 9, 13, 15, 14, 8, 13, 6, 5, 12, 7, 5, 11, 12, 14, 15, 14, 15, 9, 8, 9, 14, 5, 6, 8, 6, 5, 12, 9, 15, 5, 11, 6, 8, 13, 12, 5, 12, 13, 14, 11, 8, 5, 6 ] var sr = [ 8, 9, 9, 11, 13, 15, 15, 5, 7, 7, 8, 11, 14, 14, 12, 6, 9, 13, 15, 7, 12, 8, 9, 11, 7, 7, 12, 7, 6, 15, 13, 11, 9, 7, 15, 11, 8, 6, 6, 14, 12, 13, 5, 14, 13, 13, 7, 5, 15, 5, 8, 11, 14, 14, 6, 14, 6, 9, 12, 9, 12, 5, 15, 8, 8, 5, 12, 9, 12, 5, 14, 6, 8, 13, 6, 5, 15, 13, 11, 11 ] var hl = [0x00000000, 0x5A827999, 0x6ED9EBA1, 0x8F1BBCDC, 0xA953FD4E] var hr = [0x50A28BE6, 0x5C4DD124, 0x6D703EF3, 0x7A6D76E9, 0x00000000] function bytesToWords (bytes) { var words = [] for (var i = 0, b = 0; i < bytes.length; i++, b += 8) { words[b >>> 5] |= bytes[i] << (24 - b % 32) } return words } function 
wordsToBytes (words) { var bytes = [] for (var b = 0; b < words.length * 32; b += 8) { bytes.push((words[b >>> 5] >>> (24 - b % 32)) & 0xFF) } return bytes } function processBlock (H, M, offset) { // swap endian for (var i = 0; i < 16; i++) { var offset_i = offset + i var M_offset_i = M[offset_i] // Swap M[offset_i] = ( (((M_offset_i << 8) | (M_offset_i >>> 24)) & 0x00ff00ff) | (((M_offset_i << 24) | (M_offset_i >>> 8)) & 0xff00ff00) ) } // Working variables var al, bl, cl, dl, el var ar, br, cr, dr, er ar = al = H[0] br = bl = H[1] cr = cl = H[2] dr = dl = H[3] er = el = H[4] // computation var t for (i = 0; i < 80; i += 1) { t = (al + M[offset + zl[i]]) | 0 if (i < 16) { t += f1(bl, cl, dl) + hl[0] } else if (i < 32) { t += f2(bl, cl, dl) + hl[1] } else if (i < 48) { t += f3(bl, cl, dl) + hl[2] } else if (i < 64) { t += f4(bl, cl, dl) + hl[3] } else {// if (i<80) { t += f5(bl, cl, dl) + hl[4] } t = t | 0 t = rotl(t, sl[i]) t = (t + el) | 0 al = el el = dl dl = rotl(cl, 10) cl = bl bl = t t = (ar + M[offset + zr[i]]) | 0 if (i < 16) { t += f5(br, cr, dr) + hr[0] } else if (i < 32) { t += f4(br, cr, dr) + hr[1] } else if (i < 48) { t += f3(br, cr, dr) + hr[2] } else if (i < 64) { t += f2(br, cr, dr) + hr[3] } else {// if (i<80) { t += f1(br, cr, dr) + hr[4] } t = t | 0 t = rotl(t, sr[i]) t = (t + er) | 0 ar = er er = dr dr = rotl(cr, 10) cr = br br = t } // intermediate hash value t = (H[1] + cl + dr) | 0 H[1] = (H[2] + dl + er) | 0 H[2] = (H[3] + el + ar) | 0 H[3] = (H[4] + al + br) | 0 H[4] = (H[0] + bl + cr) | 0 H[0] = t } function f1 (x, y, z) { return ((x) ^ (y) ^ (z)) } function f2 (x, y, z) { return (((x) & (y)) | ((~x) & (z))) } function f3 (x, y, z) { return (((x) | (~(y))) ^ (z)) } function f4 (x, y, z) { return (((x) & (z)) | ((y) & (~(z)))) } function f5 (x, y, z) { return ((x) ^ ((y) | (~(z)))) } function rotl (x, n) { return (x << n) | (x >>> (32 - n)) } function ripemd160 (message) { var H = [0x67452301, 0xEFCDAB89, 0x98BADCFE, 0x10325476, 
0xC3D2E1F0] if (typeof message === 'string') { message = new Buffer(message, 'utf8') } var m = bytesToWords(message) var nBitsLeft = message.length * 8 var nBitsTotal = message.length * 8 // Add padding m[nBitsLeft >>> 5] |= 0x80 << (24 - nBitsLeft % 32) m[(((nBitsLeft + 64) >>> 9) << 4) + 14] = ( (((nBitsTotal << 8) | (nBitsTotal >>> 24)) & 0x00ff00ff) | (((nBitsTotal << 24) | (nBitsTotal >>> 8)) & 0xff00ff00) ) for (var i = 0; i < m.length; i += 16) { processBlock(H, m, i) } // swap endian for (i = 0; i < 5; i++) { // shortcut var H_i = H[i] // Swap H[i] = (((H_i << 8) | (H_i >>> 24)) & 0x00ff00ff) | (((H_i << 24) | (H_i >>> 8)) & 0xff00ff00) } var digestbytes = wordsToBytes(H) return new Buffer(digestbytes) } module.exports = ripemd160 }).call(this,require("buffer").Buffer) },{"buffer":18}],8:[function(require,module,exports){ (function (Buffer){ // prototype class for hash functions function Hash (blockSize, finalSize) { this._block = new Buffer(blockSize) this._finalSize = finalSize this._blockSize = blockSize this._len = 0 this._s = 0 } Hash.prototype.update = function (data, enc) { if (typeof data === 'string') { enc = enc || 'utf8' data = new Buffer(data, enc) } var l = this._len += data.length var s = this._s || 0 var f = 0 var buffer = this._block while (s < l) { var t = Math.min(data.length, f + this._blockSize - (s % this._blockSize)) var ch = (t - f) for (var i = 0; i < ch; i++) { buffer[(s % this._blockSize) + i] = data[i + f] } s += ch f += ch if ((s % this._blockSize) === 0) { this._update(buffer) } } this._s = s return this } Hash.prototype.digest = function (enc) { // Suppose the length of the message M, in bits, is l var l = this._len * 8 // Append the bit 1 to the end of the message this._block[this._len % this._blockSize] = 0x80 // and then k zero bits, where k is the smallest non-negative solution to the equation (l + 1 + k) === finalSize mod blockSize this._block.fill(0, this._len % this._blockSize + 1) if (l % (this._blockSize * 8) >= 
this._finalSize * 8) { this._update(this._block) this._block.fill(0) } // to this append the block which is equal to the number l written in binary // TODO: handle case where l is > Math.pow(2, 29) this._block.writeInt32BE(l, this._blockSize - 4) var hash = this._update(this._block) || this._hash() return enc ? hash.toString(enc) : hash } Hash.prototype._update = function () { throw new Error('_update must be implemented by subclass') } module.exports = Hash }).call(this,require("buffer").Buffer) },{"buffer":18}],9:[function(require,module,exports){ var exports = module.exports = function SHA (algorithm) { algorithm = algorithm.toLowerCase() var Algorithm = exports[algorithm] if (!Algorithm) throw new Error(algorithm + ' is not supported (we accept pull requests)') return new Algorithm() } exports.sha = require('./sha') exports.sha1 = require('./sha1') exports.sha224 = require('./sha224') exports.sha256 = require('./sha256') exports.sha384 = require('./sha384') exports.sha512 = require('./sha512') },{"./sha":10,"./sha1":11,"./sha224":12,"./sha256":13,"./sha384":14,"./sha512":15}],10:[function(require,module,exports){ (function (Buffer){ /* * A JavaScript implementation of the Secure Hash Algorithm, SHA-0, as defined * in FIPS PUB 180-1 * This source code is derived from sha1.js of the same repository. * The difference between SHA-0 and SHA-1 is just a bitwise rotate left * operation was added. */ var inherits = require('inherits') var Hash = require('./hash') var W = new Array(80) function Sha () { this.init() this._w = W Hash.call(this, 64, 56) } inherits(Sha, Hash) Sha.prototype.init = function () { this._a = 0x67452301 | 0 this._b = 0xefcdab89 | 0 this._c = 0x98badcfe | 0 this._d = 0x10325476 | 0 this._e = 0xc3d2e1f0 | 0 return this } /* * Bitwise rotate a 32-bit number to the left. 
*/ function rol (num, cnt) { return (num << cnt) | (num >>> (32 - cnt)) } Sha.prototype._update = function (M) { var W = this._w var a = this._a var b = this._b var c = this._c var d = this._d var e = this._e var j = 0, k /* * SHA-1 has a bitwise rotate left operation. But, SHA is not * function calcW() { return rol(W[j - 3] ^ W[j - 8] ^ W[j - 14] ^ W[j - 16], 1) } */ function calcW () { return W[j - 3] ^ W[j - 8] ^ W[j - 14] ^ W[j - 16] } function loop (w, f) { W[j] = w var t = rol(a, 5) + f + e + w + k e = d d = c c = rol(b, 30) b = a a = t j++ } k = 1518500249 while (j < 16) loop(M.readInt32BE(j * 4), (b & c) | ((~b) & d)) while (j < 20) loop(calcW(), (b & c) | ((~b) & d)) k = 1859775393 while (j < 40) loop(calcW(), b ^ c ^ d) k = -1894007588 while (j < 60) loop(calcW(), (b & c) | (b & d) | (c & d)) k = -899497514 while (j < 80) loop(calcW(), b ^ c ^ d) this._a = (a + this._a) | 0 this._b = (b + this._b) | 0 this._c = (c + this._c) | 0 this._d = (d + this._d) | 0 this._e = (e + this._e) | 0 } Sha.prototype._hash = function () { var H = new Buffer(20) H.writeInt32BE(this._a | 0, 0) H.writeInt32BE(this._b | 0, 4) H.writeInt32BE(this._c | 0, 8) H.writeInt32BE(this._d | 0, 12) H.writeInt32BE(this._e | 0, 16) return H } module.exports = Sha }).call(this,require("buffer").Buffer) },{"./hash":8,"buffer":18,"inherits":16}],11:[function(require,module,exports){ (function (Buffer){ /* * A JavaScript implementation of the Secure Hash Algorithm, SHA-1, as defined * in FIPS PUB 180-1 * Version 2.1a Copyright Paul Johnston 2000 - 2002. * Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet * Distributed under the BSD License * See http://pajhome.org.uk/crypt/md5 for details. 
*/ var inherits = require('inherits') var Hash = require('./hash') var W = new Array(80) function Sha1 () { this.init() this._w = W Hash.call(this, 64, 56) } inherits(Sha1, Hash) Sha1.prototype.init = function () { this._a = 0x67452301 | 0 this._b = 0xefcdab89 | 0 this._c = 0x98badcfe | 0 this._d = 0x10325476 | 0 this._e = 0xc3d2e1f0 | 0 return this } /* * Bitwise rotate a 32-bit number to the left. */ function rol (num, cnt) { return (num << cnt) | (num >>> (32 - cnt)) } Sha1.prototype._update = function (M) { var W = this._w var a = this._a var b = this._b var c = this._c var d = this._d var e = this._e var j = 0, k function calcW () { return rol(W[j - 3] ^ W[j - 8] ^ W[j - 14] ^ W[j - 16], 1) } function loop (w, f) { W[j] = w var t = rol(a, 5) + f + e + w + k e = d d = c c = rol(b, 30) b = a a = t j++ } k = 1518500249 while (j < 16) loop(M.readInt32BE(j * 4), (b & c) | ((~b) & d)) while (j < 20) loop(calcW(), (b & c) | ((~b) & d)) k = 1859775393 while (j < 40) loop(calcW(), b ^ c ^ d) k = -1894007588 while (j < 60) loop(calcW(), (b & c) | (b & d) | (c & d)) k = -899497514 while (j < 80) loop(calcW(), b ^ c ^ d) this._a = (a + this._a) | 0 this._b = (b + this._b) | 0 this._c = (c + this._c) | 0 this._d = (d + this._d) | 0 this._e = (e + this._e) | 0 } Sha1.prototype._hash = function () { var H = new Buffer(20) H.writeInt32BE(this._a | 0, 0) H.writeInt32BE(this._b | 0, 4) H.writeInt32BE(this._c | 0, 8) H.writeInt32BE(this._d | 0, 12) H.writeInt32BE(this._e | 0, 16) return H } module.exports = Sha1 }).call(this,require("buffer").Buffer) },{"./hash":8,"buffer":18,"inherits":16}],12:[function(require,module,exports){ (function (Buffer){ /** * A JavaScript implementation of the Secure Hash Algorithm, SHA-256, as defined * in FIPS 180-2 * Version 2.2-beta Copyright Angel Marin, Paul Johnston 2000 - 2009. 
* Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet * */ var inherits = require('inherits') var Sha256 = require('./sha256') var Hash = require('./hash') var W = new Array(64) function Sha224 () { this.init() this._w = W // new Array(64) Hash.call(this, 64, 56) } inherits(Sha224, Sha256) Sha224.prototype.init = function () { this._a = 0xc1059ed8 | 0 this._b = 0x367cd507 | 0 this._c = 0x3070dd17 | 0 this._d = 0xf70e5939 | 0 this._e = 0xffc00b31 | 0 this._f = 0x68581511 | 0 this._g = 0x64f98fa7 | 0 this._h = 0xbefa4fa4 | 0 return this } Sha224.prototype._hash = function () { var H = new Buffer(28) H.writeInt32BE(this._a, 0) H.writeInt32BE(this._b, 4) H.writeInt32BE(this._c, 8) H.writeInt32BE(this._d, 12) H.writeInt32BE(this._e, 16) H.writeInt32BE(this._f, 20) H.writeInt32BE(this._g, 24) return H } module.exports = Sha224 }).call(this,require("buffer").Buffer) },{"./hash":8,"./sha256":13,"buffer":18,"inherits":16}],13:[function(require,module,exports){ (function (Buffer){ /** * A JavaScript implementation of the Secure Hash Algorithm, SHA-256, as defined * in FIPS 180-2 * Version 2.2-beta Copyright Angel Marin, Paul Johnston 2000 - 2009. 
* Other contributors: Greg Holt, Andrew Kepert, Ydnar, Lostinet * */ var inherits = require('inherits') var Hash = require('./hash') var K = [ 0x428A2F98, 0x71374491, 0xB5C0FBCF, 0xE9B5DBA5, 0x3956C25B, 0x59F111F1, 0x923F82A4, 0xAB1C5ED5, 0xD807AA98, 0x12835B01, 0x243185BE, 0x550C7DC3, 0x72BE5D74, 0x80DEB1FE, 0x9BDC06A7, 0xC19BF174, 0xE49B69C1, 0xEFBE4786, 0x0FC19DC6, 0x240CA1CC, 0x2DE92C6F, 0x4A7484AA, 0x5CB0A9DC, 0x76F988DA, 0x983E5152, 0xA831C66D, 0xB00327C8, 0xBF597FC7, 0xC6E00BF3, 0xD5A79147, 0x06CA6351, 0x14292967, 0x27B70A85, 0x2E1B2138, 0x4D2C6DFC, 0x53380D13, 0x650A7354, 0x766A0ABB, 0x81C2C92E, 0x92722C85, 0xA2BFE8A1, 0xA81A664B, 0xC24B8B70, 0xC76C51A3, 0xD192E819, 0xD6990624, 0xF40E3585, 0x106AA070, 0x19A4C116, 0x1E376C08, 0x2748774C, 0x34B0BCB5, 0x391C0CB3, 0x4ED8AA4A, 0x5B9CCA4F, 0x682E6FF3, 0x748F82EE, 0x78A5636F, 0x84C87814, 0x8CC70208, 0x90BEFFFA, 0xA4506CEB, 0xBEF9A3F7, 0xC67178F2 ] var W = new Array(64) function Sha256 () { this.init() this._w = W // new Array(64) Hash.call(this, 64, 56) } inherits(Sha256, Hash) Sha256.prototype.init = function () { this._a = 0x6a09e667 | 0 this._b = 0xbb67ae85 | 0 this._c = 0x3c6ef372 | 0 this._d = 0xa54ff53a | 0 this._e = 0x510e527f | 0 this._f = 0x9b05688c | 0 this._g = 0x1f83d9ab | 0 this._h = 0x5be0cd19 | 0 return this } function S (X, n) { return (X >>> n) | (X << (32 - n)) } function R (X, n) { return (X >>> n) } function Ch (x, y, z) { return ((x & y) ^ ((~x) & z)) } function Maj (x, y, z) { return ((x & y) ^ (x & z) ^ (y & z)) } function Sigma0256 (x) { return (S(x, 2) ^ S(x, 13) ^ S(x, 22)) } function Sigma1256 (x) { return (S(x, 6) ^ S(x, 11) ^ S(x, 25)) } function Gamma0256 (x) { return (S(x, 7) ^ S(x, 18) ^ R(x, 3)) } function Gamma1256 (x) { return (S(x, 17) ^ S(x, 19) ^ R(x, 10)) } Sha256.prototype._update = function (M) { var W = this._w var a = this._a | 0 var b = this._b | 0 var c = this._c | 0 var d = this._d | 0 var e = this._e | 0 var f = this._f | 0 var g = this._g | 0 var h = this._h | 0 var 
j = 0 function calcW () { return Gamma1256(W[j - 2]) + W[j - 7] + Gamma0256(W[j - 15]) + W[j - 16] } function loop (w) { W[j] = w var T1 = h + Sigma1256(e) + Ch(e, f, g) + K[j] + w var T2 = Sigma0256(a) + Maj(a, b, c) h = g g = f f = e e = d + T1 d = c c = b b = a a = T1 + T2 j++ } while (j < 16) loop(M.readInt32BE(j * 4)) while (j < 64) loop(calcW()) this._a = (a + this._a) | 0 this._b = (b + this._b) | 0 this._c = (c + this._c) | 0 this._d = (d + this._d) | 0 this._e = (e + this._e) | 0 this._f = (f + this._f) | 0 this._g = (g + this._g) | 0 this._h = (h + this._h) | 0 } Sha256.prototype._hash = function () { var H = new Buffer(32) H.writeInt32BE(this._a, 0) H.writeInt32BE(this._b, 4) H.writeInt32BE(this._c, 8) H.writeInt32BE(this._d, 12) H.writeInt32BE(this._e, 16) H.writeInt32BE(this._f, 20) H.writeInt32BE(this._g, 24) H.writeInt32BE(this._h, 28) return H } module.exports = Sha256 }).call(this,require("buffer").Buffer) },{"./hash":8,"buffer":18,"inherits":16}],14:[function(require,module,exports){ (function (Buffer){ var inherits = require('inherits') var SHA512 = require('./sha512') var Hash = require('./hash') var W = new Array(160) function Sha384 () { this.init() this._w = W Hash.call(this, 128, 112) } inherits(Sha384, SHA512) Sha384.prototype.init = function () { this._a = 0xcbbb9d5d | 0 this._b = 0x629a292a | 0 this._c = 0x9159015a | 0 this._d = 0x152fecd8 | 0 this._e = 0x67332667 | 0 this._f = 0x8eb44a87 | 0 this._g = 0xdb0c2e0d | 0 this._h = 0x47b5481d | 0 this._al = 0xc1059ed8 | 0 this._bl = 0x367cd507 | 0 this._cl = 0x3070dd17 | 0 this._dl = 0xf70e5939 | 0 this._el = 0xffc00b31 | 0 this._fl = 0x68581511 | 0 this._gl = 0x64f98fa7 | 0 this._hl = 0xbefa4fa4 | 0 return this } Sha384.prototype._hash = function () { var H = new Buffer(48) function writeInt64BE (h, l, offset) { H.writeInt32BE(h, offset) H.writeInt32BE(l, offset + 4) } writeInt64BE(this._a, this._al, 0) writeInt64BE(this._b, this._bl, 8) writeInt64BE(this._c, this._cl, 16) 
writeInt64BE(this._d, this._dl, 24) writeInt64BE(this._e, this._el, 32) writeInt64BE(this._f, this._fl, 40) return H } module.exports = Sha384 }).call(this,require("buffer").Buffer) },{"./hash":8,"./sha512":15,"buffer":18,"inherits":16}],15:[function(require,module,exports){ (function (Buffer){ var inherits = require('inherits') var Hash = require('./hash') var K = [ 0x428a2f98, 0xd728ae22, 0x71374491, 0x23ef65cd, 0xb5c0fbcf, 0xec4d3b2f, 0xe9b5dba5, 0x8189dbbc, 0x3956c25b, 0xf348b538, 0x59f111f1, 0xb605d019, 0x923f82a4, 0xaf194f9b, 0xab1c5ed5, 0xda6d8118, 0xd807aa98, 0xa3030242, 0x12835b01, 0x45706fbe, 0x243185be, 0x4ee4b28c, 0x550c7dc3, 0xd5ffb4e2, 0x72be5d74, 0xf27b896f, 0x80deb1fe, 0x3b1696b1, 0x9bdc06a7, 0x25c71235, 0xc19bf174, 0xcf692694, 0xe49b69c1, 0x9ef14ad2, 0xefbe4786, 0x384f25e3, 0x0fc19dc6, 0x8b8cd5b5, 0x240ca1cc, 0x77ac9c65, 0x2de92c6f, 0x592b0275, 0x4a7484aa, 0x6ea6e483, 0x5cb0a9dc, 0xbd41fbd4, 0x76f988da, 0x831153b5, 0x983e5152, 0xee66dfab, 0xa831c66d, 0x2db43210, 0xb00327c8, 0x98fb213f, 0xbf597fc7, 0xbeef0ee4, 0xc6e00bf3, 0x3da88fc2, 0xd5a79147, 0x930aa725, 0x06ca6351, 0xe003826f, 0x14292967, 0x0a0e6e70, 0x27b70a85, 0x46d22ffc, 0x2e1b2138, 0x5c26c926, 0x4d2c6dfc, 0x5ac42aed, 0x53380d13, 0x9d95b3df, 0x650a7354, 0x8baf63de, 0x766a0abb, 0x3c77b2a8, 0x81c2c92e, 0x47edaee6, 0x92722c85, 0x1482353b, 0xa2bfe8a1, 0x4cf10364, 0xa81a664b, 0xbc423001, 0xc24b8b70, 0xd0f89791, 0xc76c51a3, 0x0654be30, 0xd192e819, 0xd6ef5218, 0xd6990624, 0x5565a910, 0xf40e3585, 0x5771202a, 0x106aa070, 0x32bbd1b8, 0x19a4c116, 0xb8d2d0c8, 0x1e376c08, 0x5141ab53, 0x2748774c, 0xdf8eeb99, 0x34b0bcb5, 0xe19b48a8, 0x391c0cb3, 0xc5c95a63, 0x4ed8aa4a, 0xe3418acb, 0x5b9cca4f, 0x7763e373, 0x682e6ff3, 0xd6b2b8a3, 0x748f82ee, 0x5defb2fc, 0x78a5636f, 0x43172f60, 0x84c87814, 0xa1f0ab72, 0x8cc70208, 0x1a6439ec, 0x90befffa, 0x23631e28, 0xa4506ceb, 0xde82bde9, 0xbef9a3f7, 0xb2c67915, 0xc67178f2, 0xe372532b, 0xca273ece, 0xea26619c, 0xd186b8c7, 0x21c0c207, 0xeada7dd6, 0xcde0eb1e, 0xf57d4f7f, 
0xee6ed178, 0x06f067aa, 0x72176fba, 0x0a637dc5, 0xa2c898a6, 0x113f9804, 0xbef90dae, 0x1b710b35, 0x131c471b, 0x28db77f5, 0x23047d84, 0x32caab7b, 0x40c72493, 0x3c9ebe0a, 0x15c9bebc, 0x431d67c4, 0x9c100d4c, 0x4cc5d4be, 0xcb3e42b6, 0x597f299c, 0xfc657e2a, 0x5fcb6fab, 0x3ad6faec, 0x6c44198c, 0x4a475817 ] var W = new Array(160) function Sha512 () { this.init() this._w = W Hash.call(this, 128, 112) } inherits(Sha512, Hash) Sha512.prototype.init = function () { this._a = 0x6a09e667 | 0 this._b = 0xbb67ae85 | 0 this._c = 0x3c6ef372 | 0 this._d = 0xa54ff53a | 0 this._e = 0x510e527f | 0 this._f = 0x9b05688c | 0 this._g = 0x1f83d9ab | 0 this._h = 0x5be0cd19 | 0 this._al = 0xf3bcc908 | 0 this._bl = 0x84caa73b | 0 this._cl = 0xfe94f82b | 0 this._dl = 0x5f1d36f1 | 0 this._el = 0xade682d1 | 0 this._fl = 0x2b3e6c1f | 0 this._gl = 0xfb41bd6b | 0 this._hl = 0x137e2179 | 0 return this } function S (X, Xl, n) { return (X >>> n) | (Xl << (32 - n)) } function Ch (x, y, z) { return ((x & y) ^ ((~x) & z)) } function Maj (x, y, z) { return ((x & y) ^ (x & z) ^ (y & z)) } Sha512.prototype._update = function (M) { var W = this._w var a = this._a | 0 var b = this._b | 0 var c = this._c | 0 var d = this._d | 0 var e = this._e | 0 var f = this._f | 0 var g = this._g | 0 var h = this._h | 0 var al = this._al | 0 var bl = this._bl | 0 var cl = this._cl | 0 var dl = this._dl | 0 var el = this._el | 0 var fl = this._fl | 0 var gl = this._gl | 0 var hl = this._hl | 0 var i = 0, j = 0 var Wi, Wil function calcW () { var x = W[j - 15 * 2] var xl = W[j - 15 * 2 + 1] var gamma0 = S(x, xl, 1) ^ S(x, xl, 8) ^ (x >>> 7) var gamma0l = S(xl, x, 1) ^ S(xl, x, 8) ^ S(xl, x, 7) x = W[j - 2 * 2] xl = W[j - 2 * 2 + 1] var gamma1 = S(x, xl, 19) ^ S(xl, x, 29) ^ (x >>> 6) var gamma1l = S(xl, x, 19) ^ S(x, xl, 29) ^ S(xl, x, 6) // W[i] = gamma0 + W[i - 7] + gamma1 + W[i - 16] var Wi7 = W[j - 7 * 2] var Wi7l = W[j - 7 * 2 + 1] var Wi16 = W[j - 16 * 2] var Wi16l = W[j - 16 * 2 + 1] Wil = gamma0l + Wi7l Wi = gamma0 + 
Wi7 + ((Wil >>> 0) < (gamma0l >>> 0) ? 1 : 0) Wil = Wil + gamma1l Wi = Wi + gamma1 + ((Wil >>> 0) < (gamma1l >>> 0) ? 1 : 0) Wil = Wil + Wi16l Wi = Wi + Wi16 + ((Wil >>> 0) < (Wi16l >>> 0) ? 1 : 0) } function loop () { W[j] = Wi W[j + 1] = Wil var maj = Maj(a, b, c) var majl = Maj(al, bl, cl) var sigma0h = S(a, al, 28) ^ S(al, a, 2) ^ S(al, a, 7) var sigma0l = S(al, a, 28) ^ S(a, al, 2) ^ S(a, al, 7) var sigma1h = S(e, el, 14) ^ S(e, el, 18) ^ S(el, e, 9) var sigma1l = S(el, e, 14) ^ S(el, e, 18) ^ S(e, el, 9) // t1 = h + sigma1 + ch + K[i] + W[i] var Ki = K[j] var Kil = K[j + 1] var ch = Ch(e, f, g) var chl = Ch(el, fl, gl) var t1l = hl + sigma1l var t1 = h + sigma1h + ((t1l >>> 0) < (hl >>> 0) ? 1 : 0) t1l = t1l + chl t1 = t1 + ch + ((t1l >>> 0) < (chl >>> 0) ? 1 : 0) t1l = t1l + Kil t1 = t1 + Ki + ((t1l >>> 0) < (Kil >>> 0) ? 1 : 0) t1l = t1l + Wil t1 = t1 + Wi + ((t1l >>> 0) < (Wil >>> 0) ? 1 : 0) // t2 = sigma0 + maj var t2l = sigma0l + majl var t2 = sigma0h + maj + ((t2l >>> 0) < (sigma0l >>> 0) ? 1 : 0) h = g hl = gl g = f gl = fl f = e fl = el el = (dl + t1l) | 0 e = (d + t1 + ((el >>> 0) < (dl >>> 0) ? 1 : 0)) | 0 d = c dl = cl c = b cl = bl b = a bl = al al = (t1l + t2l) | 0 a = (t1 + t2 + ((al >>> 0) < (t1l >>> 0) ? 1 : 0)) | 0 i++ j += 2 } while (i < 16) { Wi = M.readInt32BE(j * 4) Wil = M.readInt32BE(j * 4 + 4) loop() } while (i < 80) { calcW() loop() } this._al = (this._al + al) | 0 this._bl = (this._bl + bl) | 0 this._cl = (this._cl + cl) | 0 this._dl = (this._dl + dl) | 0 this._el = (this._el + el) | 0 this._fl = (this._fl + fl) | 0 this._gl = (this._gl + gl) | 0 this._hl = (this._hl + hl) | 0 this._a = (this._a + a + ((this._al >>> 0) < (al >>> 0) ? 1 : 0)) | 0 this._b = (this._b + b + ((this._bl >>> 0) < (bl >>> 0) ? 1 : 0)) | 0 this._c = (this._c + c + ((this._cl >>> 0) < (cl >>> 0) ? 1 : 0)) | 0 this._d = (this._d + d + ((this._dl >>> 0) < (dl >>> 0) ? 1 : 0)) | 0 this._e = (this._e + e + ((this._el >>> 0) < (el >>> 0) ? 
1 : 0)) | 0 this._f = (this._f + f + ((this._fl >>> 0) < (fl >>> 0) ? 1 : 0)) | 0 this._g = (this._g + g + ((this._gl >>> 0) < (gl >>> 0) ? 1 : 0)) | 0 this._h = (this._h + h + ((this._hl >>> 0) < (hl >>> 0) ? 1 : 0)) | 0 } Sha512.prototype._hash = function () { var H = new Buffer(64) function writeInt64BE (h, l, offset) { H.writeInt32BE(h, offset) H.writeInt32BE(l, offset + 4) } writeInt64BE(this._a, this._al, 0) writeInt64BE(this._b, this._bl, 8) writeInt64BE(this._c, this._cl, 16) writeInt64BE(this._d, this._dl, 24) writeInt64BE(this._e, this._el, 32) writeInt64BE(this._f, this._fl, 40) writeInt64BE(this._g, this._gl, 48) writeInt64BE(this._h, this._hl, 56) return H } module.exports = Sha512 }).call(this,require("buffer").Buffer) },{"./hash":8,"buffer":18,"inherits":16}],16:[function(require,module,exports){ if (typeof Object.create === 'function') { // implementation from standard node.js 'util' module module.exports = function inherits(ctor, superCtor) { ctor.super_ = superCtor ctor.prototype = Object.create(superCtor.prototype, { constructor: { value: ctor, enumerable: false, writable: true, configurable: true } }); }; } else { // old school shim for old browsers module.exports = function inherits(ctor, superCtor) { ctor.super_ = superCtor var TempCtor = function () {} TempCtor.prototype = superCtor.prototype ctor.prototype = new TempCtor() ctor.prototype.constructor = ctor } } },{}],17:[function(require,module,exports){ },{}],18:[function(require,module,exports){ /*! * The buffer module from node.js, for the browser. 
* * @author Feross Aboukhadijeh <[email protected]> <http://feross.org> * @license MIT */ var base64 = require('base64-js') var ieee754 = require('ieee754') var isArray = require('is-array') exports.Buffer = Buffer exports.SlowBuffer = SlowBuffer exports.INSPECT_MAX_BYTES = 50 Buffer.poolSize = 8192 // not used by this implementation var kMaxLength = 0x3fffffff var rootParent = {} /** * If `Buffer.TYPED_ARRAY_SUPPORT`: * === true Use Uint8Array implementation (fastest) * === false Use Object implementation (most compatible, even IE6) * * Browsers that support typed arrays are IE 10+, Firefox 4+, Chrome 7+, Safari 5.1+, * Opera 11.6+, iOS 4.2+. * * Note: * * - Implementation must support adding new properties to `Uint8Array` instances. * Firefox 4-29 lacked support, fixed in Firefox 30+. * See: https://bugzilla.mozilla.org/show_bug.cgi?id=695438. * * - Chrome 9-10 is missing the `TypedArray.prototype.subarray` function. * * - IE10 has a broken `TypedArray.prototype.subarray` function which returns arrays of * incorrect length in some situations. * * We detect these buggy browsers and set `Buffer.TYPED_ARRAY_SUPPORT` to `false` so they will * get the Object implementation, which is slower but will work correctly. */ Buffer.TYPED_ARRAY_SUPPORT = (function () { try { var buf = new ArrayBuffer(0) var arr = new Uint8Array(buf) arr.foo = function () { return 42 } return arr.foo() === 42 && // typed array instances can be augmented typeof arr.subarray === 'function' && // chrome 9-10 lack `subarray` new Uint8Array(1).subarray(1, 1).byteLength === 0 // ie10 has broken `subarray` } catch (e) { return false } })() /** * Class: Buffer * ============= * * The Buffer constructor returns instances of `Uint8Array` that are augmented * with function properties for all the node `Buffer` API functions. We use * `Uint8Array` so that square bracket notation works as expected -- it returns * a single octet. 
* * By augmenting the instances, we can avoid modifying the `Uint8Array` * prototype. */ function Buffer (arg) { if (!(this instanceof Buffer)) { // Avoid going through an ArgumentsAdaptorTrampoline in the common case. if (arguments.length > 1) return new Buffer(arg, arguments[1]) return new Buffer(arg) } this.length = 0 this.parent = undefined // Common case. if (typeof arg === 'number') { return fromNumber(this, arg) } // Slightly less common case. if (typeof arg === 'string') { return fromString(this, arg, arguments.length > 1 ? arguments[1] : 'utf8') } // Unusual. return fromObject(this, arg) } function fromNumber (that, length) { that = allocate(that, length < 0 ? 0 : checked(length) | 0) if (!Buffer.TYPED_ARRAY_SUPPORT) { for (var i = 0; i < length; i++) { that[i] = 0 } } return that } function fromString (that, string, encoding) { if (typeof encoding !== 'string' || encoding === '') encoding = 'utf8' // Assumption: byteLength() return value is always < kMaxLength. var length = byteLength(string, encoding) | 0 that = allocate(that, length) that.write(string, encoding) return that } function fromObject (that, object) { if (Buffer.isBuffer(object)) return fromBuffer(that, object) if (isArray(object)) return fromArray(that, object) if (object == null) { throw new TypeError('must start with number, buffer, array or string') } if (typeof ArrayBuffer !== 'undefined' && object.buffer instanceof ArrayBuffer) { return fromTypedArray(that, object) } if (object.length) return fromArrayLike(that, object) return fromJsonObject(that, object) } function fromBuffer (that, buffer) { var length = checked(buffer.length) | 0 that = allocate(that, length) buffer.copy(that, 0, 0, length) return that } function fromArray (that, array) { var length = checked(array.length) | 0 that = allocate(that, length) for (var i = 0; i < length; i += 1) { that[i] = array[i] & 255 } return that } // Duplicate of fromArray() to keep fromArray() monomorphic. 
function fromTypedArray (that, array) { var length = checked(array.length) | 0 that = allocate(that, length) // Truncating the elements is probably not what people expect from typed // arrays with BYTES_PER_ELEMENT > 1 but it's compatible with the behavior // of the old Buffer constructor. for (var i = 0; i < length; i += 1) { that[i] = array[i] & 255 } return that } function fromArrayLike (that, array) { var length = checked(array.length) | 0 that = allocate(that, length) for (var i = 0; i < length; i += 1) { that[i] = array[i] & 255<|fim▁hole|> } return that } // Deserialize { type: 'Buffer', data: [1,2,3,...] } into a Buffer object. // Returns a zero-length buffer for inputs that don't conform to the spec. function fromJsonObject (that, object) { var array var length = 0 if (object.type === 'Buffer' && isArray(object.data)) { array = object.data length = checked(array.length) | 0 } that = allocate(that, length) for (var i = 0; i < length; i += 1) { that[i] = array[i] & 255 } return that } function allocate (that, length) { if (Buffer.TYPED_ARRAY_SUPPORT) { // Return an augmented `Uint8Array` instance, for best performance that = Buffer._augment(new Uint8Array(length)) } else { // Fallback: Return an object instance of the Buffer class that.length = length that._isBuffer = true } var fromPool = length !== 0 && length <= Buffer.poolSize >>> 1 if (fromPool) that.parent = rootParent return that } function checked (length) { // Note: cannot use `length < kMaxLength` here because that fails when // length is NaN (which is otherwise coerced to zero.) 
if (length >= kMaxLength) { throw new RangeError('Attempt to allocate Buffer larger than maximum ' + 'size: 0x' + kMaxLength.toString(16) + ' bytes') } return length | 0 } function SlowBuffer (subject, encoding) { if (!(this instanceof SlowBuffer)) return new SlowBuffer(subject, encoding) var buf = new Buffer(subject, encoding) delete buf.parent return buf } Buffer.isBuffer = function isBuffer (b) { return !!(b != null && b._isBuffer) } Buffer.compare = function compare (a, b) { if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { throw new TypeError('Arguments must be Buffers') } if (a === b) return 0 var x = a.length var y = b.length var i = 0 var len = Math.min(x, y) while (i < len) { if (a[i] !== b[i]) break ++i } if (i !== len) { x = a[i] y = b[i] } if (x < y) return -1 if (y < x) return 1 return 0 } Buffer.isEncoding = function isEncoding (encoding) { switch (String(encoding).toLowerCase()) { case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'raw': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': return true default: return false } } Buffer.concat = function concat (list, length) { if (!isArray(list)) throw new TypeError('list argument must be an Array of Buffers.') if (list.length === 0) { return new Buffer(0) } else if (list.length === 1) { return list[0] } var i if (length === undefined) { length = 0 for (i = 0; i < list.length; i++) { length += list[i].length } } var buf = new Buffer(length) var pos = 0 for (i = 0; i < list.length; i++) { var item = list[i] item.copy(buf, pos) pos += item.length } return buf } function byteLength (string, encoding) { if (typeof string !== 'string') string = String(string) if (string.length === 0) return 0 switch (encoding || 'utf8') { case 'ascii': case 'binary': case 'raw': return string.length case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': return string.length * 2 case 'hex': return string.length >>> 1 case 'utf8': case 'utf-8': return utf8ToBytes(string).length 
case 'base64': return base64ToBytes(string).length default: return string.length } } Buffer.byteLength = byteLength // pre-set for values that may exist in the future Buffer.prototype.length = undefined Buffer.prototype.parent = undefined // toString(encoding, start=0, end=buffer.length) Buffer.prototype.toString = function toString (encoding, start, end) { var loweredCase = false start = start | 0 end = end === undefined || end === Infinity ? this.length : end | 0 if (!encoding) encoding = 'utf8' if (start < 0) start = 0 if (end > this.length) end = this.length if (end <= start) return '' while (true) { switch (encoding) { case 'hex': return hexSlice(this, start, end) case 'utf8': case 'utf-8': return utf8Slice(this, start, end) case 'ascii': return asciiSlice(this, start, end) case 'binary': return binarySlice(this, start, end) case 'base64': return base64Slice(this, start, end) case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': return utf16leSlice(this, start, end) default: if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) encoding = (encoding + '').toLowerCase() loweredCase = true } } } Buffer.prototype.equals = function equals (b) { if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') if (this === b) return true return Buffer.compare(this, b) === 0 } Buffer.prototype.inspect = function inspect () { var str = '' var max = exports.INSPECT_MAX_BYTES if (this.length > 0) { str = this.toString('hex', 0, max).match(/.{2}/g).join(' ') if (this.length > max) str += ' ... 
' } return '<Buffer ' + str + '>' } Buffer.prototype.compare = function compare (b) { if (!Buffer.isBuffer(b)) throw new TypeError('Argument must be a Buffer') if (this === b) return 0 return Buffer.compare(this, b) } Buffer.prototype.indexOf = function indexOf (val, byteOffset) { if (byteOffset > 0x7fffffff) byteOffset = 0x7fffffff else if (byteOffset < -0x80000000) byteOffset = -0x80000000 byteOffset >>= 0 if (this.length === 0) return -1 if (byteOffset >= this.length) return -1 // Negative offsets start from the end of the buffer if (byteOffset < 0) byteOffset = Math.max(this.length + byteOffset, 0) if (typeof val === 'string') { if (val.length === 0) return -1 // special case: looking for empty string always fails return String.prototype.indexOf.call(this, val, byteOffset) } if (Buffer.isBuffer(val)) { return arrayIndexOf(this, val, byteOffset) } if (typeof val === 'number') { if (Buffer.TYPED_ARRAY_SUPPORT && Uint8Array.prototype.indexOf === 'function') { return Uint8Array.prototype.indexOf.call(this, val, byteOffset) } return arrayIndexOf(this, [ val ], byteOffset) } function arrayIndexOf (arr, val, byteOffset) { var foundIndex = -1 for (var i = 0; byteOffset + i < arr.length; i++) { if (arr[byteOffset + i] === val[foundIndex === -1 ? 0 : i - foundIndex]) { if (foundIndex === -1) foundIndex = i if (i - foundIndex + 1 === val.length) return byteOffset + foundIndex } else { foundIndex = -1 } } return -1 } throw new TypeError('val must be string, number or Buffer') } // `get` will be removed in Node 0.13+ Buffer.prototype.get = function get (offset) { console.log('.get() is deprecated. Access using array indexes instead.') return this.readUInt8(offset) } // `set` will be removed in Node 0.13+ Buffer.prototype.set = function set (v, offset) { console.log('.set() is deprecated. 
Access using array indexes instead.') return this.writeUInt8(v, offset) } function hexWrite (buf, string, offset, length) { offset = Number(offset) || 0 var remaining = buf.length - offset if (!length) { length = remaining } else { length = Number(length) if (length > remaining) { length = remaining } } // must be an even number of digits var strLen = string.length if (strLen % 2 !== 0) throw new Error('Invalid hex string') if (length > strLen / 2) { length = strLen / 2 } for (var i = 0; i < length; i++) { var parsed = parseInt(string.substr(i * 2, 2), 16) if (isNaN(parsed)) throw new Error('Invalid hex string') buf[offset + i] = parsed } return i } function utf8Write (buf, string, offset, length) { return blitBuffer(utf8ToBytes(string, buf.length - offset), buf, offset, length) } function asciiWrite (buf, string, offset, length) { return blitBuffer(asciiToBytes(string), buf, offset, length) } function binaryWrite (buf, string, offset, length) { return asciiWrite(buf, string, offset, length) } function base64Write (buf, string, offset, length) { return blitBuffer(base64ToBytes(string), buf, offset, length) } function ucs2Write (buf, string, offset, length) { return blitBuffer(utf16leToBytes(string, buf.length - offset), buf, offset, length) } Buffer.prototype.write = function write (string, offset, length, encoding) { // Buffer#write(string) if (offset === undefined) { encoding = 'utf8' length = this.length offset = 0 // Buffer#write(string, encoding) } else if (length === undefined && typeof offset === 'string') { encoding = offset length = this.length offset = 0 // Buffer#write(string, offset[, length][, encoding]) } else if (isFinite(offset)) { offset = offset | 0 if (isFinite(length)) { length = length | 0 if (encoding === undefined) encoding = 'utf8' } else { encoding = length length = undefined } // legacy write(string, encoding, offset, length) - remove in v0.13 } else { var swap = encoding encoding = offset offset = length | 0 length = swap } var remaining 
= this.length - offset if (length === undefined || length > remaining) length = remaining if ((string.length > 0 && (length < 0 || offset < 0)) || offset > this.length) { throw new RangeError('attempt to write outside buffer bounds') } if (!encoding) encoding = 'utf8' var loweredCase = false for (;;) { switch (encoding) { case 'hex': return hexWrite(this, string, offset, length) case 'utf8': case 'utf-8': return utf8Write(this, string, offset, length) case 'ascii': return asciiWrite(this, string, offset, length) case 'binary': return binaryWrite(this, string, offset, length) case 'base64': // Warning: maxLength not taken into account in base64Write return base64Write(this, string, offset, length) case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': return ucs2Write(this, string, offset, length) default: if (loweredCase) throw new TypeError('Unknown encoding: ' + encoding) encoding = ('' + encoding).toLowerCase() loweredCase = true } } } Buffer.prototype.toJSON = function toJSON () { return { type: 'Buffer', data: Array.prototype.slice.call(this._arr || this, 0) } } function base64Slice (buf, start, end) { if (start === 0 && end === buf.length) { return base64.fromByteArray(buf) } else { return base64.fromByteArray(buf.slice(start, end)) } } function utf8Slice (buf, start, end) { var res = '' var tmp = '' end = Math.min(buf.length, end) for (var i = start; i < end; i++) { if (buf[i] <= 0x7F) { res += decodeUtf8Char(tmp) + String.fromCharCode(buf[i]) tmp = '' } else { tmp += '%' + buf[i].toString(16) } } return res + decodeUtf8Char(tmp) } function asciiSlice (buf, start, end) { var ret = '' end = Math.min(buf.length, end) for (var i = start; i < end; i++) { ret += String.fromCharCode(buf[i] & 0x7F) } return ret } function binarySlice (buf, start, end) { var ret = '' end = Math.min(buf.length, end) for (var i = start; i < end; i++) { ret += String.fromCharCode(buf[i]) } return ret } function hexSlice (buf, start, end) { var len = buf.length if (!start || start 
< 0) start = 0 if (!end || end < 0 || end > len) end = len var out = '' for (var i = start; i < end; i++) { out += toHex(buf[i]) } return out } function utf16leSlice (buf, start, end) { var bytes = buf.slice(start, end) var res = '' for (var i = 0; i < bytes.length; i += 2) { res += String.fromCharCode(bytes[i] + bytes[i + 1] * 256) } return res } Buffer.prototype.slice = function slice (start, end) { var len = this.length start = ~~start end = end === undefined ? len : ~~end if (start < 0) { start += len if (start < 0) start = 0 } else if (start > len) { start = len } if (end < 0) { end += len if (end < 0) end = 0 } else if (end > len) { end = len } if (end < start) end = start var newBuf if (Buffer.TYPED_ARRAY_SUPPORT) { newBuf = Buffer._augment(this.subarray(start, end)) } else { var sliceLen = end - start newBuf = new Buffer(sliceLen, undefined) for (var i = 0; i < sliceLen; i++) { newBuf[i] = this[i + start] } } if (newBuf.length) newBuf.parent = this.parent || this return newBuf } /* * Need to make sure that buffer isn't trying to write out of bounds. 
*/ function checkOffset (offset, ext, length) { if ((offset % 1) !== 0 || offset < 0) throw new RangeError('offset is not uint') if (offset + ext > length) throw new RangeError('Trying to access beyond buffer length') } Buffer.prototype.readUIntLE = function readUIntLE (offset, byteLength, noAssert) { offset = offset | 0 byteLength = byteLength | 0 if (!noAssert) checkOffset(offset, byteLength, this.length) var val = this[offset] var mul = 1 var i = 0 while (++i < byteLength && (mul *= 0x100)) { val += this[offset + i] * mul } return val } Buffer.prototype.readUIntBE = function readUIntBE (offset, byteLength, noAssert) { offset = offset | 0 byteLength = byteLength | 0 if (!noAssert) { checkOffset(offset, byteLength, this.length) } var val = this[offset + --byteLength] var mul = 1 while (byteLength > 0 && (mul *= 0x100)) { val += this[offset + --byteLength] * mul } return val } Buffer.prototype.readUInt8 = function readUInt8 (offset, noAssert) { if (!noAssert) checkOffset(offset, 1, this.length) return this[offset] } Buffer.prototype.readUInt16LE = function readUInt16LE (offset, noAssert) { if (!noAssert) checkOffset(offset, 2, this.length) return this[offset] | (this[offset + 1] << 8) } Buffer.prototype.readUInt16BE = function readUInt16BE (offset, noAssert) { if (!noAssert) checkOffset(offset, 2, this.length) return (this[offset] << 8) | this[offset + 1] } Buffer.prototype.readUInt32LE = function readUInt32LE (offset, noAssert) { if (!noAssert) checkOffset(offset, 4, this.length) return ((this[offset]) | (this[offset + 1] << 8) | (this[offset + 2] << 16)) + (this[offset + 3] * 0x1000000) } Buffer.prototype.readUInt32BE = function readUInt32BE (offset, noAssert) { if (!noAssert) checkOffset(offset, 4, this.length) return (this[offset] * 0x1000000) + ((this[offset + 1] << 16) | (this[offset + 2] << 8) | this[offset + 3]) } Buffer.prototype.readIntLE = function readIntLE (offset, byteLength, noAssert) { offset = offset | 0 byteLength = byteLength | 0 if (!noAssert) 
checkOffset(offset, byteLength, this.length) var val = this[offset] var mul = 1 var i = 0 while (++i < byteLength && (mul *= 0x100)) { val += this[offset + i] * mul } mul *= 0x80 if (val >= mul) val -= Math.pow(2, 8 * byteLength) return val } Buffer.prototype.readIntBE = function readIntBE (offset, byteLength, noAssert) { offset = offset | 0 byteLength = byteLength | 0 if (!noAssert) checkOffset(offset, byteLength, this.length) var i = byteLength var mul = 1 var val = this[offset + --i] while (i > 0 && (mul *= 0x100)) { val += this[offset + --i] * mul } mul *= 0x80 if (val >= mul) val -= Math.pow(2, 8 * byteLength) return val } Buffer.prototype.readInt8 = function readInt8 (offset, noAssert) { if (!noAssert) checkOffset(offset, 1, this.length) if (!(this[offset] & 0x80)) return (this[offset]) return ((0xff - this[offset] + 1) * -1) } Buffer.prototype.readInt16LE = function readInt16LE (offset, noAssert) { if (!noAssert) checkOffset(offset, 2, this.length) var val = this[offset] | (this[offset + 1] << 8) return (val & 0x8000) ? val | 0xFFFF0000 : val } Buffer.prototype.readInt16BE = function readInt16BE (offset, noAssert) { if (!noAssert) checkOffset(offset, 2, this.length) var val = this[offset + 1] | (this[offset] << 8) return (val & 0x8000) ? 
val | 0xFFFF0000 : val } Buffer.prototype.readInt32LE = function readInt32LE (offset, noAssert) { if (!noAssert) checkOffset(offset, 4, this.length) return (this[offset]) | (this[offset + 1] << 8) | (this[offset + 2] << 16) | (this[offset + 3] << 24) } Buffer.prototype.readInt32BE = function readInt32BE (offset, noAssert) { if (!noAssert) checkOffset(offset, 4, this.length) return (this[offset] << 24) | (this[offset + 1] << 16) | (this[offset + 2] << 8) | (this[offset + 3]) } Buffer.prototype.readFloatLE = function readFloatLE (offset, noAssert) { if (!noAssert) checkOffset(offset, 4, this.length) return ieee754.read(this, offset, true, 23, 4) } Buffer.prototype.readFloatBE = function readFloatBE (offset, noAssert) { if (!noAssert) checkOffset(offset, 4, this.length) return ieee754.read(this, offset, false, 23, 4) } Buffer.prototype.readDoubleLE = function readDoubleLE (offset, noAssert) { if (!noAssert) checkOffset(offset, 8, this.length) return ieee754.read(this, offset, true, 52, 8) } Buffer.prototype.readDoubleBE = function readDoubleBE (offset, noAssert) { if (!noAssert) checkOffset(offset, 8, this.length) return ieee754.read(this, offset, false, 52, 8) } function checkInt (buf, value, offset, ext, max, min) { if (!Buffer.isBuffer(buf)) throw new TypeError('buffer must be a Buffer instance') if (value > max || value < min) throw new RangeError('value is out of bounds') if (offset + ext > buf.length) throw new RangeError('index out of range') } Buffer.prototype.writeUIntLE = function writeUIntLE (value, offset, byteLength, noAssert) { value = +value offset = offset | 0 byteLength = byteLength | 0 if (!noAssert) checkInt(this, value, offset, byteLength, Math.pow(2, 8 * byteLength), 0) var mul = 1 var i = 0 this[offset] = value & 0xFF while (++i < byteLength && (mul *= 0x100)) { this[offset + i] = (value / mul) & 0xFF } return offset + byteLength } Buffer.prototype.writeUIntBE = function writeUIntBE (value, offset, byteLength, noAssert) { value = +value offset = 
offset | 0 byteLength = byteLength | 0 if (!noAssert) checkInt(this, value, offset, byteLength, Math.pow(2, 8 * byteLength), 0) var i = byteLength - 1 var mul = 1 this[offset + i] = value & 0xFF while (--i >= 0 && (mul *= 0x100)) { this[offset + i] = (value / mul) & 0xFF } return offset + byteLength } Buffer.prototype.writeUInt8 = function writeUInt8 (value, offset, noAssert) { value = +value offset = offset | 0 if (!noAssert) checkInt(this, value, offset, 1, 0xff, 0) if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value) this[offset] = value return offset + 1 } function objectWriteUInt16 (buf, value, offset, littleEndian) { if (value < 0) value = 0xffff + value + 1 for (var i = 0, j = Math.min(buf.length - offset, 2); i < j; i++) { buf[offset + i] = (value & (0xff << (8 * (littleEndian ? i : 1 - i)))) >>> (littleEndian ? i : 1 - i) * 8 } } Buffer.prototype.writeUInt16LE = function writeUInt16LE (value, offset, noAssert) { value = +value offset = offset | 0 if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) if (Buffer.TYPED_ARRAY_SUPPORT) { this[offset] = value this[offset + 1] = (value >>> 8) } else { objectWriteUInt16(this, value, offset, true) } return offset + 2 } Buffer.prototype.writeUInt16BE = function writeUInt16BE (value, offset, noAssert) { value = +value offset = offset | 0 if (!noAssert) checkInt(this, value, offset, 2, 0xffff, 0) if (Buffer.TYPED_ARRAY_SUPPORT) { this[offset] = (value >>> 8) this[offset + 1] = value } else { objectWriteUInt16(this, value, offset, false) } return offset + 2 } function objectWriteUInt32 (buf, value, offset, littleEndian) { if (value < 0) value = 0xffffffff + value + 1 for (var i = 0, j = Math.min(buf.length - offset, 4); i < j; i++) { buf[offset + i] = (value >>> (littleEndian ? 
i : 3 - i) * 8) & 0xff } } Buffer.prototype.writeUInt32LE = function writeUInt32LE (value, offset, noAssert) { value = +value offset = offset | 0 if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) if (Buffer.TYPED_ARRAY_SUPPORT) { this[offset + 3] = (value >>> 24) this[offset + 2] = (value >>> 16) this[offset + 1] = (value >>> 8) this[offset] = value } else { objectWriteUInt32(this, value, offset, true) } return offset + 4 } Buffer.prototype.writeUInt32BE = function writeUInt32BE (value, offset, noAssert) { value = +value offset = offset | 0 if (!noAssert) checkInt(this, value, offset, 4, 0xffffffff, 0) if (Buffer.TYPED_ARRAY_SUPPORT) { this[offset] = (value >>> 24) this[offset + 1] = (value >>> 16) this[offset + 2] = (value >>> 8) this[offset + 3] = value } else { objectWriteUInt32(this, value, offset, false) } return offset + 4 } Buffer.prototype.writeIntLE = function writeIntLE (value, offset, byteLength, noAssert) { value = +value offset = offset | 0 if (!noAssert) { var limit = Math.pow(2, 8 * byteLength - 1) checkInt(this, value, offset, byteLength, limit - 1, -limit) } var i = 0 var mul = 1 var sub = value < 0 ? 1 : 0 this[offset] = value & 0xFF while (++i < byteLength && (mul *= 0x100)) { this[offset + i] = ((value / mul) >> 0) - sub & 0xFF } return offset + byteLength } Buffer.prototype.writeIntBE = function writeIntBE (value, offset, byteLength, noAssert) { value = +value offset = offset | 0 if (!noAssert) { var limit = Math.pow(2, 8 * byteLength - 1) checkInt(this, value, offset, byteLength, limit - 1, -limit) } var i = byteLength - 1 var mul = 1 var sub = value < 0 ? 
1 : 0 this[offset + i] = value & 0xFF while (--i >= 0 && (mul *= 0x100)) { this[offset + i] = ((value / mul) >> 0) - sub & 0xFF } return offset + byteLength } Buffer.prototype.writeInt8 = function writeInt8 (value, offset, noAssert) { value = +value offset = offset | 0 if (!noAssert) checkInt(this, value, offset, 1, 0x7f, -0x80) if (!Buffer.TYPED_ARRAY_SUPPORT) value = Math.floor(value) if (value < 0) value = 0xff + value + 1 this[offset] = value return offset + 1 } Buffer.prototype.writeInt16LE = function writeInt16LE (value, offset, noAssert) { value = +value offset = offset | 0 if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) if (Buffer.TYPED_ARRAY_SUPPORT) { this[offset] = value this[offset + 1] = (value >>> 8) } else { objectWriteUInt16(this, value, offset, true) } return offset + 2 } Buffer.prototype.writeInt16BE = function writeInt16BE (value, offset, noAssert) { value = +value offset = offset | 0 if (!noAssert) checkInt(this, value, offset, 2, 0x7fff, -0x8000) if (Buffer.TYPED_ARRAY_SUPPORT) { this[offset] = (value >>> 8) this[offset + 1] = value } else { objectWriteUInt16(this, value, offset, false) } return offset + 2 } Buffer.prototype.writeInt32LE = function writeInt32LE (value, offset, noAssert) { value = +value offset = offset | 0 if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) if (Buffer.TYPED_ARRAY_SUPPORT) { this[offset] = value this[offset + 1] = (value >>> 8) this[offset + 2] = (value >>> 16) this[offset + 3] = (value >>> 24) } else { objectWriteUInt32(this, value, offset, true) } return offset + 4 } Buffer.prototype.writeInt32BE = function writeInt32BE (value, offset, noAssert) { value = +value offset = offset | 0 if (!noAssert) checkInt(this, value, offset, 4, 0x7fffffff, -0x80000000) if (value < 0) value = 0xffffffff + value + 1 if (Buffer.TYPED_ARRAY_SUPPORT) { this[offset] = (value >>> 24) this[offset + 1] = (value >>> 16) this[offset + 2] = (value >>> 8) this[offset + 3] = value } else { 
objectWriteUInt32(this, value, offset, false) } return offset + 4 } function checkIEEE754 (buf, value, offset, ext, max, min) { if (value > max || value < min) throw new RangeError('value is out of bounds') if (offset + ext > buf.length) throw new RangeError('index out of range') if (offset < 0) throw new RangeError('index out of range') } function writeFloat (buf, value, offset, littleEndian, noAssert) { if (!noAssert) { checkIEEE754(buf, value, offset, 4, 3.4028234663852886e+38, -3.4028234663852886e+38) } ieee754.write(buf, value, offset, littleEndian, 23, 4) return offset + 4 } Buffer.prototype.writeFloatLE = function writeFloatLE (value, offset, noAssert) { return writeFloat(this, value, offset, true, noAssert) } Buffer.prototype.writeFloatBE = function writeFloatBE (value, offset, noAssert) { return writeFloat(this, value, offset, false, noAssert) } function writeDouble (buf, value, offset, littleEndian, noAssert) { if (!noAssert) { checkIEEE754(buf, value, offset, 8, 1.7976931348623157E+308, -1.7976931348623157E+308) } ieee754.write(buf, value, offset, littleEndian, 52, 8) return offset + 8 } Buffer.prototype.writeDoubleLE = function writeDoubleLE (value, offset, noAssert) { return writeDouble(this, value, offset, true, noAssert) } Buffer.prototype.writeDoubleBE = function writeDoubleBE (value, offset, noAssert) { return writeDouble(this, value, offset, false, noAssert) } // copy(targetBuffer, targetStart=0, sourceStart=0, sourceEnd=buffer.length) Buffer.prototype.copy = function copy (target, targetStart, start, end) { if (!start) start = 0 if (!end && end !== 0) end = this.length if (targetStart >= target.length) targetStart = target.length if (!targetStart) targetStart = 0 if (end > 0 && end < start) end = start // Copy 0 bytes; we're done if (end === start) return 0 if (target.length === 0 || this.length === 0) return 0 // Fatal error conditions if (targetStart < 0) { throw new RangeError('targetStart out of bounds') } if (start < 0 || start >= 
this.length) throw new RangeError('sourceStart out of bounds') if (end < 0) throw new RangeError('sourceEnd out of bounds') // Are we oob? if (end > this.length) end = this.length if (target.length - targetStart < end - start) { end = target.length - targetStart + start } var len = end - start if (len < 1000 || !Buffer.TYPED_ARRAY_SUPPORT) { for (var i = 0; i < len; i++) { target[i + targetStart] = this[i + start] } } else { target._set(this.subarray(start, start + len), targetStart) } return len } // fill(value, start=0, end=buffer.length) Buffer.prototype.fill = function fill (value, start, end) { if (!value) value = 0 if (!start) start = 0 if (!end) end = this.length if (end < start) throw new RangeError('end < start') // Fill 0 bytes; we're done if (end === start) return if (this.length === 0) return if (start < 0 || start >= this.length) throw new RangeError('start out of bounds') if (end < 0 || end > this.length) throw new RangeError('end out of bounds') var i if (typeof value === 'number') { for (i = start; i < end; i++) { this[i] = value } } else { var bytes = utf8ToBytes(value.toString()) var len = bytes.length for (i = start; i < end; i++) { this[i] = bytes[i % len] } } return this } /** * Creates a new `ArrayBuffer` with the *copied* memory of the buffer instance. * Added in Node 0.12. Only available in browsers that support ArrayBuffer. */ Buffer.prototype.toArrayBuffer = function toArrayBuffer () { if (typeof Uint8Array !== 'undefined') { if (Buffer.TYPED_ARRAY_SUPPORT) { return (new Buffer(this)).buffer } else { var buf = new Uint8Array(this.length) for (var i = 0, len = buf.length; i < len; i += 1) { buf[i] = this[i] } return buf.buffer } } else { throw new TypeError('Buffer.toArrayBuffer not supported in this browser') } } // HELPER FUNCTIONS // ================ var BP = Buffer.prototype /** * Augment a Uint8Array *instance* (not the Uint8Array class!) 
with Buffer methods */ Buffer._augment = function _augment (arr) { arr.constructor = Buffer arr._isBuffer = true // save reference to original Uint8Array set method before overwriting arr._set = arr.set // deprecated, will be removed in node 0.13+ arr.get = BP.get arr.set = BP.set arr.write = BP.write arr.toString = BP.toString arr.toLocaleString = BP.toString arr.toJSON = BP.toJSON arr.equals = BP.equals arr.compare = BP.compare arr.indexOf = BP.indexOf arr.copy = BP.copy arr.slice = BP.slice arr.readUIntLE = BP.readUIntLE arr.readUIntBE = BP.readUIntBE arr.readUInt8 = BP.readUInt8 arr.readUInt16LE = BP.readUInt16LE arr.readUInt16BE = BP.readUInt16BE arr.readUInt32LE = BP.readUInt32LE arr.readUInt32BE = BP.readUInt32BE arr.readIntLE = BP.readIntLE arr.readIntBE = BP.readIntBE arr.readInt8 = BP.readInt8 arr.readInt16LE = BP.readInt16LE arr.readInt16BE = BP.readInt16BE arr.readInt32LE = BP.readInt32LE arr.readInt32BE = BP.readInt32BE arr.readFloatLE = BP.readFloatLE arr.readFloatBE = BP.readFloatBE arr.readDoubleLE = BP.readDoubleLE arr.readDoubleBE = BP.readDoubleBE arr.writeUInt8 = BP.writeUInt8 arr.writeUIntLE = BP.writeUIntLE arr.writeUIntBE = BP.writeUIntBE arr.writeUInt16LE = BP.writeUInt16LE arr.writeUInt16BE = BP.writeUInt16BE arr.writeUInt32LE = BP.writeUInt32LE arr.writeUInt32BE = BP.writeUInt32BE arr.writeIntLE = BP.writeIntLE arr.writeIntBE = BP.writeIntBE arr.writeInt8 = BP.writeInt8 arr.writeInt16LE = BP.writeInt16LE arr.writeInt16BE = BP.writeInt16BE arr.writeInt32LE = BP.writeInt32LE arr.writeInt32BE = BP.writeInt32BE arr.writeFloatLE = BP.writeFloatLE arr.writeFloatBE = BP.writeFloatBE arr.writeDoubleLE = BP.writeDoubleLE arr.writeDoubleBE = BP.writeDoubleBE arr.fill = BP.fill arr.inspect = BP.inspect arr.toArrayBuffer = BP.toArrayBuffer return arr } var INVALID_BASE64_RE = /[^+\/0-9A-z\-]/g function base64clean (str) { // Node strips out invalid characters like \n and \t from the string, base64-js does not str = 
stringtrim(str).replace(INVALID_BASE64_RE, '') // Node converts strings with length < 2 to '' if (str.length < 2) return '' // Node allows for non-padded base64 strings (missing trailing ===), base64-js does not while (str.length % 4 !== 0) { str = str + '=' } return str } function stringtrim (str) { if (str.trim) return str.trim() return str.replace(/^\s+|\s+$/g, '') } function toHex (n) { if (n < 16) return '0' + n.toString(16) return n.toString(16) } function utf8ToBytes (string, units) { units = units || Infinity var codePoint var length = string.length var leadSurrogate = null var bytes = [] var i = 0 for (; i < length; i++) { codePoint = string.charCodeAt(i) // is surrogate component if (codePoint > 0xD7FF && codePoint < 0xE000) { // last char was a lead if (leadSurrogate) { // 2 leads in a row if (codePoint < 0xDC00) { if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) leadSurrogate = codePoint continue } else { // valid surrogate pair codePoint = leadSurrogate - 0xD800 << 10 | codePoint - 0xDC00 | 0x10000 leadSurrogate = null } } else { // no lead yet if (codePoint > 0xDBFF) { // unexpected trail if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) continue } else if (i + 1 === length) { // unpaired lead if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) continue } else { // valid lead leadSurrogate = codePoint continue } } } else if (leadSurrogate) { // valid bmp char, but last char was a lead if ((units -= 3) > -1) bytes.push(0xEF, 0xBF, 0xBD) leadSurrogate = null } // encode utf8 if (codePoint < 0x80) { if ((units -= 1) < 0) break bytes.push(codePoint) } else if (codePoint < 0x800) { if ((units -= 2) < 0) break bytes.push( codePoint >> 0x6 | 0xC0, codePoint & 0x3F | 0x80 ) } else if (codePoint < 0x10000) { if ((units -= 3) < 0) break bytes.push( codePoint >> 0xC | 0xE0, codePoint >> 0x6 & 0x3F | 0x80, codePoint & 0x3F | 0x80 ) } else if (codePoint < 0x200000) { if ((units -= 4) < 0) break bytes.push( codePoint >> 0x12 | 0xF0, codePoint >> 0xC & 0x3F | 
0x80, codePoint >> 0x6 & 0x3F | 0x80, codePoint & 0x3F | 0x80 ) } else { throw new Error('Invalid code point') } } return bytes } function asciiToBytes (str) { var byteArray = [] for (var i = 0; i < str.length; i++) { // Node's code seems to be doing this and not & 0x7F.. byteArray.push(str.charCodeAt(i) & 0xFF) } return byteArray } function utf16leToBytes (str, units) { var c, hi, lo var byteArray = [] for (var i = 0; i < str.length; i++) { if ((units -= 2) < 0) break c = str.charCodeAt(i) hi = c >> 8 lo = c % 256 byteArray.push(lo) byteArray.push(hi) } return byteArray } function base64ToBytes (str) { return base64.toByteArray(base64clean(str)) } function blitBuffer (src, dst, offset, length) { for (var i = 0; i < length; i++) { if ((i + offset >= dst.length) || (i >= src.length)) break dst[i + offset] = src[i] } return i } function decodeUtf8Char (str) { try { return decodeURIComponent(str) } catch (err) { return String.fromCharCode(0xFFFD) // UTF 8 invalid char } } },{"base64-js":19,"ieee754":20,"is-array":21}],19:[function(require,module,exports){ var lookup = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'; ;(function (exports) { 'use strict'; var Arr = (typeof Uint8Array !== 'undefined') ? Uint8Array : Array var PLUS = '+'.charCodeAt(0) var SLASH = '/'.charCodeAt(0) var NUMBER = '0'.charCodeAt(0) var LOWER = 'a'.charCodeAt(0) var UPPER = 'A'.charCodeAt(0) var PLUS_URL_SAFE = '-'.charCodeAt(0) var SLASH_URL_SAFE = '_'.charCodeAt(0) function decode (elt) { var code = elt.charCodeAt(0) if (code === PLUS || code === PLUS_URL_SAFE) return 62 // '+' if (code === SLASH || code === SLASH_URL_SAFE) return 63 // '/' if (code < NUMBER) return -1 //no match if (code < NUMBER + 10) return code - NUMBER + 26 + 26 if (code < UPPER + 26) return code - UPPER if (code < LOWER + 26) return code - LOWER + 26 } function b64ToByteArray (b64) { var i, j, l, tmp, placeHolders, arr if (b64.length % 4 > 0) { throw new Error('Invalid string. 
Length must be a multiple of 4') } // the number of equal signs (place holders) // if there are two placeholders, than the two characters before it // represent one byte // if there is only one, then the three characters before it represent 2 bytes // this is just a cheap hack to not do indexOf twice var len = b64.length placeHolders = '=' === b64.charAt(len - 2) ? 2 : '=' === b64.charAt(len - 1) ? 1 : 0 // base64 is 4/3 + up to two characters of the original data arr = new Arr(b64.length * 3 / 4 - placeHolders) // if there are placeholders, only get up to the last complete 4 chars l = placeHolders > 0 ? b64.length - 4 : b64.length var L = 0 function push (v) { arr[L++] = v } for (i = 0, j = 0; i < l; i += 4, j += 3) { tmp = (decode(b64.charAt(i)) << 18) | (decode(b64.charAt(i + 1)) << 12) | (decode(b64.charAt(i + 2)) << 6) | decode(b64.charAt(i + 3)) push((tmp & 0xFF0000) >> 16) push((tmp & 0xFF00) >> 8) push(tmp & 0xFF) } if (placeHolders === 2) { tmp = (decode(b64.charAt(i)) << 2) | (decode(b64.charAt(i + 1)) >> 4) push(tmp & 0xFF) } else if (placeHolders === 1) { tmp = (decode(b64.charAt(i)) << 10) | (decode(b64.charAt(i + 1)) << 4) | (decode(b64.charAt(i + 2)) >> 2) push((tmp >> 8) & 0xFF) push(tmp & 0xFF) } return arr } function uint8ToBase64 (uint8) { var i, extraBytes = uint8.length % 3, // if we have 1 byte left, pad 2 bytes output = "", temp, length function encode (num) { return lookup.charAt(num) } function tripletToBase64 (num) { return encode(num >> 18 & 0x3F) + encode(num >> 12 & 0x3F) + encode(num >> 6 & 0x3F) + encode(num & 0x3F) } // go through the array every three bytes, we'll deal with trailing stuff later for (i = 0, length = uint8.length - extraBytes; i < length; i += 3) { temp = (uint8[i] << 16) + (uint8[i + 1] << 8) + (uint8[i + 2]) output += tripletToBase64(temp) } // pad the end with zeros, but make sure to not forget the extra bytes switch (extraBytes) { case 1: temp = uint8[uint8.length - 1] output += encode(temp >> 2) output += 
encode((temp << 4) & 0x3F) output += '==' break case 2: temp = (uint8[uint8.length - 2] << 8) + (uint8[uint8.length - 1]) output += encode(temp >> 10) output += encode((temp >> 4) & 0x3F) output += encode((temp << 2) & 0x3F) output += '=' break } return output } exports.toByteArray = b64ToByteArray exports.fromByteArray = uint8ToBase64 }(typeof exports === 'undefined' ? (this.base64js = {}) : exports)) },{}],20:[function(require,module,exports){ exports.read = function (buffer, offset, isLE, mLen, nBytes) { var e, m var eLen = nBytes * 8 - mLen - 1 var eMax = (1 << eLen) - 1 var eBias = eMax >> 1 var nBits = -7 var i = isLE ? (nBytes - 1) : 0 var d = isLE ? -1 : 1 var s = buffer[offset + i] i += d e = s & ((1 << (-nBits)) - 1) s >>= (-nBits) nBits += eLen for (; nBits > 0; e = e * 256 + buffer[offset + i], i += d, nBits -= 8) {} m = e & ((1 << (-nBits)) - 1) e >>= (-nBits) nBits += mLen for (; nBits > 0; m = m * 256 + buffer[offset + i], i += d, nBits -= 8) {} if (e === 0) { e = 1 - eBias } else if (e === eMax) { return m ? NaN : ((s ? -1 : 1) * Infinity) } else { m = m + Math.pow(2, mLen) e = e - eBias } return (s ? -1 : 1) * m * Math.pow(2, e - mLen) } exports.write = function (buffer, value, offset, isLE, mLen, nBytes) { var e, m, c var eLen = nBytes * 8 - mLen - 1 var eMax = (1 << eLen) - 1 var eBias = eMax >> 1 var rt = (mLen === 23 ? Math.pow(2, -24) - Math.pow(2, -77) : 0) var i = isLE ? 0 : (nBytes - 1) var d = isLE ? 1 : -1 var s = value < 0 || (value === 0 && 1 / value < 0) ? 1 : 0 value = Math.abs(value) if (isNaN(value) || value === Infinity) { m = isNaN(value) ? 
1 : 0 e = eMax } else { e = Math.floor(Math.log(value) / Math.LN2) if (value * (c = Math.pow(2, -e)) < 1) { e-- c *= 2 } if (e + eBias >= 1) { value += rt / c } else { value += rt * Math.pow(2, 1 - eBias) } if (value * c >= 2) { e++ c /= 2 } if (e + eBias >= eMax) { m = 0 e = eMax } else if (e + eBias >= 1) { m = (value * c - 1) * Math.pow(2, mLen) e = e + eBias } else { m = value * Math.pow(2, eBias - 1) * Math.pow(2, mLen) e = 0 } } for (; mLen >= 8; buffer[offset + i] = m & 0xff, i += d, m /= 256, mLen -= 8) {} e = (e << mLen) | m eLen += mLen for (; eLen > 0; buffer[offset + i] = e & 0xff, i += d, e /= 256, eLen -= 8) {} buffer[offset + i - d] |= s * 128 } },{}],21:[function(require,module,exports){ /** * isArray */ var isArray = Array.isArray; /** * toString */ var str = Object.prototype.toString; /** * Whether or not the given `val` * is an array. * * example: * * isArray([]); * // > true * isArray(arguments); * // > false * isArray(''); * // > false * * @param {mixed} val * @return {bool} */ module.exports = isArray || function (val) { return !! val && '[object Array]' == str.call(val); }; },{}],22:[function(require,module,exports){ // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. 
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// Browser shim of Node's events module. `_events` maps event type to either
// a single listener function or an array of listeners.
function EventEmitter() {
  this._events = this._events || {};
  this._maxListeners = this._maxListeners || undefined;
}
module.exports = EventEmitter;

// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;

EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;

// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
EventEmitter.defaultMaxListeners = 10;

// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter.prototype.setMaxListeners = function(n) {
  if (!isNumber(n) || n < 0 || isNaN(n))
    throw TypeError('n must be a positive number');
  this._maxListeners = n;
  return this;
};

// Invoke all listeners for `type` with the remaining arguments.
// Returns true if there was at least one listener, false otherwise.
// Throws if an 'error' event is emitted with no listener attached.
EventEmitter.prototype.emit = function(type) {
  var er, handler, len, args, i, listeners;

  if (!this._events)
    this._events = {};

  // If there is no 'error' event listener then throw.
  if (type === 'error') {
    if (!this._events.error ||
        (isObject(this._events.error) && !this._events.error.length)) {
      er = arguments[1];
      if (er instanceof Error) {
        throw er; // Unhandled 'error' event
      }
      throw TypeError('Uncaught, unspecified "error" event.');
    }
  }

  handler = this._events[type];

  if (isUndefined(handler))
    return false;

  if (isFunction(handler)) {
    switch (arguments.length) {
      // fast cases
      case 1:
        handler.call(this);
        break;
      case 2:
        handler.call(this, arguments[1]);
        break;
      case 3:
        handler.call(this, arguments[1], arguments[2]);
        break;
      // slower
      default:
        len = arguments.length;
        args = new Array(len - 1);
        for (i = 1; i < len; i++)
          args[i - 1] = arguments[i];
        handler.apply(this, args);
    }
  } else if (isObject(handler)) {
    len = arguments.length;
    args = new Array(len - 1);
    for (i = 1; i < len; i++)
      args[i - 1] = arguments[i];

    // copy the array so listeners removed during emit are still called
    listeners = handler.slice();
    len = listeners.length;
    for (i = 0; i < len; i++)
      listeners[i].apply(this, args);
  }

  return true;
};

// Append `listener` for `type`. Emits 'newListener' first and warns (once
// per type) when the listener count exceeds the max-listeners limit.
// FIX: the original declared `var m;` twice inside this function (once at
// the top and again inside the leak-check branch); the shadowing
// redeclaration has been removed.
EventEmitter.prototype.addListener = function(type, listener) {
  var m;

  if (!isFunction(listener))
    throw TypeError('listener must be a function');

  if (!this._events)
    this._events = {};

  // To avoid recursion in the case that type === "newListener"! Before
  // adding it to the listeners, first emit "newListener".
  if (this._events.newListener)
    this.emit('newListener', type,
              isFunction(listener.listener) ?
              listener.listener : listener);

  if (!this._events[type])
    // Optimize the case of one listener. Don't need the extra array object.
    this._events[type] = listener;
  else if (isObject(this._events[type]))
    // If we've already got an array, just append.
    this._events[type].push(listener);
  else
    // Adding the second element, need to change to array.
    this._events[type] = [this._events[type], listener];

  // Check for listener leak
  if (isObject(this._events[type]) && !this._events[type].warned) {
    if (!isUndefined(this._maxListeners)) {
      m = this._maxListeners;
    } else {
      m = EventEmitter.defaultMaxListeners;
    }

    if (m && m > 0 && this._events[type].length > m) {
      this._events[type].warned = true;
      console.error('(node) warning: possible EventEmitter memory ' +
                    'leak detected. %d listeners added. ' +
                    'Use emitter.setMaxListeners() to increase limit.',
                    this._events[type].length);
      if (typeof console.trace === 'function') {
        // not supported in IE 10
        console.trace();
      }
    }
  }

  return this;
};

EventEmitter.prototype.on = EventEmitter.prototype.addListener;

// Add a listener that is removed after its first invocation. The `fired`
// guard protects against re-entrant emits firing the wrapper twice.
EventEmitter.prototype.once = function(type, listener) {
  if (!isFunction(listener))
    throw TypeError('listener must be a function');

  var fired = false;

  function g() {
    this.removeListener(type, g);

    if (!fired) {
      fired = true;
      listener.apply(this, arguments);
    }
  }

  g.listener = listener;
  this.on(type, g);

  return this;
};

// emits a 'removeListener' event iff the listener was removed
EventEmitter.prototype.removeListener = function(type, listener) {
  var list, position, length, i;

  if (!isFunction(listener))
    throw TypeError('listener must be a function');

  if (!this._events || !this._events[type])
    return this;

  list = this._events[type];
  length = list.length;
  position = -1;

  // Single-listener slot: match directly or via the `once` wrapper's
  // `.listener` back-reference.
  if (list === listener ||
      (isFunction(list.listener) && list.listener === listener)) {
    delete this._events[type];
    if (this._events.removeListener)
      this.emit('removeListener', type, listener);

  } else if (isObject(list)) {
    // Array of listeners: scan backwards so the most recently added
    // matching listener is removed.
    for (i = length; i-- > 0;) {
      if (list[i] === listener ||
          (list[i].listener && list[i].listener === listener)) {
        position = i;
        break;
      }
    }

    if (position < 0)
      return this;

    if (list.length === 1) {
      list.length = 0;
      delete this._events[type];
    } else {
      list.splice(position, 1);
    }

    if (this._events.removeListener)
      this.emit('removeListener', type, listener);
  }

  return this;
};
EventEmitter.prototype.removeAllListeners = function(type) { var key, listeners; if (!this._events) return this; // not listening for removeListener, no need to emit if (!this._events.removeListener) { if (arguments.length === 0) this._events = {}; else if (this._events[type]) delete this._events[type]; return this; } // emit removeListener for all listeners on all events if (arguments.length === 0) { for (key in this._events) { if (key === 'removeListener') continue; this.removeAllListeners(key); } this.removeAllListeners('removeListener'); this._events = {}; return this; } listeners = this._events[type]; if (isFunction(listeners)) { this.removeListener(type, listeners); } else { // LIFO order while (listeners.length) this.removeListener(type, listeners[listeners.length - 1]); } delete this._events[type]; return this; }; EventEmitter.prototype.listeners = function(type) { var ret; if (!this._events || !this._events[type]) ret = []; else if (isFunction(this._events[type])) ret = [this._events[type]]; else ret = this._events[type].slice(); return ret; }; EventEmitter.listenerCount = function(emitter, type) { var ret; if (!emitter._events || !emitter._events[type]) ret = 0; else if (isFunction(emitter._events[type])) ret = 1; else ret = emitter._events[type].length; return ret; }; function isFunction(arg) { return typeof arg === 'function'; } function isNumber(arg) { return typeof arg === 'number'; } function isObject(arg) { return typeof arg === 'object' && arg !== null; } function isUndefined(arg) { return arg === void 0; } },{}],23:[function(require,module,exports){ arguments[4][16][0].apply(exports,arguments) },{"dup":16}],24:[function(require,module,exports){ module.exports = Array.isArray || function (arr) { return Object.prototype.toString.call(arr) == '[object Array]'; }; },{}],25:[function(require,module,exports){ // shim for using process in browser var process = module.exports = {}; var queue = []; var draining = false; var currentQueue; var queueIndex = 
-1; function cleanUpNextTick() { draining = false; if (currentQueue.length) { queue = currentQueue.concat(queue); } else { queueIndex = -1; } if (queue.length) { drainQueue(); } } function drainQueue() { if (draining) { return; } var timeout = setTimeout(cleanUpNextTick); draining = true; var len = queue.length; while(len) { currentQueue = queue; queue = []; while (++queueIndex < len) { currentQueue[queueIndex].run(); } queueIndex = -1; len = queue.length; } currentQueue = null; draining = false; clearTimeout(timeout); } process.nextTick = function (fun) { var args = new Array(arguments.length - 1); if (arguments.length > 1) { for (var i = 1; i < arguments.length; i++) { args[i - 1] = arguments[i]; } } queue.push(new Item(fun, args)); if (queue.length === 1 && !draining) { setTimeout(drainQueue, 0); } }; // v8 likes predictible objects function Item(fun, array) { this.fun = fun; this.array = array; } Item.prototype.run = function () { this.fun.apply(null, this.array); }; process.title = 'browser'; process.browser = true; process.env = {}; process.argv = []; process.version = ''; // empty string to avoid regexp issues process.versions = {}; function noop() {} process.on = noop; process.addListener = noop; process.once = noop; process.off = noop; process.removeListener = noop; process.removeAllListeners = noop; process.emit = noop; process.binding = function (name) { throw new Error('process.binding is not supported'); }; // TODO(shtylman) process.cwd = function () { return '/' }; process.chdir = function (dir) { throw new Error('process.chdir is not supported'); }; process.umask = function() { return 0; }; },{}],26:[function(require,module,exports){ module.exports = require("./lib/_stream_duplex.js") },{"./lib/_stream_duplex.js":27}],27:[function(require,module,exports){ (function (process){ // Copyright Joyent, Inc. and other Node contributors. 
// // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // a duplex stream is just a stream that is both readable and writable. // Since JS doesn't have multiple prototypal inheritance, this class // prototypally inherits from Readable, and then parasitically from // Writable. 
module.exports = Duplex; /*<replacement>*/ var objectKeys = Object.keys || function (obj) { var keys = []; for (var key in obj) keys.push(key); return keys; } /*</replacement>*/ /*<replacement>*/ var util = require('core-util-is'); util.inherits = require('inherits'); /*</replacement>*/ var Readable = require('./_stream_readable'); var Writable = require('./_stream_writable'); util.inherits(Duplex, Readable); forEach(objectKeys(Writable.prototype), function(method) { if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; }); function Duplex(options) { if (!(this instanceof Duplex)) return new Duplex(options); Readable.call(this, options); Writable.call(this, options); if (options && options.readable === false) this.readable = false; if (options && options.writable === false) this.writable = false; this.allowHalfOpen = true; if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; this.once('end', onend); } // the no-half-open enforcer function onend() { // if we allow half-open state, or if the writable side ended, // then we're ok. if (this.allowHalfOpen || this._writableState.ended) return; // no more data can be written. // But allow more writes to happen in this tick. process.nextTick(this.end.bind(this)); } function forEach (xs, f) { for (var i = 0, l = xs.length; i < l; i++) { f(xs[i], i); } } }).call(this,require('_process')) },{"./_stream_readable":29,"./_stream_writable":31,"_process":25,"core-util-is":32,"inherits":23}],28:[function(require,module,exports){ // Copyright Joyent, Inc. and other Node contributors. 
// // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // a passthrough stream. // basically just the most minimal sort of Transform stream. // Every written chunk gets output as-is. module.exports = PassThrough; var Transform = require('./_stream_transform'); /*<replacement>*/ var util = require('core-util-is'); util.inherits = require('inherits'); /*</replacement>*/ util.inherits(PassThrough, Transform); function PassThrough(options) { if (!(this instanceof PassThrough)) return new PassThrough(options); Transform.call(this, options); } PassThrough.prototype._transform = function(chunk, encoding, cb) { cb(null, chunk); }; },{"./_stream_transform":30,"core-util-is":32,"inherits":23}],29:[function(require,module,exports){ (function (process){ // Copyright Joyent, Inc. and other Node contributors. 
// // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
// browserify module 29: readable-stream _stream_readable.js (vendored).
// Implements the Readable stream base class: buffering, highWaterMark
// accounting, flowing vs. paused modes, and pipe plumbing.
module.exports = Readable;

/*<replacement>*/
var isArray = require('isarray');
/*</replacement>*/


/*<replacement>*/
var Buffer = require('buffer').Buffer;
/*</replacement>*/

Readable.ReadableState = ReadableState;

var EE = require('events').EventEmitter;

/*<replacement>*/
// Shim EventEmitter.listenerCount for environments that lack the static.
if (!EE.listenerCount) EE.listenerCount = function(emitter, type) {
  return emitter.listeners(type).length;
};
/*</replacement>*/

var Stream = require('stream');

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

// lazily required below to avoid pulling in string_decoder unless needed.
var StringDecoder;

/*<replacement>*/
// util.debuglog('stream') where available; a no-op elsewhere (browsers).
var debug = require('util');
if (debug && debug.debuglog) {
  debug = debug.debuglog('stream');
} else {
  debug = function () {};
}
/*</replacement>*/

util.inherits(Readable, Stream);

// Per-stream mutable state bag shared by all Readable internals.
function ReadableState(options, stream) {
  var Duplex = require('./_stream_duplex');

  options = options || {};

  // the point at which it stops calling _read() to fill the buffer
  // Note: 0 is a valid value, means "don't call _read preemptively ever"
  var hwm = options.highWaterMark;
  var defaultHwm = options.objectMode ? 16 : 16 * 1024;
  this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm;

  // cast to ints.
  this.highWaterMark = ~~this.highWaterMark;

  this.buffer = [];
  this.length = 0;
  this.pipes = null;
  this.pipesCount = 0;
  // flowing is tri-state: null (undecided), true (flowing), false (paused).
  this.flowing = null;
  this.ended = false;
  this.endEmitted = false;
  this.reading = false;

  // a flag to be able to tell if the onwrite cb is called immediately,
  // or on a later tick.  We set this to true at first, because any
  // actions that shouldn't happen until "later" should generally also
  // not happen before the first write call.
  this.sync = true;

  // whenever we return null, then we set a flag to say
  // that we're awaiting a 'readable' event emission.
  this.needReadable = false;
  this.emittedReadable = false;
  this.readableListening = false;


  // object stream flag. Used to make read(n) ignore n and to
  // make all the buffer merging and length checks go away
  this.objectMode = !!options.objectMode;

  if (stream instanceof Duplex)
    this.objectMode = this.objectMode || !!options.readableObjectMode;

  // Crypto is kind of old and crusty.  Historically, its default string
  // encoding is 'binary' so we have to make this configurable.
  // Everything else in the universe uses 'utf8', though.
  this.defaultEncoding = options.defaultEncoding || 'utf8';

  // when piping, we only care about 'readable' events that happen
  // after read()ing all the bytes and not getting any pushback.
  this.ranOut = false;

  // the number of writers that are awaiting a drain event in .pipe()s
  this.awaitDrain = 0;

  // if true, a maybeReadMore has been scheduled
  this.readingMore = false;

  this.decoder = null;
  this.encoding = null;
  if (options.encoding) {
    if (!StringDecoder)
      StringDecoder = require('string_decoder/').StringDecoder;
    this.decoder = new StringDecoder(options.encoding);
    this.encoding = options.encoding;
  }
}

// Readable base-class constructor; usable with or without `new`.
function Readable(options) {
  var Duplex = require('./_stream_duplex');

  if (!(this instanceof Readable))
    return new Readable(options);

  this._readableState = new ReadableState(options, this);

  // legacy
  this.readable = true;

  Stream.call(this);
}

// Manually shove something into the read() buffer.
// This returns true if the highWaterMark has not been hit yet,
// similar to how Writable.write() returns true if you should
// write() some more.
// Append a chunk to the tail of the internal buffer (producer side).
// Strings are converted to Buffers here unless in objectMode.
Readable.prototype.push = function(chunk, encoding) {
  var state = this._readableState;

  if (util.isString(chunk) && !state.objectMode) {
    encoding = encoding || state.defaultEncoding;
    if (encoding !== state.encoding) {
      chunk = new Buffer(chunk, encoding);
      encoding = '';
    }
  }

  return readableAddChunk(this, state, chunk, encoding, false);
};

// Unshift should *always* be something directly out of read()
Readable.prototype.unshift = function(chunk) {
  var state = this._readableState;
  return readableAddChunk(this, state, chunk, '', true);
};

// Shared worker for push()/unshift(): validates the chunk, handles EOF
// (null chunk), emits immediately when flowing, or buffers otherwise.
function readableAddChunk(stream, state, chunk, encoding, addToFront) {
  var er = chunkInvalid(state, chunk);
  if (er) {
    stream.emit('error', er);
  } else if (util.isNullOrUndefined(chunk)) {
    // null/undefined chunk signals end-of-stream.
    state.reading = false;
    if (!state.ended)
      onEofChunk(stream, state);
  } else if (state.objectMode || chunk && chunk.length > 0) {
    if (state.ended && !addToFront) {
      var e = new Error('stream.push() after EOF');
      stream.emit('error', e);
    } else if (state.endEmitted && addToFront) {
      var e = new Error('stream.unshift() after end event');
      stream.emit('error', e);
    } else {
      if (state.decoder && !addToFront && !encoding)
        chunk = state.decoder.write(chunk);

      if (!addToFront)
        state.reading = false;

      // if we want the data now, just emit it.
      if (state.flowing && state.length === 0 && !state.sync) {
        stream.emit('data', chunk);
        stream.read(0);
      } else {
        // update the buffer info.
        state.length += state.objectMode ? 1 : chunk.length;
        if (addToFront)
          state.buffer.unshift(chunk);
        else
          state.buffer.push(chunk);

        if (state.needReadable)
          emitReadable(stream);
      }

      maybeReadMore(stream, state);
    }
  } else if (!addToFront) {
    // zero-length chunk in byte mode: just clear the reading flag.
    state.reading = false;
  }

  return needMoreData(state);
}

// if it's past the high water mark, we can push in some more.
// Also, if we have no data yet, we can stand some
// more bytes.  This is to work around cases where hwm=0,
// such as the repl.  Also, if the push() triggered a
// readable event, and the user called read(largeNumber) such that
// needReadable was set, then we ought to push more, so that another
// 'readable' event will be triggered.
function needMoreData(state) {
  return !state.ended &&
         (state.needReadable ||
          state.length < state.highWaterMark ||
          state.length === 0);
}

// backwards compatibility.
Readable.prototype.setEncoding = function(enc) {
  if (!StringDecoder)
    StringDecoder = require('string_decoder/').StringDecoder;
  this._readableState.decoder = new StringDecoder(enc);
  this._readableState.encoding = enc;
  return this;
};

// Don't raise the hwm > 128MB
var MAX_HWM = 0x800000;
// Round n up to the next power of two (bit-smearing trick), capped at MAX_HWM.
function roundUpToNextPowerOf2(n) {
  if (n >= MAX_HWM) {
    n = MAX_HWM;
  } else {
    // Get the next highest power of 2
    n--;
    for (var p = 1; p < 32; p <<= 1) n |= n >> p;
    n++;
  }
  return n;
}

// Decide how many bytes (or objects) read(n) may actually return given
// the current buffer contents; 0 means "nothing available right now".
function howMuchToRead(n, state) {
  if (state.length === 0 && state.ended)
    return 0;

  if (state.objectMode)
    return n === 0 ? 0 : 1;

  if (isNaN(n) || util.isNull(n)) {
    // only flow one buffer at a time
    if (state.flowing && state.buffer.length)
      return state.buffer[0].length;
    else
      return state.length;
  }

  if (n <= 0)
    return 0;

  // If we're asking for more than the target buffer level,
  // then raise the water mark.  Bump up to the next highest
  // power of 2, to prevent increasing it excessively in tiny
  // amounts.
  if (n > state.highWaterMark)
    state.highWaterMark = roundUpToNextPowerOf2(n);

  // don't have that much.  return null, unless we've ended.
  if (n > state.length) {
    if (!state.ended) {
      state.needReadable = true;
      return 0;
    } else
      return state.length;
  }

  return n;
}

// you can override either this method, or the async _read(n) below.
Readable.prototype.read = function(n) { debug('read', n); var state = this._readableState; var nOrig = n; if (!util.isNumber(n) || n > 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we // already have a bunch of data in the buffer, then just trigger // the 'readable' event and move on. if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { debug('read: emitReadable', state.length, state.ended); if (state.length === 0 && state.ended) endReadable(this); else emitReadable(this); return null; } n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. if (n === 0 && state.ended) { if (state.length === 0) endReadable(this); return null; } // All the actual chunk generation logic needs to be // *below* the call to _read. The reason is that in certain // synthetic stream cases, such as passthrough streams, _read // may be a completely synchronous operation which may change // the state of the read buffer, providing enough data when // before there was *not* enough. // // So, the steps are: // 1. Figure out what the state of things will be after we do // a read from the buffer. // // 2. If that resulting state will trigger a _read, then call _read. // Note that this may be asynchronous, or synchronous. Yes, it is // deeply ugly to write APIs this way, but that still doesn't mean // that the Readable class should behave improperly, as streams are // designed to be sync/async agnostic. // Take note if the _read call is sync or async (ie, if the read call // has returned yet), so that we know whether or not it's safe to emit // 'readable' etc. // // 3. Actually pull the requested chunks out of the buffer and return. // if we need a readable event, then we need to do some reading. 
var doRead = state.needReadable; debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some if (state.length === 0 || state.length - n < state.highWaterMark) { doRead = true; debug('length less than watermark', doRead); } // however, if we've ended, then there's no point, and if we're already // reading, then it's unnecessary. if (state.ended || state.reading) { doRead = false; debug('reading or ended', doRead); } if (doRead) { debug('do read'); state.reading = true; state.sync = true; // if the length is currently zero, then we *need* a readable event. if (state.length === 0) state.needReadable = true; // call internal read method this._read(state.highWaterMark); state.sync = false; } // If _read pushed data synchronously, then `reading` will be false, // and we need to re-evaluate how much data we can return to the user. if (doRead && !state.reading) n = howMuchToRead(nOrig, state); var ret; if (n > 0) ret = fromList(n, state); else ret = null; if (util.isNull(ret)) { state.needReadable = true; n = 0; } state.length -= n; // If we have nothing in the buffer, then we want to know // as soon as we *do* get something into the buffer. if (state.length === 0 && !state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. if (nOrig !== n && state.ended && state.length === 0) endReadable(this); if (!util.isNull(ret)) this.emit('data', ret); return ret; }; function chunkInvalid(state, chunk) { var er = null; if (!util.isBuffer(chunk) && !util.isString(chunk) && !util.isNullOrUndefined(chunk) && !state.objectMode) { er = new TypeError('Invalid non-string/buffer chunk'); } return er; } function onEofChunk(stream, state) { if (state.decoder && !state.ended) { var chunk = state.decoder.end(); if (chunk && chunk.length) { state.buffer.push(chunk); state.length += state.objectMode ? 1 : chunk.length; } } state.ended = true; // emit 'readable' now to make sure it gets picked up. 
emitReadable(stream); } // Don't emit readable right away in sync mode, because this can trigger // another read() call => stack overflow. This way, it might trigger // a nextTick recursion warning, but that's not so bad. function emitReadable(stream) { var state = stream._readableState; state.needReadable = false; if (!state.emittedReadable) { debug('emitReadable', state.flowing); state.emittedReadable = true; if (state.sync) process.nextTick(function() { emitReadable_(stream); }); else emitReadable_(stream); } } function emitReadable_(stream) { debug('emit readable'); stream.emit('readable'); flow(stream); } // at this point, the user has presumably seen the 'readable' event, // and called read() to consume some data. that may have triggered // in turn another _read(n) call, in which case reading = true if // it's in progress. // However, if we're not ended, or reading, and the length < hwm, // then go ahead and try to read some more preemptively. function maybeReadMore(stream, state) { if (!state.readingMore) { state.readingMore = true; process.nextTick(function() { maybeReadMore_(stream, state); }); } } function maybeReadMore_(stream, state) { var len = state.length; while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { debug('maybeReadMore read 0'); stream.read(0); if (len === state.length) // didn't get any data, stop spinning. break; else len = state.length; } state.readingMore = false; } // abstract method. to be overridden in specific implementation classes. // call cb(er, data) where data is <= n in length. // for virtual (non-string, non-buffer) streams, "length" is somewhat // arbitrary, and perhaps not very meaningful. 
// Default _read: subclasses must override; emits an error otherwise.
Readable.prototype._read = function(n) {
  this.emit('error', new Error('not implemented'));
};

// Connect this readable to a writable `dest`, wiring up backpressure
// ('drain'/pause), end propagation, error handling, and cleanup.
Readable.prototype.pipe = function(dest, pipeOpts) {
  var src = this;
  var state = this._readableState;

  // pipes is null / single dest / array of dests depending on count.
  switch (state.pipesCount) {
    case 0:
      state.pipes = dest;
      break;
    case 1:
      state.pipes = [state.pipes, dest];
      break;
    default:
      state.pipes.push(dest);
      break;
  }
  state.pipesCount += 1;
  debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts);

  // never auto-end stdout/stderr, even with {end: true}.
  var doEnd = (!pipeOpts || pipeOpts.end !== false) &&
              dest !== process.stdout &&
              dest !== process.stderr;

  var endFn = doEnd ? onend : cleanup;
  if (state.endEmitted)
    process.nextTick(endFn);
  else
    src.once('end', endFn);

  dest.on('unpipe', onunpipe);
  function onunpipe(readable) {
    debug('onunpipe');
    if (readable === src) {
      cleanup();
    }
  }

  function onend() {
    debug('onend');
    dest.end();
  }

  // when the dest drains, it reduces the awaitDrain counter
  // on the source.  This would be more elegant with a .once()
  // handler in flow(), but adding and removing repeatedly is
  // too slow.
  var ondrain = pipeOnDrain(src);
  dest.on('drain', ondrain);

  function cleanup() {
    debug('cleanup');
    // cleanup event handlers once the pipe is broken
    dest.removeListener('close', onclose);
    dest.removeListener('finish', onfinish);
    dest.removeListener('drain', ondrain);
    dest.removeListener('error', onerror);
    dest.removeListener('unpipe', onunpipe);
    src.removeListener('end', onend);
    src.removeListener('end', cleanup);
    src.removeListener('data', ondata);

    // if the reader is waiting for a drain event from this
    // specific writer, then it would cause it to never start
    // flowing again.
    // So, if this is awaiting a drain, then we just call it now.
    // If we don't know, then assume that we are waiting for one.
    if (state.awaitDrain &&
        (!dest._writableState || dest._writableState.needDrain))
      ondrain();
  }

  src.on('data', ondata);
  function ondata(chunk) {
    debug('ondata');
    var ret = dest.write(chunk);
    if (false === ret) {
      // dest is full: remember we owe a drain, and pause the source.
      debug('false write response, pause',
            src._readableState.awaitDrain);
      src._readableState.awaitDrain++;
      src.pause();
    }
  }

  // if the dest has an error, then stop piping into it.
  // however, don't suppress the throwing behavior for this.
  function onerror(er) {
    debug('onerror', er);
    unpipe();
    dest.removeListener('error', onerror);
    if (EE.listenerCount(dest, 'error') === 0)
      dest.emit('error', er);
  }
  // This is a brutally ugly hack to make sure that our error handler
  // is attached before any userland ones.  NEVER DO THIS.
  if (!dest._events || !dest._events.error)
    dest.on('error', onerror);
  else if (isArray(dest._events.error))
    dest._events.error.unshift(onerror);
  else
    dest._events.error = [onerror, dest._events.error];



  // Both close and finish should trigger unpipe, but only once.
  function onclose() {
    dest.removeListener('finish', onfinish);
    unpipe();
  }
  dest.once('close', onclose);
  function onfinish() {
    debug('onfinish');
    dest.removeListener('close', onclose);
    unpipe();
  }
  dest.once('finish', onfinish);

  function unpipe() {
    debug('unpipe');
    src.unpipe(dest);
  }

  // tell the dest that it's being piped to
  dest.emit('pipe', src);

  // start the flow if it hasn't been started already.
  if (!state.flowing) {
    debug('pipe resume');
    src.resume();
  }

  return dest;
};

// Returns the 'drain' handler for a given source; restarts the flow once
// every piped destination that applied backpressure has drained.
function pipeOnDrain(src) {
  return function() {
    var state = src._readableState;
    debug('pipeOnDrain', state.awaitDrain);
    if (state.awaitDrain)
      state.awaitDrain--;
    if (state.awaitDrain === 0 && EE.listenerCount(src, 'data')) {
      state.flowing = true;
      flow(src);
    }
  };
}


// Disconnect `dest` (or all destinations when omitted) from this stream.
Readable.prototype.unpipe = function(dest) {
  var state = this._readableState;

  // if we're not piping anywhere, then do nothing.
  if (state.pipesCount === 0)
    return this;

  // just one destination.  most common case.
  if (state.pipesCount === 1) {
    // passed in one, but it's not the right one.
    if (dest && dest !== state.pipes)
      return this;

    if (!dest)
      dest = state.pipes;

    // got a match.
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;
    if (dest)
      dest.emit('unpipe', this);
    return this;
  }

  // slow case. multiple pipe destinations.

  if (!dest) {
    // remove all.
    var dests = state.pipes;
    var len = state.pipesCount;
    state.pipes = null;
    state.pipesCount = 0;
    state.flowing = false;

    for (var i = 0; i < len; i++)
      dests[i].emit('unpipe', this);
    return this;
  }

  // try to find the right one.
  // (note: `var i` is hoisted and shared with the loop above)
  var i = indexOf(state.pipes, dest);
  if (i === -1)
    return this;

  state.pipes.splice(i, 1);
  state.pipesCount -= 1;
  if (state.pipesCount === 1)
    state.pipes = state.pipes[0];

  dest.emit('unpipe', this);

  return this;
};

// set up data events if they are asked for
// Ensure readable listeners eventually get something
Readable.prototype.on = function(ev, fn) {
  var res = Stream.prototype.on.call(this, ev, fn);

  // If listening to data, and it has not explicitly been paused,
  // then call resume to start the flow of data on the next tick.
  if (ev === 'data' && false !== this._readableState.flowing) {
    this.resume();
  }

  if (ev === 'readable' && this.readable) {
    var state = this._readableState;
    if (!state.readableListening) {
      state.readableListening = true;
      state.emittedReadable = false;
      state.needReadable = true;
      if (!state.reading) {
        var self = this;
        process.nextTick(function() {
          debug('readable nexttick read 0');
          self.read(0);
        });
      } else if (state.length) {
        // NOTE(review): emitReadable only takes (stream); the extra
        // `state` argument here is ignored by the callee.
        emitReadable(this, state);
      }
    }
  }

  return res;
};
Readable.prototype.addListener = Readable.prototype.on;

// pause() and resume() are remnants of the legacy readable stream API
// If the user uses them, then switch into old mode.
// Switch into flowing mode; data will be emitted via 'data' events.
Readable.prototype.resume = function() {
  var state = this._readableState;
  if (!state.flowing) {
    debug('resume');
    state.flowing = true;
    if (!state.reading) {
      debug('resume read 0');
      this.read(0);
    }
    resume(this, state);
  }
  return this;
};

// Schedule the actual resume work for the next tick (at most once).
function resume(stream, state) {
  if (!state.resumeScheduled) {
    state.resumeScheduled = true;
    process.nextTick(function() {
      resume_(stream, state);
    });
  }
}

// Deferred half of resume(): emit 'resume' and start draining the buffer.
function resume_(stream, state) {
  state.resumeScheduled = false;
  stream.emit('resume');
  flow(stream);
  if (state.flowing && !state.reading)
    stream.read(0);
}

// Switch out of flowing mode; buffered data stays queued until read().
Readable.prototype.pause = function() {
  debug('call pause flowing=%j', this._readableState.flowing);
  if (false !== this._readableState.flowing) {
    debug('pause');
    this._readableState.flowing = false;
    this.emit('pause');
  }
  return this;
};

// Drain the buffer via repeated read() while in flowing mode; each read
// emits a 'data' event from within read() itself.
function flow(stream) {
  var state = stream._readableState;
  debug('flow', state.flowing);
  if (state.flowing) {
    do {
      var chunk = stream.read();
    } while (null !== chunk && state.flowing);
  }
}

// wrap an old-style stream as the async data source.
// This is *not* part of the readable stream interface.
// It is an ugly unfortunate mess of history.
Readable.prototype.wrap = function(stream) {
  var state = this._readableState;
  var paused = false;

  var self = this;
  stream.on('end', function() {
    debug('wrapped end');
    if (state.decoder && !state.ended) {
      var chunk = state.decoder.end();
      if (chunk && chunk.length)
        self.push(chunk);
    }

    self.push(null);
  });

  stream.on('data', function(chunk) {
    debug('wrapped data');
    if (state.decoder)
      chunk = state.decoder.write(chunk);
    if (!chunk || !state.objectMode && !chunk.length)
      return;

    var ret = self.push(chunk);
    if (!ret) {
      paused = true;
      stream.pause();
    }
  });

  // proxy all the other methods.
  // important when wrapping filters and duplexes.
  for (var i in stream) {
    if (util.isFunction(stream[i]) && util.isUndefined(this[i])) {
      // IIFE binds the current method name per iteration.
      this[i] = function(method) { return function() {
        return stream[method].apply(stream, arguments);
      }}(i);
    }
  }

  // proxy certain important events.
  var events = ['error', 'close', 'destroy', 'pause', 'resume'];
  forEach(events, function(ev) {
    stream.on(ev, self.emit.bind(self, ev));
  });

  // when we try to consume some more bytes, simply unpause the
  // underlying stream.
  self._read = function(n) {
    debug('wrapped _read', n);
    if (paused) {
      paused = false;
      stream.resume();
    }
  };

  return self;
};



// exposed for testing purposes only.
Readable._fromList = fromList;

// Pluck off n bytes from an array of buffers.
// Length is the combined lengths of all the buffers in the list.
function fromList(n, state) {
  var list = state.buffer;
  var length = state.length;
  var stringMode = !!state.decoder;
  var objectMode = !!state.objectMode;
  var ret;

  // nothing in the list, definitely empty.
  if (list.length === 0)
    return null;

  if (length === 0)
    ret = null;
  else if (objectMode)
    ret = list.shift();
  else if (!n || n >= length) {
    // read it all, truncate the array.
    if (stringMode)
      ret = list.join('');
    else
      ret = Buffer.concat(list, length);
    list.length = 0;
  } else {
    // read just some of it.
    if (n < list[0].length) {
      // just take a part of the first list item.
      // slice is the same for buffers and strings.
      var buf = list[0];
      ret = buf.slice(0, n);
      list[0] = buf.slice(n);
    } else if (n === list[0].length) {
      // first list is a perfect match
      ret = list.shift();
    } else {
      // complex case.
      // we have enough to cover it, but it spans past the first buffer.
      if (stringMode)
        ret = '';
      else
        ret = new Buffer(n);

      var c = 0;
      for (var i = 0, l = list.length; i < l && c < n; i++) {
        var buf = list[0];
        var cpy = Math.min(n - c, buf.length);

        if (stringMode)
          ret += buf.slice(0, cpy);
        else
          buf.copy(ret, c, 0, cpy);

        if (cpy < buf.length)
          list[0] = buf.slice(cpy);
        else
          list.shift();

        c += cpy;
      }
    }
  }

  return ret;
}

// Emit 'end' on the next tick once the buffer is fully consumed.
function endReadable(stream) {
  var state = stream._readableState;

  // If we get here before consuming all the bytes, then that is a
  // bug in node.  Should never happen.
  if (state.length > 0)
    throw new Error('endReadable called on non-empty stream');

  if (!state.endEmitted) {
    state.ended = true;
    process.nextTick(function() {
      // Check that we didn't get one last unshift.
      if (!state.endEmitted && state.length === 0) {
        state.endEmitted = true;
        stream.readable = false;
        stream.emit('end');
      }
    });
  }
}

// local Array#forEach stand-in.
function forEach (xs, f) {
  for (var i = 0, l = xs.length; i < l; i++) {
    f(xs[i], i);
  }
}

// local Array#indexOf stand-in.
function indexOf (xs, x) {
  for (var i = 0, l = xs.length; i < l; i++) {
    if (xs[i] === x) return i;
  }
  return -1;
}

}).call(this,require('_process'))
},{"./_stream_duplex":27,"_process":25,"buffer":18,"core-util-is":32,"events":22,"inherits":23,"isarray":24,"stream":37,"string_decoder/":38,"util":17}],30:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// a transform stream is a readable/writable stream where you do
Sometimes it's called a "filter", // but that's not a great name for it, since that implies a thing where // some bits pass through, and others are simply ignored. (That would // be a valid example of a transform, of course.) // // While the output is causally related to the input, it's not a // necessarily symmetric or synchronous transformation. For example, // a zlib stream might take multiple plain-text writes(), and then // emit a single compressed chunk some time in the future. // // Here's how this works: // // The Transform stream has all the aspects of the readable and writable // stream classes. When you write(chunk), that calls _write(chunk,cb) // internally, and returns false if there's a lot of pending writes // buffered up. When you call read(), that calls _read(n) until // there's enough pending readable data buffered up. // // In a transform stream, the written data is placed in a buffer. When // _read(n) is called, it transforms the queued up data, calling the // buffered _write cb's as it consumes chunks. If consuming a single // written chunk would result in multiple output chunks, then the first // outputted bit calls the readcb, and subsequent chunks just go into // the read buffer, and will cause it to emit 'readable' if necessary. // // This way, back-pressure is actually determined by the reading side, // since _read has to be called to start processing a new chunk. However, // a pathological inflate type of transform can cause excessive buffering // here. For example, imagine a stream where every byte of input is // interpreted as an integer from 0-255, and then results in that many // bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in // 1kb of data being output. In this case, you could write a very small // amount of input, and end up with a very large amount of output. In // such a pathological inflating mechanism, there'd be no way to tell // the system to stop doing the transform. 
A single 4MB write could
// cause the system to run out of memory.
//
// However, even in such a pathological case, only a single written chunk
// would be consumed, and then the rest would wait (un-transformed) until
// the results of the previous transformed chunk were consumed.

// browserify module 30 (continued): readable-stream _stream_transform.js
// (vendored). A Transform is a Duplex where writes feed _transform() and
// its output feeds the readable side.
module.exports = Transform;

var Duplex = require('./_stream_duplex');

/*<replacement>*/
var util = require('core-util-is');
util.inherits = require('inherits');
/*</replacement>*/

util.inherits(Transform, Duplex);


// Bookkeeping for the single in-flight transform operation.
function TransformState(options, stream) {
  this.afterTransform = function(er, data) {
    return afterTransform(stream, er, data);
  };

  this.needTransform = false;
  this.transforming = false;
  this.writecb = null;
  this.writechunk = null;
}

// Completion callback for _transform(): push output, release the write
// callback, and keep reading if the readable side still wants data.
function afterTransform(stream, er, data) {
  var ts = stream._transformState;
  ts.transforming = false;

  var cb = ts.writecb;

  if (!cb)
    return stream.emit('error', new Error('no writecb in Transform class'));

  ts.writechunk = null;
  ts.writecb = null;

  if (!util.isNullOrUndefined(data))
    stream.push(data);

  if (cb)
    cb(er);

  var rs = stream._readableState;
  rs.reading = false;
  if (rs.needReadable || rs.length < rs.highWaterMark) {
    stream._read(rs.highWaterMark);
  }
}


// Constructor; subclasses implement _transform() and optionally _flush().
function Transform(options) {
  if (!(this instanceof Transform))
    return new Transform(options);

  Duplex.call(this, options);

  this._transformState = new TransformState(options, this);

  // when the writable side finishes, then flush out anything remaining.
  var stream = this;

  // start out asking for a readable event once data is transformed.
  this._readableState.needReadable = true;

  // we have implemented the _read method, and done the other things
  // that Readable wants before the first _read call, so unset the
  // sync guard flag.
  this._readableState.sync = false;

  this.once('prefinish', function() {
    if (util.isFunction(this._flush))
      this._flush(function(er) {
        done(stream, er);
      });
    else
      done(stream);
  });
}

Transform.prototype.push = function(chunk, encoding) {
  this._transformState.needTransform = false;
  return Duplex.prototype.push.call(this, chunk, encoding);
};

// This is the part where you do stuff!
// override this function in implementation classes.
// 'chunk' is an input chunk.
//
// Call `push(newChunk)` to pass along transformed output
// to the readable side.  You may call 'push' zero or more times.
//
// Call `cb(err)` when you are done with this chunk.  If you pass
// an error, then that'll put the hurt on the whole operation.  If you
// never call cb(), then you'll never get another chunk.
Transform.prototype._transform = function(chunk, encoding, cb) {
  throw new Error('not implemented');
};

// Writable-side hook: stash the chunk/cb; the actual transform runs from
// _read() so backpressure is governed by the reading side.
Transform.prototype._write = function(chunk, encoding, cb) {
  var ts = this._transformState;
  ts.writecb = cb;
  ts.writechunk = chunk;
  ts.writeencoding = encoding;
  if (!ts.transforming) {
    var rs = this._readableState;
    if (ts.needTransform ||
        rs.needReadable ||
        rs.length < rs.highWaterMark)
      this._read(rs.highWaterMark);
  }
};

// Doesn't matter what the args are here.
// _transform does all the work.
// That we got here means that the readable side wants more data.
Transform.prototype._read = function(n) {
  var ts = this._transformState;

  if (!util.isNull(ts.writechunk) && ts.writecb && !ts.transforming) {
    ts.transforming = true;
    this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform);
  } else {
    // mark that we need a transform, so that any data that comes in
    // will get processed, now that we've asked for it.
    ts.needTransform = true;
  }
};


// Finalize the stream after 'prefinish': sanity-check state, then signal
// readable-side EOF with push(null).
function done(stream, er) {
  if (er)
    return stream.emit('error', er);

  // if there's nothing in the write buffer, then that means
  // that nothing more will ever be provided
  var ws = stream._writableState;
  var ts = stream._transformState;

  if (ws.length)
    throw new Error('calling transform done when ws.length != 0');

  if (ts.transforming)
    throw new Error('calling transform done when still transforming');

  return stream.push(null);
}

},{"./_stream_duplex":27,"core-util-is":32,"inherits":23}],31:[function(require,module,exports){
(function (process){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.

// A bit simpler than readable streams.
// Implement an async ._write(chunk, cb), and it'll handle all
// the drain event emission and buffering.
module.exports = Writable; /*<replacement>*/ var Buffer = require('buffer').Buffer; /*</replacement>*/ Writable.WritableState = WritableState; /*<replacement>*/ var util = require('core-util-is'); util.inherits = require('inherits'); /*</replacement>*/ var Stream = require('stream'); util.inherits(Writable, Stream); function WriteReq(chunk, encoding, cb) { this.chunk = chunk; this.encoding = encoding; this.callback = cb; } function WritableState(options, stream) { var Duplex = require('./_stream_duplex'); options = options || {}; // the point at which write() starts returning false // Note: 0 is a valid value, means that we always return false if // the entire buffer is not flushed immediately on write() var hwm = options.highWaterMark; var defaultHwm = options.objectMode ? 16 : 16 * 1024; this.highWaterMark = (hwm || hwm === 0) ? hwm : defaultHwm; // object stream flag to indicate whether or not this stream // contains buffers or objects. this.objectMode = !!options.objectMode; if (stream instanceof Duplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // cast to ints. this.highWaterMark = ~~this.highWaterMark; this.needDrain = false; // at the start of calling end() this.ending = false; // when end() has been called, and returned this.ended = false; // when 'finish' is emitted this.finished = false; // should we decode strings into buffers before passing to _write? // this is here so that some node-core streams can optimize string // handling at a lower level. var noDecode = options.decodeStrings === false; this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. // Everything else in the universe uses 'utf8', though. this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement // of how much we're waiting to get pushed to some underlying // socket or file. 
this.length = 0; // a flag to see when we're in the middle of a write. this.writing = false; // when true all writes will be buffered until .uncork() call this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, // or on a later tick. We set this to true at first, because any // actions that shouldn't happen until "later" should generally also // not happen before the first write call. this.sync = true; // a flag to know if we're processing previously buffered items, which // may call the _write() callback in the same tick, so that we don't // end up in an overlapped onwrite situation. this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) this.onwrite = function(er) { onwrite(stream, er); }; // the callback that the user supplies to write(chunk,encoding,cb) this.writecb = null; // the amount that is being written when _write is called. this.writelen = 0; this.buffer = []; // number of pending user-supplied write callbacks // this must be 0 before 'finish' can be emitted this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs // This is relevant for synchronous Transform streams this.prefinished = false; // True if the error was already emitted and should not be thrown again this.errorEmitted = false; } function Writable(options) { var Duplex = require('./_stream_duplex'); // Writable ctor is applied to Duplexes, though they're not // instanceof Writable, they're instanceof Readable. if (!(this instanceof Writable) && !(this instanceof Duplex)) return new Writable(options); this._writableState = new WritableState(options, this); // legacy. this.writable = true; Stream.call(this); } // Otherwise people can pipe Writable streams, which is just wrong. Writable.prototype.pipe = function() { this.emit('error', new Error('Cannot pipe. 
Not readable.')); }; function writeAfterEnd(stream, state, cb) { var er = new Error('write after end'); // TODO: defer error events consistently everywhere, not just the cb stream.emit('error', er); process.nextTick(function() { cb(er); }); } // If we get something that is not a buffer, string, null, or undefined, // and we're not in objectMode, then that's an error. // Otherwise stream chunks are all considered to be of length=1, and the // watermarks determine how many objects to keep in the buffer, rather than // how many bytes or characters. function validChunk(stream, state, chunk, cb) { var valid = true; if (!util.isBuffer(chunk) && !util.isString(chunk) && !util.isNullOrUndefined(chunk) && !state.objectMode) { var er = new TypeError('Invalid non-string/buffer chunk'); stream.emit('error', er); process.nextTick(function() { cb(er); }); valid = false; } return valid; } Writable.prototype.write = function(chunk, encoding, cb) { var state = this._writableState; var ret = false; if (util.isFunction(encoding)) { cb = encoding; encoding = null; } if (util.isBuffer(chunk)) encoding = 'buffer'; else if (!encoding) encoding = state.defaultEncoding; if (!util.isFunction(cb)) cb = function() {}; if (state.ended) writeAfterEnd(this, state, cb); else if (validChunk(this, state, chunk, cb)) { state.pendingcb++; ret = writeOrBuffer(this, state, chunk, encoding, cb); } return ret; }; Writable.prototype.cork = function() { var state = this._writableState; state.corked++; }; Writable.prototype.uncork = function() { var state = this._writableState; if (state.corked) { state.corked--; if (!state.writing && !state.corked && !state.finished && !state.bufferProcessing && state.buffer.length) clearBuffer(this, state); } }; function decodeChunk(state, chunk, encoding) { if (!state.objectMode && state.decodeStrings !== false && util.isString(chunk)) { chunk = new Buffer(chunk, encoding); } return chunk; } // if we're already writing something, then just put this // in the queue, and 
wait our turn. Otherwise, call _write // If we return false, then we need a drain event, so set that flag. function writeOrBuffer(stream, state, chunk, encoding, cb) { chunk = decodeChunk(state, chunk, encoding); if (util.isBuffer(chunk)) encoding = 'buffer'; var len = state.objectMode ? 1 : chunk.length; state.length += len; var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. if (!ret) state.needDrain = true; if (state.writing || state.corked) state.buffer.push(new WriteReq(chunk, encoding, cb)); else doWrite(stream, state, false, len, chunk, encoding, cb); return ret; } function doWrite(stream, state, writev, len, chunk, encoding, cb) { state.writelen = len; state.writecb = cb; state.writing = true; state.sync = true; if (writev) stream._writev(chunk, state.onwrite); else stream._write(chunk, encoding, state.onwrite); state.sync = false; } function onwriteError(stream, state, sync, er, cb) { if (sync) process.nextTick(function() { state.pendingcb--; cb(er); }); else { state.pendingcb--; cb(er); } stream._writableState.errorEmitted = true; stream.emit('error', er); } function onwriteStateUpdate(state) { state.writing = false; state.writecb = null; state.length -= state.writelen; state.writelen = 0; } function onwrite(stream, er) { var state = stream._writableState; var sync = state.sync; var cb = state.writecb; onwriteStateUpdate(state); if (er) onwriteError(stream, state, sync, er, cb); else { // Check if we're actually ready to finish, but don't emit yet var finished = needFinish(stream, state); if (!finished && !state.corked && !state.bufferProcessing && state.buffer.length) { clearBuffer(stream, state); } if (sync) { process.nextTick(function() { afterWrite(stream, state, finished, cb); }); } else { afterWrite(stream, state, finished, cb); } } } function afterWrite(stream, state, finished, cb) { if (!finished) onwriteDrain(stream, state); state.pendingcb--; cb(); finishMaybe(stream, state); } // 
Must force callback to be called on nextTick, so that we don't // emit 'drain' before the write() consumer gets the 'false' return // value, and has a chance to attach a 'drain' listener. function onwriteDrain(stream, state) { if (state.length === 0 && state.needDrain) { state.needDrain = false; stream.emit('drain'); } } // if there's something in the buffer waiting, then process it function clearBuffer(stream, state) { state.bufferProcessing = true; if (stream._writev && state.buffer.length > 1) { // Fast case, write everything using _writev() var cbs = []; for (var c = 0; c < state.buffer.length; c++) cbs.push(state.buffer[c].callback); // count the one we are adding, as well. // TODO(isaacs) clean this up state.pendingcb++; doWrite(stream, state, true, state.length, state.buffer, '', function(err) { for (var i = 0; i < cbs.length; i++) { state.pendingcb--; cbs[i](err); } }); // Clear buffer state.buffer = []; } else { // Slow case, write chunks one-by-one for (var c = 0; c < state.buffer.length; c++) { var entry = state.buffer[c]; var chunk = entry.chunk; var encoding = entry.encoding; var cb = entry.callback; var len = state.objectMode ? 1 : chunk.length; doWrite(stream, state, false, len, chunk, encoding, cb); // if we didn't call the onwrite immediately, then // it means that we need to wait until it does. // also, that means that the chunk and cb are currently // being processed, so move the buffer counter past them. 
if (state.writing) { c++; break; } } if (c < state.buffer.length) state.buffer = state.buffer.slice(c); else state.buffer.length = 0; } state.bufferProcessing = false; } Writable.prototype._write = function(chunk, encoding, cb) { cb(new Error('not implemented')); }; Writable.prototype._writev = null; Writable.prototype.end = function(chunk, encoding, cb) { var state = this._writableState; if (util.isFunction(chunk)) { cb = chunk; chunk = null; encoding = null; } else if (util.isFunction(encoding)) { cb = encoding; encoding = null; } if (!util.isNullOrUndefined(chunk)) this.write(chunk, encoding); // .end() fully uncorks if (state.corked) { state.corked = 1; this.uncork(); } // ignore unnecessary end() calls. if (!state.ending && !state.finished) endWritable(this, state, cb); }; function needFinish(stream, state) { return (state.ending && state.length === 0 && !state.finished && !state.writing); } function prefinish(stream, state) { if (!state.prefinished) { state.prefinished = true; stream.emit('prefinish'); } } function finishMaybe(stream, state) { var need = needFinish(stream, state); if (need) { if (state.pendingcb === 0) { prefinish(stream, state); state.finished = true; stream.emit('finish'); } else prefinish(stream, state); } return need; } function endWritable(stream, state, cb) { state.ending = true; finishMaybe(stream, state); if (cb) { if (state.finished) process.nextTick(cb); else stream.once('finish', cb); } state.ended = true; } }).call(this,require('_process')) },{"./_stream_duplex":27,"_process":25,"buffer":18,"core-util-is":32,"inherits":23,"stream":37}],32:[function(require,module,exports){ (function (Buffer){ // Copyright Joyent, Inc. and other Node contributors. 
// // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // NOTE: These type checking functions intentionally don't use `instanceof` // because it is fragile and can be easily faked with `Object.create()`. 
function isArray(ar) { return Array.isArray(ar); } exports.isArray = isArray; function isBoolean(arg) { return typeof arg === 'boolean'; } exports.isBoolean = isBoolean; function isNull(arg) { return arg === null; } exports.isNull = isNull; function isNullOrUndefined(arg) { return arg == null; } exports.isNullOrUndefined = isNullOrUndefined; function isNumber(arg) { return typeof arg === 'number'; } exports.isNumber = isNumber; function isString(arg) { return typeof arg === 'string'; } exports.isString = isString; function isSymbol(arg) { return typeof arg === 'symbol'; } exports.isSymbol = isSymbol; function isUndefined(arg) { return arg === void 0; } exports.isUndefined = isUndefined; function isRegExp(re) { return isObject(re) && objectToString(re) === '[object RegExp]'; } exports.isRegExp = isRegExp; function isObject(arg) { return typeof arg === 'object' && arg !== null; } exports.isObject = isObject; function isDate(d) { return isObject(d) && objectToString(d) === '[object Date]'; } exports.isDate = isDate; function isError(e) { return isObject(e) && (objectToString(e) === '[object Error]' || e instanceof Error); } exports.isError = isError; function isFunction(arg) { return typeof arg === 'function'; } exports.isFunction = isFunction; function isPrimitive(arg) { return arg === null || typeof arg === 'boolean' || typeof arg === 'number' || typeof arg === 'string' || typeof arg === 'symbol' || // ES6 symbol typeof arg === 'undefined'; } exports.isPrimitive = isPrimitive; function isBuffer(arg) { return Buffer.isBuffer(arg); } exports.isBuffer = isBuffer; function objectToString(o) { return Object.prototype.toString.call(o); } }).call(this,require("buffer").Buffer) },{"buffer":18}],33:[function(require,module,exports){ module.exports = require("./lib/_stream_passthrough.js") },{"./lib/_stream_passthrough.js":28}],34:[function(require,module,exports){ exports = module.exports = require('./lib/_stream_readable.js'); exports.Stream = require('stream'); 
exports.Readable = exports; exports.Writable = require('./lib/_stream_writable.js'); exports.Duplex = require('./lib/_stream_duplex.js'); exports.Transform = require('./lib/_stream_transform.js'); exports.PassThrough = require('./lib/_stream_passthrough.js'); },{"./lib/_stream_duplex.js":27,"./lib/_stream_passthrough.js":28,"./lib/_stream_readable.js":29,"./lib/_stream_transform.js":30,"./lib/_stream_writable.js":31,"stream":37}],35:[function(require,module,exports){ module.exports = require("./lib/_stream_transform.js") },{"./lib/_stream_transform.js":30}],36:[function(require,module,exports){ module.exports = require("./lib/_stream_writable.js") },{"./lib/_stream_writable.js":31}],37:[function(require,module,exports){ // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
module.exports = Stream; var EE = require('events').EventEmitter; var inherits = require('inherits'); inherits(Stream, EE); Stream.Readable = require('readable-stream/readable.js'); Stream.Writable = require('readable-stream/writable.js'); Stream.Duplex = require('readable-stream/duplex.js'); Stream.Transform = require('readable-stream/transform.js'); Stream.PassThrough = require('readable-stream/passthrough.js'); // Backwards-compat with node 0.4.x Stream.Stream = Stream; // old-style streams. Note that the pipe method (the only relevant // part of this class) is overridden in the Readable class. function Stream() { EE.call(this); } Stream.prototype.pipe = function(dest, options) { var source = this; function ondata(chunk) { if (dest.writable) { if (false === dest.write(chunk) && source.pause) { source.pause(); } } } source.on('data', ondata); function ondrain() { if (source.readable && source.resume) { source.resume(); } } dest.on('drain', ondrain); // If the 'end' option is not supplied, dest.end() will be called when // source gets the 'end' or 'close' events. Only dest.end() once. if (!dest._isStdio && (!options || options.end !== false)) { source.on('end', onend); source.on('close', onclose); } var didOnEnd = false; function onend() { if (didOnEnd) return; didOnEnd = true; dest.end(); } function onclose() { if (didOnEnd) return; didOnEnd = true; if (typeof dest.destroy === 'function') dest.destroy(); } // don't leave dangling pipes when there are errors. function onerror(er) { cleanup(); if (EE.listenerCount(this, 'error') === 0) { throw er; // Unhandled stream error in pipe. } } source.on('error', onerror); dest.on('error', onerror); // remove all the event listeners that were added. 
function cleanup() { source.removeListener('data', ondata); dest.removeListener('drain', ondrain); source.removeListener('end', onend); source.removeListener('close', onclose); source.removeListener('error', onerror); dest.removeListener('error', onerror); source.removeListener('end', cleanup); source.removeListener('close', cleanup); dest.removeListener('close', cleanup); } source.on('end', cleanup); source.on('close', cleanup); dest.on('close', cleanup); dest.emit('pipe', source); // Allow for unix-like usage: A.pipe(B).pipe(C) return dest; }; },{"events":22,"inherits":23,"readable-stream/duplex.js":26,"readable-stream/passthrough.js":33,"readable-stream/readable.js":34,"readable-stream/transform.js":35,"readable-stream/writable.js":36}],38:[function(require,module,exports){ // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. 
var Buffer = require('buffer').Buffer; var isBufferEncoding = Buffer.isEncoding || function(encoding) { switch (encoding && encoding.toLowerCase()) { case 'hex': case 'utf8': case 'utf-8': case 'ascii': case 'binary': case 'base64': case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': case 'raw': return true; default: return false; } } function assertEncoding(encoding) { if (encoding && !isBufferEncoding(encoding)) { throw new Error('Unknown encoding: ' + encoding); } } // StringDecoder provides an interface for efficiently splitting a series of // buffers into a series of JS strings without breaking apart multi-byte // characters. CESU-8 is handled as part of the UTF-8 encoding. // // @TODO Handling all encodings inside a single object makes it very difficult // to reason about this code, so it should be split up in the future. // @TODO There should be a utf8-strict encoding that rejects invalid UTF-8 code // points as used by CESU-8. var StringDecoder = exports.StringDecoder = function(encoding) { this.encoding = (encoding || 'utf8').toLowerCase().replace(/[-_]/, ''); assertEncoding(encoding); switch (this.encoding) { case 'utf8': // CESU-8 represents each of Surrogate Pair by 3-bytes this.surrogateSize = 3; break; case 'ucs2': case 'utf16le': // UTF-16 represents each of Surrogate Pair by 2-bytes this.surrogateSize = 2; this.detectIncompleteChar = utf16DetectIncompleteChar; break; case 'base64': // Base-64 stores 3 bytes in 4 chars, and pads the remainder. this.surrogateSize = 3; this.detectIncompleteChar = base64DetectIncompleteChar; break; default: this.write = passThroughWrite; return; } // Enough space to store all bytes of a single character. UTF-8 needs 4 // bytes, but CESU-8 may require up to 6 (3 bytes per surrogate). this.charBuffer = new Buffer(6); // Number of bytes received for the current incomplete multi-byte character. this.charReceived = 0; // Number of bytes expected for the current incomplete multi-byte character. 
this.charLength = 0; }; // write decodes the given buffer and returns it as JS string that is // guaranteed to not contain any partial multi-byte characters. Any partial // character found at the end of the buffer is buffered up, and will be // returned when calling write again with the remaining bytes. // // Note: Converting a Buffer containing an orphan surrogate to a String // currently works, but converting a String to a Buffer (via `new Buffer`, or // Buffer#write) will replace incomplete surrogates with the unicode // replacement character. See https://codereview.chromium.org/121173009/ . StringDecoder.prototype.write = function(buffer) { var charStr = ''; // if our last write ended with an incomplete multibyte character while (this.charLength) { // determine how many remaining bytes this buffer has to offer for this char var available = (buffer.length >= this.charLength - this.charReceived) ? this.charLength - this.charReceived : buffer.length; // add the new bytes to the char buffer buffer.copy(this.charBuffer, this.charReceived, 0, available); this.charReceived += available; if (this.charReceived < this.charLength) { // still not enough chars in this buffer? wait for more ... 
return ''; } // remove bytes belonging to the current character from the buffer buffer = buffer.slice(available, buffer.length); // get the character that was split charStr = this.charBuffer.slice(0, this.charLength).toString(this.encoding); // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character var charCode = charStr.charCodeAt(charStr.length - 1); if (charCode >= 0xD800 && charCode <= 0xDBFF) { this.charLength += this.surrogateSize; charStr = ''; continue; } this.charReceived = this.charLength = 0; // if there are no more bytes in this buffer, just emit our char if (buffer.length === 0) { return charStr; } break; } // determine and set charLength / charReceived this.detectIncompleteChar(buffer); var end = buffer.length; if (this.charLength) { // buffer the incomplete character bytes we got buffer.copy(this.charBuffer, 0, buffer.length - this.charReceived, end); end -= this.charReceived; } charStr += buffer.toString(this.encoding, 0, end); var end = charStr.length - 1; var charCode = charStr.charCodeAt(end); // CESU-8: lead surrogate (D800-DBFF) is also the incomplete character if (charCode >= 0xD800 && charCode <= 0xDBFF) { var size = this.surrogateSize; this.charLength += size; this.charReceived += size; this.charBuffer.copy(this.charBuffer, size, 0, size); buffer.copy(this.charBuffer, 0, 0, size); return charStr.substring(0, end); } // or just emit the charStr return charStr; }; // detectIncompleteChar determines if there is an incomplete UTF-8 character at // the end of the given buffer. If so, it sets this.charLength to the byte // length that character, and sets this.charReceived to the number of bytes // that are available for this character. StringDecoder.prototype.detectIncompleteChar = function(buffer) { // determine how many bytes we have to check at the end of this buffer var i = (buffer.length >= 3) ? 3 : buffer.length; // Figure out if one of the last i bytes of our buffer announces an // incomplete char. 
for (; i > 0; i--) { var c = buffer[buffer.length - i]; // See http://en.wikipedia.org/wiki/UTF-8#Description // 110XXXXX if (i == 1 && c >> 5 == 0x06) { this.charLength = 2; break; } // 1110XXXX if (i <= 2 && c >> 4 == 0x0E) { this.charLength = 3; break; } // 11110XXX if (i <= 3 && c >> 3 == 0x1E) { this.charLength = 4; break; } } this.charReceived = i; }; StringDecoder.prototype.end = function(buffer) { var res = ''; if (buffer && buffer.length) res = this.write(buffer); if (this.charReceived) { var cr = this.charReceived; var buf = this.charBuffer; var enc = this.encoding; res += buf.slice(0, cr).toString(enc); } return res; }; function passThroughWrite(buffer) { return buffer.toString(this.encoding); } function utf16DetectIncompleteChar(buffer) { this.charReceived = buffer.length % 2; this.charLength = this.charReceived ? 2 : 0; } function base64DetectIncompleteChar(buffer) { this.charReceived = buffer.length % 3; this.charLength = this.charReceived ? 3 : 0; } },{"buffer":18}]},{},[1]);<|fim▁end|>
<|file_name|>test_different_outputs.py<|end_file_name|><|fim▁begin|>import unittest<|fim▁hole|> class TestDifferentOutputs(unittest.TestCase): """Tests the various output methods: KML style, WKT, lat-lon and lon-lat.""" def setUp(self): self.latitude = 32.074322 self.longitude = 34.792081 self.radius_meters = 100 self.number_of_vertices = 36 self.polycircle = \ polycircles.Polycircle(latitude=self.latitude, longitude=self.longitude, radius=self.radius_meters, number_of_vertices=self.number_of_vertices) def test_lat_lon_output(self): """Asserts that the vertices in the lat-lon output are in the right order (lat before long).""" for vertex in self.polycircle.to_lat_lon(): assert_almost_equal(vertex[0], self.latitude, places=2) assert_almost_equal(vertex[1], self.longitude, places=2) def test_lon_lat_output(self): """Asserts that the vertices in the lat-lon output are in the right order (lat before long).""" for vertex in self.polycircle.to_lon_lat(): assert_almost_equal(vertex[0], self.longitude, places=2) assert_almost_equal(vertex[1], self.latitude, places=2) def test_vertices_equals_lat_lon(self): """Asserts that the "vertices" property is identical to the return value of to_lat_lon().""" assert_equal(self.polycircle.vertices, self.polycircle.to_lat_lon()) def test_kml_equals_lon_lat(self): """Asserts that the return value of to_kml() property is identical to the return value of to_lon_lat().""" assert_equal(self.polycircle.to_kml(), self.polycircle.to_lon_lat()) if __name__ == '__main__': unittest.main()<|fim▁end|>
from polycircles import polycircles from nose.tools import assert_equal, assert_almost_equal
<|file_name|>kcm_view_models.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2010 Andriy Rysin ([email protected]) * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. */ #include "kcm_view_models.h" #include <klocalizedstring.h> #include <kkeysequencewidget.h> #include <QtGui/QTreeView> #include <QtGui/QComboBox> #include <QtGui/QLineEdit> #include <QtGui/QPainter> #include <QtGui/QKeySequence> #ifdef DRAG_ENABLED #include <QtCore/QMimeData> #endif #include "keyboard_config.h" #include "xkb_rules.h" #include "flags.h" #include "x11_helper.h" #include "bindings.h" const int LayoutsTableModel::MAP_COLUMN = 0; const int LayoutsTableModel::LAYOUT_COLUMN = 1; const int LayoutsTableModel::VARIANT_COLUMN = 2; const int LayoutsTableModel::DISPLAY_NAME_COLUMN = 3; const int LayoutsTableModel::SHORTCUT_COLUMN = 4; static const int COLUMN_COUNT = 5; LayoutsTableModel::LayoutsTableModel(Rules* rules_, Flags *flags_, KeyboardConfig* keyboardConfig_, QObject* parent): QAbstractTableModel(parent), keyboardConfig(keyboardConfig_), rules(rules_), countryFlags(flags_) { } void LayoutsTableModel::refresh() { beginResetModel(); endResetModel(); } int LayoutsTableModel::rowCount(const QModelIndex &/*parent*/) const { return keyboardConfig->layouts.count(); } int LayoutsTableModel::columnCount(const QModelIndex&) const { return COLUMN_COUNT; 
} Qt::ItemFlags LayoutsTableModel::flags(const QModelIndex &index) const { if (!index.isValid()) return 0; Qt::ItemFlags flags = QAbstractTableModel::flags(index); if( index.column() == DISPLAY_NAME_COLUMN || index.column() == VARIANT_COLUMN || index.column() == SHORTCUT_COLUMN ) { flags |= Qt::ItemIsEditable; } #ifdef DRAG_ENABLED flags |= Qt::ItemIsDragEnabled | Qt::ItemIsDropEnabled; #endif return flags; } #ifdef DRAG_ENABLED QStringList LayoutsTableModel::mimeTypes() const { QStringList types; types << "application/keyboard-layout-item"; return types; } QMimeData *LayoutsTableModel::mimeData(const QModelIndexList &indexes) const { QMimeData *mimeData = new QMimeData(); QByteArray encodedData; QDataStream stream(&encodedData, QIODevice::WriteOnly); QSet<int> rows; foreach (const QModelIndex& index, indexes) { if (index.isValid()) { rows << index.row(); } } foreach (int row, rows) { stream << row; } mimeData->setData("application/keyboard-layout-item", encodedData); return mimeData; } #endif QVariant LayoutsTableModel::data(const QModelIndex &index, int role) const { if (!index.isValid()) return QVariant(); if (index.row() >= keyboardConfig->layouts.size()) return QVariant(); const LayoutUnit& layoutUnit = keyboardConfig->layouts.at(index.row()); if (role == Qt::DecorationRole) { switch( index.column() ) { case DISPLAY_NAME_COLUMN: { // if( keyboardConfig->isFlagShown() ) { QIcon icon = countryFlags->getIconWithText(layoutUnit, *keyboardConfig); return icon.isNull() ? 
countryFlags->getTransparentPixmap() : icon; // } } //TODO: show the cells are editable // case VARIANT_COLUMN: { // case DISPLAY_NAME_COLUMN: { // int sz = 5; // QPixmap pm = QPixmap(sz, sz+5); // pm.fill(Qt::transparent); // QPainter p(&pm); // QPoint points[] = { QPoint(0, 0), QPoint(0, sz), QPoint(sz, 0) }; // p.drawPolygon(points, 3); // return pm; // } break; } } else if( role == Qt::BackgroundRole ) { if( keyboardConfig->layoutLoopCount != KeyboardConfig::NO_LOOPING && index.row() >= keyboardConfig->layoutLoopCount ) { return QBrush(Qt::lightGray); } } else if (role == Qt::DisplayRole) { switch( index.column() ) { case MAP_COLUMN: return layoutUnit.layout; break; case LAYOUT_COLUMN: { const LayoutInfo* layoutInfo = rules->getLayoutInfo(layoutUnit.layout); return layoutInfo != NULL ? layoutInfo->description : layoutUnit.layout; } case VARIANT_COLUMN: { if( layoutUnit.variant.isEmpty() ) return QVariant(); const LayoutInfo* layoutInfo = rules->getLayoutInfo(layoutUnit.layout); if( layoutInfo == NULL ) return QVariant(); const VariantInfo* variantInfo = layoutInfo->getVariantInfo(layoutUnit.variant); return variantInfo != NULL ? 
variantInfo->description : layoutUnit.variant; } break; case DISPLAY_NAME_COLUMN: // if( keyboardConfig->indicatorType == KeyboardConfig::SHOW_LABEL ) { // return layoutUnit.getDisplayName(); // } break; case SHORTCUT_COLUMN: { return layoutUnit.getShortcut().toString(); } break; } } else if (role==Qt::EditRole ) { switch( index.column() ) { case DISPLAY_NAME_COLUMN: return layoutUnit.getDisplayName(); break; case VARIANT_COLUMN: return layoutUnit.variant; break; case SHORTCUT_COLUMN: return layoutUnit.getShortcut().toString(); break; default:; } } else if( role == Qt::TextAlignmentRole ) { switch( index.column() ) { case MAP_COLUMN: case DISPLAY_NAME_COLUMN: case SHORTCUT_COLUMN: return Qt::AlignCenter; break; default:; } } return QVariant(); } QVariant LayoutsTableModel::headerData(int section, Qt::Orientation orientation, int role) const { if (role != Qt::DisplayRole) return QVariant(); if (orientation == Qt::Horizontal) { const QString headers[] = {i18nc("layout map name", "Map"), i18n("Layout"), i18n("Variant"), i18n("Label"), i18n("Shortcut")}; return headers[section]; } return QVariant(); } bool LayoutsTableModel::setData(const QModelIndex &index, const QVariant &value, int role) { if (role != Qt::EditRole || (index.column() != DISPLAY_NAME_COLUMN && index.column() != VARIANT_COLUMN && index.column() != SHORTCUT_COLUMN) ) return false; if (index.row() >= keyboardConfig->layouts.size()) return false; LayoutUnit& layoutUnit = keyboardConfig->layouts[index.row()]; switch( index.column() ) { case DISPLAY_NAME_COLUMN: { QString displayText = value.toString().left(3); layoutUnit.setDisplayName(displayText); countryFlags->clearCache(); // regenerate the label } break; case VARIANT_COLUMN: { QString variant = value.toString(); layoutUnit.variant = variant; } break; case SHORTCUT_COLUMN: { QString shortcut = value.toString(); layoutUnit.setShortcut(QKeySequence(shortcut)); } break; } emit dataChanged(index, index); return true; } // // LabelEditDelegate // 
LabelEditDelegate::LabelEditDelegate(const KeyboardConfig* keyboardConfig_, QObject *parent): QStyledItemDelegate(parent), keyboardConfig(keyboardConfig_) {} QWidget *LabelEditDelegate::createEditor(QWidget *parent, const QStyleOptionViewItem & option , const QModelIndex & index ) const { if( keyboardConfig->indicatorType == KeyboardConfig::SHOW_FLAG ) return NULL; QWidget* widget = QStyledItemDelegate::createEditor(parent, option, index); QLineEdit* lineEdit = static_cast<QLineEdit*>(widget); if( lineEdit != NULL ) { lineEdit->setMaxLength(LayoutUnit::MAX_LABEL_LENGTH); } return widget; } //void LabelEditDelegate::paint( QPainter * painter, const QStyleOptionViewItem & option, const QModelIndex & index ) const //{ // QStyleOptionViewItem option2(option); //// option2.decorationPosition = QStyleOptionViewItem::Right; // option2.decorationAlignment = Qt::AlignHCenter | Qt::AlignVCenter; // QStyledItemDelegate::paint(painter, option2, index); //} // // VariantComboDelegate // //TODO: reuse this function in kcm_add_layout_dialog.cpp static void populateComboWithVariants(QComboBox* combo, const QString& layout, const Rules* rules) { combo->clear(); const LayoutInfo* layoutInfo = rules->getLayoutInfo(layout); foreach(const VariantInfo* variantInfo, layoutInfo->variantInfos) { combo->addItem(variantInfo->description, variantInfo->name); } combo->model()->sort(0); combo->insertItem(0, i18nc("variant", "Default"), ""); combo->setCurrentIndex(0); } VariantComboDelegate::VariantComboDelegate(const KeyboardConfig* keyboardConfig_, const Rules* rules_, QObject *parent): QStyledItemDelegate(parent), keyboardConfig(keyboardConfig_), rules(rules_) {} QWidget *VariantComboDelegate::createEditor(QWidget *parent, const QStyleOptionViewItem &/* option */, const QModelIndex & index ) const { QComboBox *editor = new QComboBox(parent); const LayoutUnit& layoutUnit = keyboardConfig->layouts[index.row()]; populateComboWithVariants(editor, layoutUnit.layout, rules); return editor; } void 
VariantComboDelegate::setEditorData(QWidget *editor, const QModelIndex &index) const { QComboBox *combo = static_cast<QComboBox*>(editor); QString variant = index.model()->data(index, Qt::EditRole).toString(); int itemIndex = combo->findData(variant); if( itemIndex == -1 ) { itemIndex = 0; } combo->setCurrentIndex(itemIndex); } void VariantComboDelegate::setModelData(QWidget *editor, QAbstractItemModel *model, const QModelIndex &index) const { QComboBox *combo = static_cast<QComboBox*>(editor); QString variant = combo->itemData(combo->currentIndex()).toString(); model->setData(index, variant, Qt::EditRole);<|fim▁hole|> const QStyleOptionViewItem &option, const QModelIndex &/* index */) const { editor->setGeometry(option.rect); } // // KKeySequenceWidgetDelegate // KKeySequenceWidgetDelegate::KKeySequenceWidgetDelegate(const KeyboardConfig* keyboardConfig_, QObject *parent): QStyledItemDelegate(parent), keyboardConfig(keyboardConfig_) {} QWidget *KKeySequenceWidgetDelegate::createEditor(QWidget *parent, const QStyleOptionViewItem & /*option*/, const QModelIndex & index ) const { itemsBeingEdited.insert(index); KKeySequenceWidget *editor = new KKeySequenceWidget(parent); editor->setFocusPolicy(Qt::StrongFocus); editor->setModifierlessAllowed(false); const LayoutUnit& layoutUnit = keyboardConfig->layouts[index.row()]; editor->setKeySequence(layoutUnit.getShortcut()); editor->captureKeySequence(); return editor; } //void KKeySequenceWidgetDelegate::setEditorData(QWidget *editor, const QModelIndex &index) const //{ // KKeySequenceWidget *kkeysequencewidget = static_cast<KKeySequenceWidget*>(editor); // QString shortcut = index.model()->data(index, Qt::EditRole).toString(); // kkeysequencewidget->setKeySequence(QKeySequence(shortcut)); // kkeysequencewidget->captureKeySequence(); //// kDebug() << "set editor data"; //} void KKeySequenceWidgetDelegate::setModelData(QWidget *editor, QAbstractItemModel *model, const QModelIndex &index) const { KKeySequenceWidget 
*kkeysequencewidget = static_cast<KKeySequenceWidget*>(editor); QString shortcut = kkeysequencewidget->keySequence().toString(); model->setData(index, shortcut, Qt::EditRole); itemsBeingEdited.remove(index); } void KKeySequenceWidgetDelegate::paint(QPainter* painter, const QStyleOptionViewItem& option, const QModelIndex& index) const { if (itemsBeingEdited.contains(index)) { // StyledBackgroundPainter::drawBackground(painter,option,index); } else { QStyledItemDelegate::paint(painter,option,index); } } // // Xkb Options Tree View // int XkbOptionsTreeModel::rowCount(const QModelIndex& parent) const { if( ! parent.isValid() ) return rules->optionGroupInfos.count(); if( ! parent.parent().isValid() ) return rules->optionGroupInfos[parent.row()]->optionInfos.count(); return 0; } QVariant XkbOptionsTreeModel::data(const QModelIndex& index, int role) const { if (!index.isValid()) return QVariant(); int row = index.row(); if (role == Qt::DisplayRole) { if( ! index.parent().isValid() ) { return rules->optionGroupInfos[row]->description; } else { int groupRow = index.parent().row(); const OptionGroupInfo* xkbGroup = rules->optionGroupInfos[groupRow]; return xkbGroup->optionInfos[row]->description; } } else if (role==Qt::CheckStateRole ) { if( index.parent().isValid() ) { int groupRow = index.parent().row(); const OptionGroupInfo* xkbGroup = rules->optionGroupInfos[groupRow]; const QString& xkbOptionName = xkbGroup->optionInfos[row]->name; return keyboardConfig->xkbOptions.indexOf(xkbOptionName) == -1 ? 
Qt::Unchecked : Qt::Checked; } else { int groupRow = index.row(); const OptionGroupInfo* xkbGroup = rules->optionGroupInfos[groupRow]; foreach(const OptionInfo* optionInfo, xkbGroup->optionInfos) { if( keyboardConfig->xkbOptions.indexOf(optionInfo->name) != -1 ) return Qt::PartiallyChecked; } return Qt::Unchecked; } } return QVariant(); } bool XkbOptionsTreeModel::setData(const QModelIndex & index, const QVariant & value, int role) { int groupRow = index.parent().row(); if( groupRow < 0 ) return false; const OptionGroupInfo* xkbGroup = rules->optionGroupInfos[groupRow]; const OptionInfo* option = xkbGroup->optionInfos[index.row()]; if( value.toInt() == Qt::Checked ) { if( xkbGroup->exclusive ) { // clear if exclusive (TODO: radiobutton) int idx = keyboardConfig->xkbOptions.indexOf(QRegExp(xkbGroup->name + ".*")); if( idx >= 0 ) { for(int i=0; i<xkbGroup->optionInfos.count(); i++) if( xkbGroup->optionInfos[i]->name == keyboardConfig->xkbOptions[idx] ) { setData(createIndex(i, index.column(), (quint32)index.internalId()-index.row()+i), Qt::Unchecked, role); break; } // m_kxkbConfig->m_options.removeAt(idx); // idx = m_kxkbConfig->m_options.indexOf(QRegExp(xkbGroupNm+".*")); } } if( keyboardConfig->xkbOptions.indexOf(option->name) < 0 ) { keyboardConfig->xkbOptions.append(option->name); } } else { keyboardConfig->xkbOptions.removeAll(option->name); } emit dataChanged(index, index); emit dataChanged(index.parent(), index.parent()); return true; } void XkbOptionsTreeModel::gotoGroup(const QString& groupName, QTreeView* view) { const OptionGroupInfo* optionGroupInfo = rules->getOptionGroupInfo(groupName); int index = rules->optionGroupInfos.indexOf((OptionGroupInfo*)optionGroupInfo); if( index != -1 ) { QModelIndex modelIdx = createIndex(index,0); // view->selectionModel()->setCurrentIndex(createIndex(index,0), QItemSelectionModel::NoUpdate); view->setExpanded(modelIdx, true); view->scrollTo(modelIdx, QAbstractItemView::PositionAtTop); 
view->selectionModel()->setCurrentIndex(modelIdx, QItemSelectionModel::Current); view->setFocus(Qt::OtherFocusReason); } // else { // kDebug() << "can't scroll to group" << group; // } }<|fim▁end|>
} void VariantComboDelegate::updateEditorGeometry(QWidget *editor,
<|file_name|>xmlFactory.js<|end_file_name|><|fim▁begin|>/** * ag-grid - Advanced Data Grid / Data Table supporting Javascript / React / AngularJS / Web Components * @version v9.0.3 * @link http://www.ag-grid.com/ * @license MIT */ "use strict"; var __decorate = (this && this.__decorate) || function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; Object.defineProperty(exports, "__esModule", { value: true }); var context_1 = require("./context/context"); var LINE_SEPARATOR = '\r\n'; var XmlFactory = (function () { function XmlFactory() { } XmlFactory.prototype.createXml = function (xmlElement, booleanTransformer) { var _this = this; var props = ""; if (xmlElement.properties) { if (xmlElement.properties.prefixedAttributes) { xmlElement.properties.prefixedAttributes.forEach(function (prefixedSet) { Object.keys(prefixedSet.map).forEach(function (key) { props += _this.returnAttributeIfPopulated(prefixedSet.prefix + key, prefixedSet.map[key], booleanTransformer); }); }); } if (xmlElement.properties.rawMap) { Object.keys(xmlElement.properties.rawMap).forEach(function (key) { props += _this.returnAttributeIfPopulated(key, xmlElement.properties.rawMap[key], booleanTransformer); }); } } var result = "<" + xmlElement.name + props; if (!xmlElement.children && !xmlElement.textNode) { return result + "/>" + LINE_SEPARATOR; } if (xmlElement.textNode) { return result + ">" + xmlElement.textNode + "</" + xmlElement.name + ">" + LINE_SEPARATOR; } result += ">" + LINE_SEPARATOR; xmlElement.children.forEach(function (it) { result += 
_this.createXml(it, booleanTransformer); }); return result + "</" + xmlElement.name + ">" + LINE_SEPARATOR; }; XmlFactory.prototype.returnAttributeIfPopulated = function (key, value, booleanTransformer) { if (!value) { return ""; } var xmlValue = value; if ((typeof (value) === 'boolean')) { if (booleanTransformer) { xmlValue = booleanTransformer(value); } }<|fim▁hole|> return XmlFactory; }()); XmlFactory = __decorate([ context_1.Bean('xmlFactory') ], XmlFactory); exports.XmlFactory = XmlFactory;<|fim▁end|>
xmlValue = '"' + xmlValue + '"'; return " " + key + "=" + xmlValue; };
<|file_name|>emulate_binary.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python # Copyright (c) 2014, Fundacion Dr. Manuel Sadosky # All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
from __future__ import absolute_import from __future__ import print_function import sys import time from barf import BARF from barf.arch import ARCH_ARM from barf.arch import ARCH_X86 from barf.arch import ARCH_X86_MODE_32 from barf.arch import ARCH_X86_MODE_64 from barf.core.symbols import load_symbols from barf.utils.cconv import ArmSystemV from barf.utils.cconv import X86SystemV from barf.utils.cconv import X86_64SystemV from barf.utils.utils import read_c_string from barf.utils.utils import write_c_string def split_command_line(argv): if '--' in argv: prg_options = argv[1:argv.index('--')] prg_arguments = argv[argv.index('--')+1:] else: prg_options = argv[1:] prg_arguments = [] return prg_options, prg_arguments def atoi_hook(emulator, state): print("[+] atoi hooked!") # int atoi(const char *nptr); cc = state['cc'] # Read parameters. nptr = cc.parameters[0] # Emulate function behavior. value = int(read_c_string(emulator, nptr, max_length=1024)) # Save result. cc.return_value = value def printf_hook(emulator, state): print("[+] printf hooked!") # int printf(const char *format, ...); cc = state["cc"] # Read parameters. fmt_ptr = cc.parameters[0] # Emulate function behavior. fmt = read_c_string(emulator, fmt_ptr, max_length=1024) out = fmt print(out) # Save result. cc.return_value = len(out) def get_symbols(binary_path): symbols_by_addr = load_symbols(binary_path) symbols_by_name = {} for addr in symbols_by_addr: name, size, returns = symbols_by_addr[addr] symbols_by_name[name] = (addr, size, returns) return symbols_by_addr, symbols_by_name def setup_argv(emulator, argv, base_addr): addr_size = emulator.arch_info.address_size // 8 argv_entry_addr = {} # Copy arguments into the stack but first leave space for the argv # array (null-terminated). addr = base_addr + (len(argv) + 1) * addr_size for index, arg in enumerate(argv): argv_entry_addr[index] = addr write_c_string(emulator, addr, arg) addr += len(arg) + 1 # each argument is null-terminated # Build argv array. 
for index in range(len(argv)):<|fim▁hole|> # Add null terminator. emulator.write_memory(base_addr + len(argv) * addr_size, addr_size, 0x0) def setup_emulator(emulator, binary, args): # Instantiate calling convention. if binary.architecture == ARCH_X86: if binary.architecture_mode == ARCH_X86_MODE_32: cc = X86SystemV(emulator) else: cc = X86_64SystemV(emulator) elif binary.architecture == ARCH_ARM: cc = ArmSystemV(emulator) arch = emulator.arch_info sp = 0x1500 base_argv = 0x2500 emulator.registers[arch.stack_pointer_register()] = sp setup_argv(emulator, args, base_argv) # Setup main's parameters: argc, argv and envp. cc.parameters[0] = len(args) # argc cc.parameters[1] = base_argv # argv cc.parameters[2] = 0x0 # envp # Load symbols. print("[+] Loading symbols...") symbols_by_addr, symbols_by_name = get_symbols(binary.filename) start = symbols_by_name["main"][0] size = symbols_by_name["main"][1] # TODO Remove hardcoded addresses. if binary.architecture == ARCH_X86: end = start + size - 1 if binary.architecture_mode == ARCH_X86_MODE_32: atoi_addr = 0x8048380 printf_addr = 0x8048350 if binary.architecture_mode == ARCH_X86_MODE_64: atoi_addr = 0x4004d0 printf_addr = 0x4004a0 if binary.architecture == ARCH_ARM: end = start + size - 8 - 8 if start & 0x1 == 0x1: # ARCH_ARM_MODE_THUMB atoi_addr = 0x10394 printf_addr = 0x1035c if start & 0x1 == 0x0: # ARCH_ARM_MODE_ARM atoi_addr = 0x10388 printf_addr = 0x10358 state = { 'cc': cc, } ctx_init = { 'registers': { arch.flags_register(): arch.flags_default_value(), arch.stack_pointer_register(): sp, } } hooks = { atoi_addr: (atoi_hook, state, True, 0), printf_addr: (printf_hook, state, True, 0), } return ctx_init, start, end, hooks def main(): start_time = time.time() # Split program arguments. # ======================================================================== # prg_options, prg_arguments = split_command_line(sys.argv) binary_path = prg_arguments[0] # Loading binary. 
# ======================================================================== # print("[+] Loading binary...") barf = BARF(binary_path) if barf.binary.architecture not in [ARCH_X86, ARCH_ARM]: print("[-] Architecture not supported!") sys.exit(1) # Setup emulator. # ======================================================================== # ctx_init, start, end, hooks = setup_emulator(barf.emulator, barf.binary, prg_arguments) # Emulate. # ======================================================================== # barf.emulate(context=ctx_init, start=start, end=end, hooks=hooks, print_asm=False) end_time = time.time() total_time = end_time - start_time print("[+] Total processing time: {0:8.3f}s".format(total_time)) if __name__ == '__main__': if len(sys.argv) != 4: print("Usage: {} -- samples/bin/loop-simple1.[x86|x86_64|arm|arm_thumb] <iters>".format(sys.argv[0])) sys.exit(1) main()<|fim▁end|>
addr = argv_entry_addr[index] emulator.write_memory(base_addr + index * addr_size, addr_size, addr)
<|file_name|>fontconfig.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Servo Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. #![allow(non_upper_case_globals)] #![allow(non_camel_case_types)] use libc::*; pub type FcChar8 = c_uchar; pub type FcChar16 = c_ushort; pub type FcChar32 = c_uint; pub type FcBool = c_int; pub type enum__FcType = c_uint; pub const FcTypeVoid: u32 = 0_u32; pub const FcTypeInteger: u32 = 1_u32; pub const FcTypeDouble: u32 = 2_u32; pub const FcTypeString: u32 = 3_u32; pub const FcTypeBool: u32 = 4_u32; pub const FcTypeMatrix: u32 = 5_u32; pub const FcTypeCharSet: u32 = 6_u32; pub const FcTypeFTFace: u32 = 7_u32; pub const FcTypeLangSet: u32 = 8_u32; pub type FcType = enum__FcType; pub const FC_WEIGHT_THIN: c_int = 0; pub const FC_WEIGHT_EXTRALIGHT: c_int = 40; pub const FC_WEIGHT_ULTRALIGHT: c_int = FC_WEIGHT_EXTRALIGHT; pub const FC_WEIGHT_LIGHT: c_int = 50; pub const FC_WEIGHT_BOOK: c_int = 75; pub const FC_WEIGHT_REGULAR: c_int = 80; pub const FC_WEIGHT_NORMAL: c_int = FC_WEIGHT_REGULAR; pub const FC_WEIGHT_MEDIUM: c_int = 100; pub const FC_WEIGHT_DEMIBOLD: c_int = 180; pub const FC_WEIGHT_SEMIBOLD: c_int = FC_WEIGHT_DEMIBOLD; pub const FC_WEIGHT_BOLD: c_int = 200; pub const FC_WEIGHT_EXTRABOLD: c_int = 205; pub const FC_WEIGHT_ULTRABOLD: c_int = FC_WEIGHT_EXTRABOLD; pub const FC_WEIGHT_BLACK: c_int = 210; pub const FC_WEIGHT_HEAVY: c_int = FC_WEIGHT_BLACK; pub const FC_WEIGHT_EXTRABLACK: c_int = 215; pub const FC_WEIGHT_ULTRABLACK: c_int = FC_WEIGHT_EXTRABLACK; pub const FC_SLANT_ROMAN: c_int = 0; pub const FC_SLANT_ITALIC: c_int = 100; pub const FC_SLANT_OBLIQUE: c_int = 110; #[repr(C)] 
#[derive(Copy, Clone)] pub struct struct__FcMatrix { pub xx: c_double, pub xy: c_double, pub yx: c_double, pub yy: c_double, } pub type FcMatrix = struct__FcMatrix; pub type struct__FcCharSet = c_void; pub type FcCharSet = struct__FcCharSet; #[repr(C)] #[allow(missing_copy_implementations)] pub struct struct__FcObjectType { pub object: *mut c_char, pub _type: FcType, } pub type FcObjectType = struct__FcObjectType; #[repr(C)] #[allow(missing_copy_implementations)] pub struct struct__FcConstant { pub name: *mut FcChar8, pub object: *mut c_char, pub value: c_int, } pub type FcConstant = struct__FcConstant; pub type enum__FcResult = c_uint; pub const FcResultMatch: u32 = 0_u32; pub const FcResultNoMatch: u32 = 1_u32; pub const FcResultTypeMismatch: u32 = 2_u32; pub const FcResultNoId: u32 = 3_u32; pub const FcResultOutOfMemory: u32 = 4_u32; pub type FcResult = enum__FcResult; pub type struct__FcPattern = c_void; pub type FcPattern = struct__FcPattern; pub type struct__FcLangSet = c_void; pub type FcLangSet = struct__FcLangSet; #[repr(C)] #[allow(missing_copy_implementations)] pub struct struct__FcValue { pub _type: FcType, pub u: union_unnamed1, } pub type FcValue = struct__FcValue; #[repr(C)] #[allow(missing_copy_implementations)] pub struct struct__FcFontSet { pub nfont: c_int, pub sfont: c_int, pub fonts: *mut *mut FcPattern, } pub type FcFontSet = struct__FcFontSet; #[repr(C)] #[allow(missing_copy_implementations)] pub struct struct__FcObjectSet { pub nobject: c_int, pub sobject: c_int, pub objects: *mut *mut c_char, } pub type FcObjectSet = struct__FcObjectSet; pub type enum__FcMatchKind = c_uint; pub const FcMatchPattern: u32 = 0_u32; pub const FcMatchFont: u32 = 1_u32; pub const FcMatchScan: u32 = 2_u32; pub type FcMatchKind = enum__FcMatchKind; pub type enum__FcLangResult = c_uint; pub const FcLangEqual: u32 = 0_u32; pub const FcLangDifferentCountry: u32 = 1_u32; pub const FcLangDifferentTerritory: u32 = 1_u32; pub const FcLangDifferentLang: u32 = 2_u32; pub 
type FcLangResult = enum__FcLangResult; pub type enum__FcSetName = c_uint; pub const FcSetSystem: u32 = 0_u32; pub const FcSetApplication: u32 = 1_u32; pub type FcSetName = enum__FcSetName; pub type struct__FcAtomic = c_void; pub type FcAtomic = struct__FcAtomic; pub type FcEndian = c_uint; pub const FcEndianBig: u32 = 0_u32; pub const FcEndianLittle: u32 = 1_u32; pub type struct__FcConfig = c_void; pub type FcConfig = struct__FcConfig; pub type struct__FcGlobalCache = c_void; pub type FcFileCache = struct__FcGlobalCache; pub type struct__FcBlanks = c_void; <|fim▁hole|> pub type FcStrList = struct__FcStrList; pub type struct__FcStrSet = c_void; pub type FcStrSet = struct__FcStrSet; pub type struct__FcCache = c_void; pub type FcCache = struct__FcCache; pub type union_unnamed1 = c_void /* FIXME: union type */; extern { pub fn FcBlanksCreate() -> *mut FcBlanks; pub fn FcBlanksDestroy(b: *mut FcBlanks); pub fn FcBlanksAdd(b: *mut FcBlanks, ucs4: FcChar32) -> FcBool; pub fn FcBlanksIsMember(b: *mut FcBlanks, ucs4: FcChar32) -> FcBool; pub fn FcCacheDir(c: *mut FcCache) -> *const FcChar8; pub fn FcCacheCopySet(c: *const FcCache) -> *mut FcFontSet; pub fn FcCacheSubdir(c: *const FcCache, i: c_int) -> *const FcChar8; pub fn FcCacheNumSubdir(c: *const FcCache) -> c_int; pub fn FcCacheNumFont(c: *const FcCache) -> c_int; pub fn FcDirCacheUnlink(dir: *const FcChar8, config: *mut FcConfig) -> FcBool; pub fn FcDirCacheValid(cache_file: *const FcChar8) -> FcBool; pub fn FcConfigHome() -> *mut FcChar8; pub fn FcConfigEnableHome(enable: FcBool) -> FcBool; pub fn FcConfigFilename(url: *const FcChar8) -> *mut FcChar8; pub fn FcConfigCreate() -> *mut FcConfig; pub fn FcConfigReference(config: *mut FcConfig) -> *mut FcConfig; pub fn FcConfigDestroy(config: *mut FcConfig); pub fn FcConfigSetCurrent(config: *mut FcConfig) -> FcBool; pub fn FcConfigGetCurrent() -> *mut FcConfig; pub fn FcConfigUptoDate(config: *mut FcConfig) -> FcBool; pub fn FcConfigBuildFonts(config: *mut FcConfig) -> 
FcBool; pub fn FcConfigGetFontDirs(config: *mut FcConfig) -> *mut FcStrList; pub fn FcConfigGetConfigDirs(config: *mut FcConfig) -> *mut FcStrList; pub fn FcConfigGetConfigFiles(config: *mut FcConfig) -> *mut FcStrList; pub fn FcConfigGetCache(config: *mut FcConfig) -> *mut FcChar8; pub fn FcConfigGetBlanks(config: *mut FcConfig) -> *mut FcBlanks; pub fn FcConfigGetCacheDirs(config: *const FcConfig) -> *mut FcStrList; pub fn FcConfigGetRescanInterval(config: *mut FcConfig) -> c_int; pub fn FcConfigSetRescanInterval(config: *mut FcConfig, rescanInterval: c_int) -> FcBool; pub fn FcConfigGetFonts(config: *mut FcConfig, set: FcSetName) -> *mut FcFontSet; pub fn FcConfigAppFontAddFile(config: *mut FcConfig, file: *const FcChar8) -> FcBool; pub fn FcConfigAppFontAddDir(config: *mut FcConfig, dir: *const FcChar8) -> FcBool; pub fn FcConfigAppFontClear(config: *mut FcConfig); pub fn FcConfigSubstituteWithPat(config: *mut FcConfig, p: *mut FcPattern, p_pat: *mut FcPattern, kind: FcMatchKind) -> FcBool; pub fn FcConfigSubstitute(config: *mut FcConfig, p: *mut FcPattern, kind: FcMatchKind) -> FcBool; pub fn FcCharSetCreate() -> *mut FcCharSet; pub fn FcCharSetNew() -> *mut FcCharSet; pub fn FcCharSetDestroy(fcs: *mut FcCharSet); pub fn FcCharSetAddChar(fcs: *mut FcCharSet, ucs4: FcChar32) -> FcBool; pub fn FcCharSetCopy(src: *mut FcCharSet) -> *mut FcCharSet; pub fn FcCharSetEqual(a: *const FcCharSet, b: *const FcCharSet) -> FcBool; pub fn FcCharSetIntersect(a: *const FcCharSet, b: *const FcCharSet) -> *mut FcCharSet; pub fn FcCharSetUnion(a: *const FcCharSet, b: *const FcCharSet) -> *mut FcCharSet; pub fn FcCharSetSubtract(a: *const FcCharSet, b: *const FcCharSet) -> *mut FcCharSet; pub fn FcCharSetMerge(a: *mut FcCharSet, b: *const FcCharSet, changed: *mut FcBool) -> FcBool; pub fn FcCharSetHasChar(fcs: *const FcCharSet, ucs4: FcChar32) -> FcBool; pub fn FcCharSetCount(a: *const FcCharSet) -> FcChar32; pub fn FcCharSetIntersectCount(a: *const FcCharSet, b: *const 
FcCharSet) -> FcChar32; pub fn FcCharSetSubtractCount(a: *const FcCharSet, b: *const FcCharSet) -> FcChar32; pub fn FcCharSetIsSubset(a: *const FcCharSet, bi: *const FcCharSet) -> FcBool; pub fn FcCharSetFirstPage(a: *const FcCharSet, map: *mut FcChar32, next: *mut FcChar32) -> FcChar32; pub fn FcCharSetNextPage(a: *const FcCharSet, map: *mut FcChar32, next: *mut FcChar32) -> FcChar32; pub fn FcCharSetCoverage(a: *const FcCharSet, page: FcChar32, result: *mut FcChar32) -> FcChar32; pub fn FcValuePrint(v: FcValue); pub fn FcPatternPrint(p: *const FcPattern); pub fn FcFontSetPrint(s: *mut FcFontSet); pub fn FcDefaultSubstitute(pattern: *mut FcPattern); pub fn FcFileIsDir(file: *const FcChar8) -> FcBool; pub fn FcFileScan(set: *mut FcFontSet, dirs: *mut FcStrSet, cache: *mut FcFileCache, blanks: *mut FcBlanks, file: *const FcChar8, force: FcBool) -> FcBool; pub fn FcDirScan(set: *mut FcFontSet, dirs: *mut FcStrSet, cache: *mut FcFileCache, blanks: *mut FcBlanks, dir: *const FcChar8, force: FcBool) -> FcBool; pub fn FcDirSave(set: *mut FcFontSet, dirs: *const FcStrSet, dir: *mut FcChar8) -> FcBool; pub fn FcDirCacheLoad(dir: *const FcChar8, config: *mut FcConfig, cache_file: *mut *mut FcChar8) -> *mut FcCache; pub fn FcDirCacheRead(dir: *const FcChar8, force: FcBool, config: *mut FcConfig) -> *mut FcCache; //pub fn FcDirCacheLoadFile(cache_file: *mut FcChar8, file_stat: *mut struct_stat) -> *mut FcCache; pub fn FcDirCacheUnload(cache: *mut FcCache); pub fn FcFreeTypeQuery(file: *const FcChar8, id: c_int, blanks: *mut FcBlanks, count: *mut c_int) -> *mut FcPattern; pub fn FcFontSetCreate() -> *mut FcFontSet; pub fn FcFontSetDestroy(s: *mut FcFontSet); pub fn FcFontSetAdd(s: *mut FcFontSet, font: *mut FcPattern) -> FcBool; pub fn FcInitLoadConfig() -> *mut FcConfig; pub fn FcInitLoadConfigAndFonts() -> *mut FcConfig; pub fn FcInit() -> FcBool; pub fn FcFini(); pub fn FcGetVersion() -> c_int; pub fn FcInitReinitialize() -> FcBool; pub fn FcInitBringUptoDate() -> FcBool; 
pub fn FcGetLangs() -> *mut FcStrSet; pub fn FcLangGetCharSet(lang: *const FcChar8) -> *mut FcCharSet; pub fn FcLangSetCreate() -> *mut FcLangSet; pub fn FcLangSetDestroy(ls: *mut FcLangSet); pub fn FcLangSetCopy(ls: *const FcLangSet) -> *mut FcLangSet; pub fn FcLangSetAdd(ls: *mut FcLangSet, lang: *const FcChar8) -> FcBool; pub fn FcLangSetHasLang(ls: *const FcLangSet, lang: *const FcChar8) -> FcLangResult; pub fn FcLangSetCompare(lsa: *const FcLangSet, lsb: *const FcLangSet) -> FcLangResult; pub fn FcLangSetContains(lsa: *const FcLangSet, lsb: *const FcLangSet) -> FcBool; pub fn FcLangSetEqual(lsa: *const FcLangSet, lsb: *const FcLangSet) -> FcBool; pub fn FcLangSetHash(ls: *const FcLangSet) -> FcChar32; pub fn FcLangSetGetLangs(ls: *const FcLangSet) -> *mut FcStrSet; pub fn FcObjectSetCreate() -> *mut FcObjectSet; pub fn FcObjectSetAdd(os: *mut FcObjectSet, object: *const c_char) -> FcBool; pub fn FcObjectSetDestroy(os: *mut FcObjectSet); //pub fn FcObjectSetVaBuild(first: *mut c_char, va: *mut __va_list_tag) -> *mut FcObjectSet; pub fn FcObjectSetBuild(first: *mut c_char/* FIXME: variadic function */) -> *mut FcObjectSet; pub fn FcFontSetList(config: *mut FcConfig, sets: *mut *mut FcFontSet, nsets: c_int, p: *mut FcPattern, os: *mut FcObjectSet) -> *mut FcFontSet; pub fn FcFontList(config: *mut FcConfig, p: *mut FcPattern, os: *mut FcObjectSet) -> *mut FcFontSet; pub fn FcAtomicCreate(file: *const FcChar8) -> *mut FcAtomic; pub fn FcAtomicLock(atomic: *mut FcAtomic) -> FcBool; pub fn FcAtomicNewFile(atomic: *mut FcAtomic) -> *mut FcChar8; pub fn FcAtomicOrigFile(atomic: *mut FcAtomic) -> *mut FcChar8; pub fn FcAtomicReplaceOrig(atomic: *mut FcAtomic) -> FcBool; pub fn FcAtomicDeleteNew(atomic: *mut FcAtomic); pub fn FcAtomicUnlock(atomic: *mut FcAtomic); pub fn FcAtomicDestroy(atomic: *mut FcAtomic); pub fn FcFontSetMatch(config: *mut FcConfig, sets: *mut *mut FcFontSet, nsets: c_int, p: *mut FcPattern, result: *mut FcResult) -> *mut FcPattern; pub fn 
FcFontMatch(config: *mut FcConfig, p: *mut FcPattern, result: *mut FcResult) -> *mut FcPattern; pub fn FcFontRenderPrepare(config: *mut FcConfig, pat: *mut FcPattern, font: *mut FcPattern) -> *mut FcPattern; pub fn FcFontSetSort(config: *mut FcConfig, sets: *mut *mut FcFontSet, nsets: c_int, p: *mut FcPattern, trim: FcBool, csp: *mut *mut FcCharSet, result: *mut FcResult) -> *mut FcFontSet; pub fn FcFontSort(config: *mut FcConfig, p: *mut FcPattern, trim: FcBool, csp: *mut *mut FcCharSet, result: *mut FcResult) -> *mut FcFontSet; pub fn FcFontSetSortDestroy(fs: *mut FcFontSet); pub fn FcMatrixCopy(mat: *const FcMatrix) -> *mut FcMatrix; pub fn FcMatrixEqual(mat1: *const FcMatrix, mat2: *const FcMatrix) -> FcBool; pub fn FcMatrixMultiply(result: *mut FcMatrix, a: *const FcMatrix, b: *const FcMatrix); pub fn FcMatrixRotate(m: *mut FcMatrix, c: c_double, s: c_double); pub fn FcMatrixScale(m: *mut FcMatrix, sx: c_double, sy: c_double); pub fn FcMatrixShear(m: *mut FcMatrix, sh: c_double, sv: c_double); pub fn FcNameRegisterObjectTypes(types: *const FcObjectType, ntype: c_int) -> FcBool; pub fn FcNameUnregisterObjectTypes(types: *const FcObjectType, ntype: c_int) -> FcBool; pub fn FcNameGetObjectType(object: *const c_char) -> *const FcObjectType; pub fn FcNameRegisterConstants(consts: *const FcConstant, nconsts: c_int) -> FcBool; pub fn FcNameUnregisterConstants(consts: *const FcConstant, nconsts: c_int) -> FcBool; pub fn FcNameGetConstant(string: *mut FcChar8) -> *const FcConstant; pub fn FcNameConstant(string: *mut FcChar8, result: *mut c_int) -> FcBool; pub fn FcNameParse(name: *const FcChar8) -> *mut FcPattern; pub fn FcNameUnparse(pat: *mut FcPattern) -> *mut FcChar8; pub fn FcPatternCreate() -> *mut FcPattern; pub fn FcPatternDuplicate(p: *const FcPattern) -> *mut FcPattern; pub fn FcPatternReference(p: *mut FcPattern); pub fn FcPatternFilter(p: *mut FcPattern, os: *const FcObjectSet) -> *mut FcPattern; pub fn FcValueDestroy(v: FcValue); pub fn FcValueEqual(va: 
FcValue, vb: FcValue) -> FcBool; pub fn FcValueSave(v: FcValue) -> FcValue; pub fn FcPatternDestroy(p: *mut FcPattern); pub fn FcPatternEqual(pa: *const FcPattern, pb: *const FcPattern) -> FcBool; pub fn FcPatternEqualSubset(pa: *const FcPattern, pb: *const FcPattern, os: *const FcObjectSet) -> FcBool; pub fn FcPatternHash(p: *const FcPattern) -> FcChar32; pub fn FcPatternAdd(p: *mut FcPattern, object: *const c_char, value: FcValue, append: FcBool) -> FcBool; pub fn FcPatternAddWeak(p: *mut FcPattern, object: *const c_char, value: FcValue, append: FcBool) -> FcBool; pub fn FcPatternGet(p: *mut FcPattern, object: *const c_char, id: c_int, v: *mut FcValue) -> FcResult; pub fn FcPatternDel(p: *mut FcPattern, object: *const c_char) -> FcBool; pub fn FcPatternRemove(p: *mut FcPattern, object: *const c_char, id: c_int) -> FcBool; pub fn FcPatternAddInteger(p: *mut FcPattern, object: *const c_char, i: c_int) -> FcBool; pub fn FcPatternAddDouble(p: *mut FcPattern, object: *const c_char, d: c_double) -> FcBool; pub fn FcPatternAddString(p: *mut FcPattern, object: *const c_char, s: *const FcChar8) -> FcBool; pub fn FcPatternAddMatrix(p: *mut FcPattern, object: *const c_char, s: *const FcMatrix) -> FcBool; pub fn FcPatternAddCharSet(p: *mut FcPattern, object: *const c_char, c: *const FcCharSet) -> FcBool; pub fn FcPatternAddBool(p: *mut FcPattern, object: *const c_char, b: FcBool) -> FcBool; pub fn FcPatternAddLangSet(p: *mut FcPattern, object: *const c_char, ls: *const FcLangSet) -> FcBool; pub fn FcPatternGetInteger(p: *mut FcPattern, object: *const c_char, n: c_int, i: *mut c_int) -> FcResult; pub fn FcPatternGetDouble(p: *mut FcPattern, object: *const c_char, n: c_int, d: *mut c_double) -> FcResult; pub fn FcPatternGetString(p: *mut FcPattern, object: *const c_char, n: c_int, s: *mut *mut FcChar8) -> FcResult; pub fn FcPatternGetMatrix(p: *mut FcPattern, object: *const c_char, n: c_int, s: *mut *mut FcMatrix) -> FcResult; pub fn FcPatternGetCharSet(p: *mut FcPattern, 
object: *const c_char, n: c_int, c: *mut *mut FcCharSet) -> FcResult; pub fn FcPatternGetBool(p: *mut FcPattern, object: *const c_char, n: c_int, b: *mut FcBool) -> FcResult; pub fn FcPatternGetLangSet(p: *mut FcPattern, object: *const c_char, n: c_int, ls: *mut *mut FcLangSet) -> FcResult; //pub fn FcPatternVaBuild(p: *mut FcPattern, va: *mut __va_list_tag) -> *mut FcPattern; pub fn FcPatternBuild(p: *mut FcPattern/* FIXME: variadic function */) -> *mut FcPattern; pub fn FcPatternFormat(pat: *mut FcPattern, format: *const FcChar8) -> *mut FcChar8; pub fn FcStrCopy(s: *const FcChar8) -> *mut FcChar8; pub fn FcStrCopyFilename(s: *const FcChar8) -> *mut FcChar8; pub fn FcStrPlus(s1: *const FcChar8, s2: *const FcChar8) -> *mut FcChar8; pub fn FcStrFree(s: *mut FcChar8); pub fn FcStrDowncase(s: *const FcChar8) -> *mut FcChar8; pub fn FcStrCmpIgnoreCase(s1: *const FcChar8, s2: *const FcChar8) -> c_int; pub fn FcStrCmp(s1: *const FcChar8, s2: *const FcChar8) -> c_int; pub fn FcStrStrIgnoreCase(s1: *const FcChar8, s2: *const FcChar8) -> *mut FcChar8; pub fn FcStrStr(s1: *const FcChar8, s2: *const FcChar8) -> *mut FcChar8; pub fn FcUtf8ToUcs4(src_orig: *mut FcChar8, dst: *mut FcChar32, len: c_int) -> c_int; pub fn FcUtf8Len(string: *mut FcChar8, len: c_int, nchar: *mut c_int, wchar: *mut c_int) -> FcBool; pub fn FcUcs4ToUtf8(ucs4: FcChar32, dest: *mut FcChar8) -> c_int; pub fn FcUtf16ToUcs4(src_orig: *mut FcChar8, endian: FcEndian, dst: *mut FcChar32, len: c_int) -> c_int; pub fn FcUtf16Len(string: *mut FcChar8, endian: FcEndian, len: c_int, nchar: *mut c_int, wchar: *mut c_int) -> FcBool; pub fn FcStrDirname(file: *const FcChar8) -> *mut FcChar8; pub fn FcStrBasename(file: *const FcChar8) -> *mut FcChar8; pub fn FcStrSetCreate() -> *mut FcStrSet; pub fn FcStrSetMember(set: *mut FcStrSet, s: *const FcChar8) -> FcBool; pub fn FcStrSetEqual(sa: *mut FcStrSet, sb: *mut FcStrSet) -> FcBool; pub fn FcStrSetAdd(set: *mut FcStrSet, s: *const FcChar8) -> FcBool; pub fn 
FcStrSetAddFilename(set: *mut FcStrSet, s: *const FcChar8) -> FcBool; pub fn FcStrSetDel(set: *mut FcStrSet, s: *const FcChar8) -> FcBool; pub fn FcStrSetDestroy(set: *mut FcStrSet); pub fn FcStrListCreate(set: *mut FcStrSet) -> *mut FcStrList; pub fn FcStrListNext(list: *mut FcStrList) -> *mut FcChar8; pub fn FcStrListDone(list: *mut FcStrList); pub fn FcConfigParseAndLoad(config: *mut FcConfig, file: *const FcChar8, complain: FcBool) -> FcBool; }<|fim▁end|>
pub type FcBlanks = struct__FcBlanks; pub type struct__FcStrList = c_void;
<|file_name|>tests.rs<|end_file_name|><|fim▁begin|>use super::rocket; use rocket::testing::MockRequest; use rocket::http::Status; use rocket::http::Method::*; #[test] fn test_push_pop() {<|fim▁hole|> assert_eq!(response.status(), Status::Ok); let mut req = MockRequest::new(Get, "/pop"); let mut response = req.dispatch_with(&rocket); assert_eq!(response.body_string(), Some("test1".to_string())); }<|fim▁end|>
let rocket = rocket(); let mut req = MockRequest::new(Put, "/push?description=test1"); let response = req.dispatch_with(&rocket);
<|file_name|>ruby_ext_utils.hpp<|end_file_name|><|fim▁begin|>#ifndef RUBY_EXT_UTILS_HPP_ #define RUBY_EXT_UTILS_HPP_ 1 #include <functional> #include <ruby.h> #include <stdlib.h> #include <string.h> #include <stdarg.h> #include <typeinfo> #ifdef __GNUC__ #include <cxxabi.h> #endif template <typename T> static const char * type_name() { #ifdef __GNUC__ const int buf_size = 32; static char tname[buf_size]; if (tname[0] != 0) { return tname; } const std::type_info& id = typeid(T); int status; char *name = abi::__cxa_demangle(id.name(), NULL, 0, &status); if (name != NULL) { if (status == 0) { strncpy(tname, name, buf_size - 1); } else { strncpy(tname, id.name(), buf_size - 1); } free(name); } return tname; #else return typeid(T).name(); #endif } /* * undefined reference to ... [g++ (tdm-1) 4.5.2] */ // template<typename T> // static void // tmp_obj_free(T ptr) // { // if (ptr) { // delete ptr; // } // } // template<typename T> // static void // tmp_ary_free(T ptr) // { // if (ptr) { // delete[] ptr; // } // } template<typename T> static void tmp_obj_free(void *ptr) { if (ptr) { T obj = static_cast<T>(ptr); delete obj; } } template<typename T> static void tmp_ary_free(void *ptr) { if (ptr) { T obj = static_cast<T>(ptr); delete[] obj; }<|fim▁hole|> template<typename T> static inline VALUE wrap_tmp_obj(T ptr) { return Data_Wrap_Struct(rb_cObject, 0, tmp_obj_free<T>, ptr); } template<typename T> static inline VALUE wrap_tmp_ary(T ptr) { return Data_Wrap_Struct(rb_cObject, 0, tmp_ary_free<T>, ptr); } void delete_tmp_obj(volatile VALUE *store); #define delete_tmp_ary(x) delete_tmp_obj(x) /* * http://masamitsu-murase.blogspot.jp/2013/12/sevenzipruby-2-c-ruby.html */ extern "C" VALUE rxu_run_functor(VALUE p); #define RXU_PROTECT_FUNC(func) ((VALUE (*)(VALUE))(func)) template<typename T> static inline VALUE _rb_protect(T func, int *state) { typedef std::function<VALUE ()> func_type; func_type f = func; return rb_protect(rxu_run_functor, reinterpret_cast<VALUE>(&f), state); } 
template<typename T1, typename T2> static inline VALUE _rb_rescue(T1 func1, T2 func2) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2)); } #if defined(_MSC_VER) && _MSC_VER <= 1800 template<VALUE& e1, typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), e1, NULL); } template<VALUE& e1, VALUE& e2, typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), e1, e2, NULL); } template<VALUE& e1, VALUE& e2, VALUE& e3, typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), e1, e2, e3, NULL); } template<VALUE& e1, VALUE& e2, VALUE& e3, VALUE& e4, typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), e1, e2, e3, e4, NULL); } template<VALUE& e1, VALUE& e2, VALUE& e3, VALUE& e4, VALUE& e5, typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2) { 
typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), e1, e2, e3, e4, e5, NULL); } #else template<VALUE&... exceptions, typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), exceptions..., NULL); } #endif template<typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2, VALUE e1) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), e1, NULL); } template<typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2, VALUE e1, VALUE e2) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), e1, e2, NULL); } template<typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2, VALUE e1, VALUE e2, VALUE e3) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), e1, e2, e3, NULL); } template<typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2, VALUE e1, VALUE e2, VALUE e3, VALUE e4) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( 
RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), e1, e2, e3, e4, NULL); } template<typename T1, typename T2> static inline VALUE _rb_rescue2(T1 func1, T2 func2, VALUE e1, VALUE e2, VALUE e3, VALUE e4, VALUE e5) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_rescue2( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2), e1, e2, e3, e4, e5, NULL); } template<typename T1, typename T2> static inline VALUE _rb_ensure(T1 func1, T2 func2) { typedef std::function<VALUE ()> func_type; func_type f1 = func1; func_type f2 = func2; return rb_ensure( RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f1), RUBY_METHOD_FUNC(rxu_run_functor), reinterpret_cast<VALUE>(&f2)); } extern "C" int rxu_run_functor_foreach(VALUE key, VALUE val, VALUE p); #define RXU_FOREACH_FUNC(func) ((int (*)(ANYARGS))(func)) template<typename T> static inline void _rb_hash_foreach(VALUE obj, T func) { typedef std::function<int (VALUE, VALUE)> func_type; func_type f = func; rb_hash_foreach(obj, RXU_FOREACH_FUNC(rxu_run_functor_foreach), reinterpret_cast<VALUE>(&f)); } #endif /* RUBY_EXT_UTILS_HPP_ */<|fim▁end|>
}
<|file_name|>po2js.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # convert .po to .js # import json import optparse import os import polib import re import string<|fim▁hole|>parser.add_option("--quiet", action="store_false", default=True, dest="verbose", help="don't print status messages to stdout") (options, args) = parser.parse_args() if args == None or len(args) == 0: print("ERROR: you must specify at least one po file to translate"); sys.exit(1) paramFix = re.compile("(\\(([0-9])\\))") for srcfile in args: destfile = os.path.splitext(srcfile)[0] + ".js" if options.verbose: print("INFO: converting %s to %s" % (srcfile, destfile)) xlate_map = {} po = polib.pofile(srcfile, autodetect_encoding=False, encoding="utf-8", wrapwidth=-1) for entry in po: if entry.obsolete or entry.msgstr == '': continue xlate_map[entry.msgid] = entry.msgstr; dest = open(destfile, "w") dest.write('i18n = ') encoder = json.JSONEncoder() for part in encoder.iterencode(xlate_map): if part.startswith('"function('): dest.write(part[1:-1]); else: dest.write(part); dest.write(";\n") dest.close()<|fim▁end|>
import sys parser = optparse.OptionParser(usage="usage: %prog [options] pofile...") parser.add_option("--callback", default="_.setTranslation", dest="callback", help="callback function to call with data")
<|file_name|>bash_completions.go<|end_file_name|><|fim▁begin|>package cobra import ( "bytes" "fmt" "io" "os" "sort" "strings" "github.com/spf13/pflag" ) // Annotations for Bash completion. const ( BashCompFilenameExt = "cobra_annotation_bash_completion_filename_extensions" BashCompCustom = "cobra_annotation_bash_completion_custom" BashCompOneRequiredFlag = "cobra_annotation_bash_completion_one_required_flag" BashCompSubdirsInDir = "cobra_annotation_bash_completion_subdirs_in_dir" ) func writePreamble(buf *bytes.Buffer, name string) { buf.WriteString(fmt.Sprintf("# bash completion for %-36s -*- shell-script -*-\n", name)) buf.WriteString(fmt.Sprintf(` __%[1]s_debug() { if [[ -n ${BASH_COMP_DEBUG_FILE} ]]; then echo "$*" >> "${BASH_COMP_DEBUG_FILE}" fi } # Homebrew on Macs have version 1.3 of bash-completion which doesn't include # _init_completion. This is a very minimal version of that function. __%[1]s_init_completion() { COMPREPLY=() _get_comp_words_by_ref "$@" cur prev words cword } __%[1]s_index_of_word() { local w word=$1 shift index=0 for w in "$@"; do [[ $w = "$word" ]] && return index=$((index+1)) done index=-1 } __%[1]s_contains_word() { local w word=$1; shift for w in "$@"; do [[ $w = "$word" ]] && return done return 1 } __%[1]s_handle_reply() { __%[1]s_debug "${FUNCNAME[0]}" case $cur in -*) if [[ $(type -t compopt) = "builtin" ]]; then compopt -o nospace fi local allflags if [ ${#must_have_one_flag[@]} -ne 0 ]; then allflags=("${must_have_one_flag[@]}") else allflags=("${flags[*]} ${two_word_flags[*]}") fi COMPREPLY=( $(compgen -W "${allflags[*]}" -- "$cur") ) if [[ $(type -t compopt) = "builtin" ]]; then [[ "${COMPREPLY[0]}" == *= ]] || compopt +o nospace fi # complete after --flag=abc if [[ $cur == *=* ]]; then if [[ $(type -t compopt) = "builtin" ]]; then compopt +o nospace fi local index flag flag="${cur%%=*}" __%[1]s_index_of_word "${flag}" "${flags_with_completion[@]}" COMPREPLY=() if [[ ${index} -ge 0 ]]; then PREFIX="" cur="${cur#*=}" 
${flags_completion[${index}]} if [ -n "${ZSH_VERSION}" ]; then # zsh completion needs --flag= prefix eval "COMPREPLY=( \"\${COMPREPLY[@]/#/${flag}=}\" )" fi fi fi return 0; ;; esac # check if we are handling a flag with special work handling local index __%[1]s_index_of_word "${prev}" "${flags_with_completion[@]}" if [[ ${index} -ge 0 ]]; then ${flags_completion[${index}]} return fi # we are parsing a flag and don't have a special handler, no completion if [[ ${cur} != "${words[cword]}" ]]; then return fi local completions completions=("${commands[@]}") if [[ ${#must_have_one_noun[@]} -ne 0 ]]; then completions=("${must_have_one_noun[@]}") fi if [[ ${#must_have_one_flag[@]} -ne 0 ]]; then completions+=("${must_have_one_flag[@]}") fi COMPREPLY=( $(compgen -W "${completions[*]}" -- "$cur") ) if [[ ${#COMPREPLY[@]} -eq 0 && ${#noun_aliases[@]} -gt 0 && ${#must_have_one_noun[@]} -ne 0 ]]; then COMPREPLY=( $(compgen -W "${noun_aliases[*]}" -- "$cur") ) fi if [[ ${#COMPREPLY[@]} -eq 0 ]]; then declare -F __custom_func >/dev/null && __custom_func fi # available in bash-completion >= 2, not always present on macOS if declare -F __ltrim_colon_completions >/dev/null; then __ltrim_colon_completions "$cur" fi } # The arguments should be in the form "ext1|ext2|extn" __%[1]s_handle_filename_extension_flag() { local ext="$1" _filedir "@(${ext})" } __%[1]s_handle_subdirs_in_dir_flag() { local dir="$1" pushd "${dir}" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 } __%[1]s_handle_flag() { __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" # if a command required a flag, and we found it, unset must_have_one_flag() local flagname=${words[c]} local flagvalue # if the word contained an = if [[ ${words[c]} == *"="* ]]; then flagvalue=${flagname#*=} # take in as flagvalue after the = flagname=${flagname%%=*} # strip everything after the = flagname="${flagname}=" # but put the = back fi __%[1]s_debug "${FUNCNAME[0]}: looking for ${flagname}" if __%[1]s_contains_word 
"${flagname}" "${must_have_one_flag[@]}"; then must_have_one_flag=() fi # if you set a flag which only applies to this command, don't show subcommands if __%[1]s_contains_word "${flagname}" "${local_nonpersistent_flags[@]}"; then commands=() fi # keep flag value with flagname as flaghash # flaghash variable is an associative array which is only supported in bash > 3. if [[ -z "${BASH_VERSION}" || "${BASH_VERSINFO[0]}" -gt 3 ]]; then if [ -n "${flagvalue}" ] ; then flaghash[${flagname}]=${flagvalue} elif [ -n "${words[ $((c+1)) ]}" ] ; then flaghash[${flagname}]=${words[ $((c+1)) ]} else flaghash[${flagname}]="true" # pad "true" for bool flag fi fi # skip the argument to a two word flag if __%[1]s_contains_word "${words[c]}" "${two_word_flags[@]}"; then c=$((c+1)) # if we are looking for a flags value, don't show commands if [[ $c -eq $cword ]]; then commands=() fi fi c=$((c+1)) } __%[1]s_handle_noun() { __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" if __%[1]s_contains_word "${words[c]}" "${must_have_one_noun[@]}"; then must_have_one_noun=() elif __%[1]s_contains_word "${words[c]}" "${noun_aliases[@]}"; then must_have_one_noun=() fi nouns+=("${words[c]}") c=$((c+1)) } __%[1]s_handle_command() { __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" local next_command if [[ -n ${last_command} ]]; then next_command="_${last_command}_${words[c]//:/__}" else if [[ $c -eq 0 ]]; then next_command="_$(basename "${words[c]//:/__}")" else next_command="_${words[c]//:/__}" fi fi c=$((c+1)) __%[1]s_debug "${FUNCNAME[0]}: looking for ${next_command}" declare -F "$next_command" >/dev/null && $next_command } __%[1]s_handle_word() { if [[ $c -ge $cword ]]; then __%[1]s_handle_reply return fi __%[1]s_debug "${FUNCNAME[0]}: c is $c words[c] is ${words[c]}" if [[ "${words[c]}" == -* ]]; then __%[1]s_handle_flag elif __%[1]s_contains_word "${words[c]}" "${commands[@]}"; then __%[1]s_handle_command elif [[ $c -eq 0 ]] && __%[1]s_contains_word "$(basename 
"${words[c]}")" "${commands[@]}"; then __%[1]s_handle_command else __%[1]s_handle_noun fi __%[1]s_handle_word } `, name)) } func writePostscript(buf *bytes.Buffer, name string) { name = strings.Replace(name, ":", "__", -1) buf.WriteString(fmt.Sprintf("__start_%s()\n", name)) buf.WriteString(fmt.Sprintf(`{ local cur prev words cword declare -A flaghash 2>/dev/null || : if declare -F _init_completion >/dev/null 2>&1; then _init_completion -s || return else __%[1]s_init_completion -n "=" || return fi local c=0 local flags=() local two_word_flags=() local local_nonpersistent_flags=() local flags_with_completion=() local flags_completion=() local commands=("%[1]s") local must_have_one_flag=() local must_have_one_noun=() local last_command local nouns=() __%[1]s_handle_word } `, name)) buf.WriteString(fmt.Sprintf(`if [[ $(type -t compopt) = "builtin" ]]; then complete -o default -F __start_%s %s else complete -o default -o nospace -F __start_%s %s fi `, name, name, name, name)) buf.WriteString("# ex: ts=4 sw=4 et filetype=sh\n") } func writeCommands(buf *bytes.Buffer, cmd *Command) { buf.WriteString(" commands=()\n") for _, c := range cmd.Commands() { if !c.IsAvailableCommand() || c == cmd.helpCommand { continue } buf.WriteString(fmt.Sprintf(" commands+=(%q)\n", c.Name())) } buf.WriteString("\n") } func writeFlagHandler(buf *bytes.Buffer, name string, annotations map[string][]string, cmd *Command) { for key, value := range annotations { switch key { case BashCompFilenameExt: buf.WriteString(fmt.Sprintf(" flags_with_completion+=(%q)\n", name)) var ext string if len(value) > 0 { ext = fmt.Sprintf("__%s_handle_filename_extension_flag ", cmd.Name()) + strings.Join(value, "|") } else { ext = "_filedir" } buf.WriteString(fmt.Sprintf(" flags_completion+=(%q)\n", ext)) case BashCompCustom: buf.WriteString(fmt.Sprintf(" flags_with_completion+=(%q)\n", name)) if len(value) > 0 { handlers := strings.Join(value, "; ") buf.WriteString(fmt.Sprintf(" flags_completion+=(%q)\n", 
handlers)) } else { buf.WriteString(" flags_completion+=(:)\n") } case BashCompSubdirsInDir: buf.WriteString(fmt.Sprintf(" flags_with_completion+=(%q)\n", name)) var ext string if len(value) == 1 { ext = fmt.Sprintf("__%s_handle_subdirs_in_dir_flag ", cmd.Name()) + value[0] } else { ext = "_filedir -d"<|fim▁hole|> buf.WriteString(fmt.Sprintf(" flags_completion+=(%q)\n", ext)) } } } func writeShortFlag(buf *bytes.Buffer, flag *pflag.Flag, cmd *Command) { name := flag.Shorthand format := " " if len(flag.NoOptDefVal) == 0 { format += "two_word_" } format += "flags+=(\"-%s\")\n" buf.WriteString(fmt.Sprintf(format, name)) writeFlagHandler(buf, "-"+name, flag.Annotations, cmd) } func writeFlag(buf *bytes.Buffer, flag *pflag.Flag, cmd *Command) { name := flag.Name format := " flags+=(\"--%s" if len(flag.NoOptDefVal) == 0 { format += "=" } format += "\")\n" buf.WriteString(fmt.Sprintf(format, name)) writeFlagHandler(buf, "--"+name, flag.Annotations, cmd) } func writeLocalNonPersistentFlag(buf *bytes.Buffer, flag *pflag.Flag) { name := flag.Name format := " local_nonpersistent_flags+=(\"--%s" if len(flag.NoOptDefVal) == 0 { format += "=" } format += "\")\n" buf.WriteString(fmt.Sprintf(format, name)) } func writeFlags(buf *bytes.Buffer, cmd *Command) { buf.WriteString(` flags=() two_word_flags=() local_nonpersistent_flags=() flags_with_completion=() flags_completion=() `) localNonPersistentFlags := cmd.LocalNonPersistentFlags() cmd.NonInheritedFlags().VisitAll(func(flag *pflag.Flag) { if nonCompletableFlag(flag) { return } writeFlag(buf, flag, cmd) if len(flag.Shorthand) > 0 { writeShortFlag(buf, flag, cmd) } if localNonPersistentFlags.Lookup(flag.Name) != nil { writeLocalNonPersistentFlag(buf, flag) } }) cmd.InheritedFlags().VisitAll(func(flag *pflag.Flag) { if nonCompletableFlag(flag) { return } writeFlag(buf, flag, cmd) if len(flag.Shorthand) > 0 { writeShortFlag(buf, flag, cmd) } }) buf.WriteString("\n") } func writeRequiredFlag(buf *bytes.Buffer, cmd *Command) { 
buf.WriteString(" must_have_one_flag=()\n") flags := cmd.NonInheritedFlags() flags.VisitAll(func(flag *pflag.Flag) { if nonCompletableFlag(flag) { return } for key := range flag.Annotations { switch key { case BashCompOneRequiredFlag: format := " must_have_one_flag+=(\"--%s" if flag.Value.Type() != "bool" { format += "=" } format += "\")\n" buf.WriteString(fmt.Sprintf(format, flag.Name)) if len(flag.Shorthand) > 0 { buf.WriteString(fmt.Sprintf(" must_have_one_flag+=(\"-%s\")\n", flag.Shorthand)) } } } }) } func writeRequiredNouns(buf *bytes.Buffer, cmd *Command) { buf.WriteString(" must_have_one_noun=()\n") sort.Sort(sort.StringSlice(cmd.ValidArgs)) for _, value := range cmd.ValidArgs { buf.WriteString(fmt.Sprintf(" must_have_one_noun+=(%q)\n", value)) } } func writeArgAliases(buf *bytes.Buffer, cmd *Command) { buf.WriteString(" noun_aliases=()\n") sort.Sort(sort.StringSlice(cmd.ArgAliases)) for _, value := range cmd.ArgAliases { buf.WriteString(fmt.Sprintf(" noun_aliases+=(%q)\n", value)) } } func gen(buf *bytes.Buffer, cmd *Command) { for _, c := range cmd.Commands() { if !c.IsAvailableCommand() || c == cmd.helpCommand { continue } gen(buf, c) } commandName := cmd.CommandPath() commandName = strings.Replace(commandName, " ", "_", -1) commandName = strings.Replace(commandName, ":", "__", -1) buf.WriteString(fmt.Sprintf("_%s()\n{\n", commandName)) buf.WriteString(fmt.Sprintf(" last_command=%q\n", commandName)) writeCommands(buf, cmd) writeFlags(buf, cmd) writeRequiredFlag(buf, cmd) writeRequiredNouns(buf, cmd) writeArgAliases(buf, cmd) buf.WriteString("}\n\n") } // GenBashCompletion generates bash completion file and writes to the passed writer. 
func (c *Command) GenBashCompletion(w io.Writer) error { buf := new(bytes.Buffer) writePreamble(buf, c.Name()) if len(c.BashCompletionFunction) > 0 { buf.WriteString(c.BashCompletionFunction + "\n") } gen(buf, c) writePostscript(buf, c.Name()) _, err := buf.WriteTo(w) return err } func nonCompletableFlag(flag *pflag.Flag) bool { return flag.Hidden || len(flag.Deprecated) > 0 } // GenBashCompletionFile generates bash completion file. func (c *Command) GenBashCompletionFile(filename string) error { outFile, err := os.Create(filename) if err != nil { return err } defer outFile.Close() return c.GenBashCompletion(outFile) } // MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag if it exists, // and causes your command to report an error if invoked without the flag. func (c *Command) MarkFlagRequired(name string) error { return MarkFlagRequired(c.Flags(), name) } // MarkPersistentFlagRequired adds the BashCompOneRequiredFlag annotation to the named persistent flag if it exists, // and causes your command to report an error if invoked without the flag. func (c *Command) MarkPersistentFlagRequired(name string) error { return MarkFlagRequired(c.PersistentFlags(), name) } // MarkFlagRequired adds the BashCompOneRequiredFlag annotation to the named flag if it exists, // and causes your command to report an error if invoked without the flag. func MarkFlagRequired(flags *pflag.FlagSet, name string) error { return flags.SetAnnotation(name, BashCompOneRequiredFlag, []string{"true"}) } // MarkFlagFilename adds the BashCompFilenameExt annotation to the named flag, if it exists. // Generated bash autocompletion will select filenames for the flag, limiting to named extensions if provided. func (c *Command) MarkFlagFilename(name string, extensions ...string) error { return MarkFlagFilename(c.Flags(), name, extensions...) } // MarkFlagCustom adds the BashCompCustom annotation to the named flag, if it exists. 
// Generated bash autocompletion will call the bash function f for the flag. func (c *Command) MarkFlagCustom(name string, f string) error { return MarkFlagCustom(c.Flags(), name, f) } // MarkPersistentFlagFilename adds the BashCompFilenameExt annotation to the named persistent flag, if it exists. // Generated bash autocompletion will select filenames for the flag, limiting to named extensions if provided. func (c *Command) MarkPersistentFlagFilename(name string, extensions ...string) error { return MarkFlagFilename(c.PersistentFlags(), name, extensions...) } // MarkFlagFilename adds the BashCompFilenameExt annotation to the named flag in the flag set, if it exists. // Generated bash autocompletion will select filenames for the flag, limiting to named extensions if provided. func MarkFlagFilename(flags *pflag.FlagSet, name string, extensions ...string) error { return flags.SetAnnotation(name, BashCompFilenameExt, extensions) } // MarkFlagCustom adds the BashCompCustom annotation to the named flag in the flag set, if it exists. // Generated bash autocompletion will call the bash function f for the flag. func MarkFlagCustom(flags *pflag.FlagSet, name string, f string) error { return flags.SetAnnotation(name, BashCompCustom, []string{f}) }<|fim▁end|>
}
<|file_name|>delete.js<|end_file_name|><|fim▁begin|>module.exports = require('eden-class').extend(function() { /* Require -------------------------------*/ /* Constants -------------------------------*/ /* Public.Properties -------------------------------*/ /* Protected Properties -------------------------------*/ this._table = null; this._where = []; /* Private Properties -------------------------------*/ /* Magic -------------------------------*/ this.___construct = function(table) { this.argument().test(1, 'string', 'undef'); if(typeof table === 'string') { this.setTable(table); } }; /* Public.Methods -------------------------------*/ /** * Set the table name in which you want to delete from * * @param string name * @return this */ this.setTable = function(table) { //argument test this.argument().test(1, 'string'); this._table = table; return this; }; /** * Returns the string version of the query * * @param bool * @return string * @notes returns the query based on the registry */ this.getQuery = function() { return 'DELETE FROM {TABLE} WHERE {WHERE};' .replace('{TABLE}' , this._table) .replace('{WHERE}' , this._where.join(' AND ')); }; /** * Where clause<|fim▁hole|> * @notes loads a where phrase into registry */ this.where = function(where) { //Argument 1 must be a string or array this.argument().test(1, 'string', 'array'); if(typeof where === 'string') { where = [where]; } this._where = this._where.concat(where); return this; }; /* Protected Methods -------------------------------*/ /* Private Methods -------------------------------*/ }).register('eden/mysql/delete');<|fim▁end|>
* * @param array|string where * @return this
<|file_name|>actions.js<|end_file_name|><|fim▁begin|>import * as types from './actionTypes'; export function addNote(newNote) { return { type: types.ADD_NOTE, newNote }; } export function deleteNote(id) { return { type: types.DELETE_NOTE, id }; } export function updateNote(updatedNote) { return { type: types.UPDATE_NOTE, updatedNote }; } export function getNotes(notes) { return { type: types.GET_NOTES, notes }; } export function selectNote(id) {<|fim▁hole|> return { type: types.SELECT_NOTE, id }; } export function login(userEmail, accessToken) { return { type: types.LOGIN, userEmail, accessToken }; } export function logout() { return { type: types.LOGOUT }; } export function saveAccessToken(userEmail, accessToken) { return { type: types.SAVE_ACCESS_TOKEN, userEmail, accessToken }; } export function authFail(userEmail) { return { type: types.AUTH_FAIL, userEmail }; }<|fim▁end|>
<|file_name|>printers.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package internalversion import ( "bytes" "fmt" "net" "sort" "strconv" "strings" "time" appsv1beta1 "k8s.io/api/apps/v1beta1" autoscalingv2beta1 "k8s.io/api/autoscaling/v2beta1" batchv1 "k8s.io/api/batch/v1" batchv1beta1 "k8s.io/api/batch/v1beta1" certificatesv1beta1 "k8s.io/api/certificates/v1beta1" coordinationv1beta1 "k8s.io/api/coordination/v1beta1" apiv1 "k8s.io/api/core/v1" extensionsv1beta1 "k8s.io/api/extensions/v1beta1" policyv1beta1 "k8s.io/api/policy/v1beta1" rbacv1beta1 "k8s.io/api/rbac/v1beta1" schedulingv1beta1 "k8s.io/api/scheduling/v1beta1" storagev1 "k8s.io/api/storage/v1" "k8s.io/apimachinery/pkg/api/meta" metav1 "k8s.io/apimachinery/pkg/apis/meta/v1" metav1beta1 "k8s.io/apimachinery/pkg/apis/meta/v1beta1" "k8s.io/apimachinery/pkg/labels" "k8s.io/apimachinery/pkg/runtime" "k8s.io/apimachinery/pkg/runtime/schema" "k8s.io/apimachinery/pkg/util/duration" "k8s.io/apimachinery/pkg/util/sets" "k8s.io/kubernetes/pkg/apis/apps" "k8s.io/kubernetes/pkg/apis/autoscaling" "k8s.io/kubernetes/pkg/apis/batch" "k8s.io/kubernetes/pkg/apis/certificates" "k8s.io/kubernetes/pkg/apis/coordination" api "k8s.io/kubernetes/pkg/apis/core" "k8s.io/kubernetes/pkg/apis/core/helper" "k8s.io/kubernetes/pkg/apis/extensions" "k8s.io/kubernetes/pkg/apis/networking" "k8s.io/kubernetes/pkg/apis/policy" "k8s.io/kubernetes/pkg/apis/rbac" "k8s.io/kubernetes/pkg/apis/scheduling" 
"k8s.io/kubernetes/pkg/apis/storage" storageutil "k8s.io/kubernetes/pkg/apis/storage/util" "k8s.io/kubernetes/pkg/printers" "k8s.io/kubernetes/pkg/util/node" ) const ( loadBalancerWidth = 16 // labelNodeRolePrefix is a label prefix for node roles // It's copied over to here until it's merged in core: https://github.com/kubernetes/kubernetes/pull/39112 labelNodeRolePrefix = "node-role.kubernetes.io/" // nodeLabelRole specifies the role of a node nodeLabelRole = "kubernetes.io/role" ) // Allow injecting additional print handlers var AddHandlers = AddKubeHandlers // AddHandlers adds print handlers for default Kubernetes types dealing with internal versions. // TODO: handle errors from Handler func AddKubeHandlers(h printers.PrintHandler) { podColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Ready", Type: "string", Description: "The aggregate readiness state of this pod for accepting traffic."}, {Name: "Status", Type: "string", Description: "The aggregate status of the containers in this pod."}, {Name: "Restarts", Type: "integer", Description: "The number of times the containers in this pod have been restarted."}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "IP", Type: "string", Priority: 1, Description: apiv1.PodStatus{}.SwaggerDoc()["podIP"]}, {Name: "Node", Type: "string", Priority: 1, Description: apiv1.PodSpec{}.SwaggerDoc()["nodeName"]}, {Name: "Nominated Node", Type: "string", Priority: 1, Description: apiv1.PodStatus{}.SwaggerDoc()["nominatedNodeName"]}, {Name: "Readiness Gates", Type: "string", Priority: 1, Description: apiv1.PodSpec{}.SwaggerDoc()["readinessGates"]}, } h.TableHandler(podColumnDefinitions, printPodList) h.TableHandler(podColumnDefinitions, printPod) podTemplateColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", 
Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Containers", Type: "string", Description: "Names of each container in the template."}, {Name: "Images", Type: "string", Description: "Images referenced by each container in the template."}, {Name: "Pod Labels", Type: "string", Description: "The labels for the pod template."}, } h.TableHandler(podTemplateColumnDefinitions, printPodTemplate) h.TableHandler(podTemplateColumnDefinitions, printPodTemplateList) podDisruptionBudgetColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Min Available", Type: "string", Description: "The minimum number of pods that must be available."}, {Name: "Max Unavailable", Type: "string", Description: "The maximum number of pods that may be unavailable."}, {Name: "Allowed Disruptions", Type: "integer", Description: "Calculated number of pods that may be disrupted at this time."}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(podDisruptionBudgetColumnDefinitions, printPodDisruptionBudget) h.TableHandler(podDisruptionBudgetColumnDefinitions, printPodDisruptionBudgetList) replicationControllerColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Desired", Type: "integer", Description: apiv1.ReplicationControllerSpec{}.SwaggerDoc()["replicas"]}, {Name: "Current", Type: "integer", Description: apiv1.ReplicationControllerStatus{}.SwaggerDoc()["replicas"]}, {Name: "Ready", Type: "integer", Description: apiv1.ReplicationControllerStatus{}.SwaggerDoc()["readyReplicas"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Containers", Type: "string", Priority: 1, Description: "Names of each container in the template."}, {Name: 
"Images", Type: "string", Priority: 1, Description: "Images referenced by each container in the template."}, {Name: "Selector", Type: "string", Priority: 1, Description: apiv1.ReplicationControllerSpec{}.SwaggerDoc()["selector"]}, } h.TableHandler(replicationControllerColumnDefinitions, printReplicationController) h.TableHandler(replicationControllerColumnDefinitions, printReplicationControllerList) replicaSetColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Desired", Type: "integer", Description: extensionsv1beta1.ReplicaSetSpec{}.SwaggerDoc()["replicas"]}, {Name: "Current", Type: "integer", Description: extensionsv1beta1.ReplicaSetStatus{}.SwaggerDoc()["replicas"]}, {Name: "Ready", Type: "integer", Description: extensionsv1beta1.ReplicaSetStatus{}.SwaggerDoc()["readyReplicas"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Containers", Type: "string", Priority: 1, Description: "Names of each container in the template."}, {Name: "Images", Type: "string", Priority: 1, Description: "Images referenced by each container in the template."}, {Name: "Selector", Type: "string", Priority: 1, Description: extensionsv1beta1.ReplicaSetSpec{}.SwaggerDoc()["selector"]}, } h.TableHandler(replicaSetColumnDefinitions, printReplicaSet) h.TableHandler(replicaSetColumnDefinitions, printReplicaSetList) daemonSetColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Desired", Type: "integer", Description: extensionsv1beta1.DaemonSetStatus{}.SwaggerDoc()["desiredNumberScheduled"]}, {Name: "Current", Type: "integer", Description: extensionsv1beta1.DaemonSetStatus{}.SwaggerDoc()["currentNumberScheduled"]}, {Name: "Ready", Type: "integer", Description: 
extensionsv1beta1.DaemonSetStatus{}.SwaggerDoc()["numberReady"]}, {Name: "Up-to-date", Type: "integer", Description: extensionsv1beta1.DaemonSetStatus{}.SwaggerDoc()["updatedNumberScheduled"]}, {Name: "Available", Type: "integer", Description: extensionsv1beta1.DaemonSetStatus{}.SwaggerDoc()["numberAvailable"]}, {Name: "Node Selector", Type: "string", Description: apiv1.PodSpec{}.SwaggerDoc()["nodeSelector"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Containers", Type: "string", Priority: 1, Description: "Names of each container in the template."}, {Name: "Images", Type: "string", Priority: 1, Description: "Images referenced by each container in the template."}, {Name: "Selector", Type: "string", Priority: 1, Description: extensionsv1beta1.DaemonSetSpec{}.SwaggerDoc()["selector"]}, } h.TableHandler(daemonSetColumnDefinitions, printDaemonSet) h.TableHandler(daemonSetColumnDefinitions, printDaemonSetList) jobColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Completions", Type: "string", Description: batchv1.JobStatus{}.SwaggerDoc()["succeeded"]}, {Name: "Duration", Type: "string", Description: "Time required to complete the job."}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Containers", Type: "string", Priority: 1, Description: "Names of each container in the template."}, {Name: "Images", Type: "string", Priority: 1, Description: "Images referenced by each container in the template."}, {Name: "Selector", Type: "string", Priority: 1, Description: batchv1.JobSpec{}.SwaggerDoc()["selector"]}, } h.TableHandler(jobColumnDefinitions, printJob) h.TableHandler(jobColumnDefinitions, printJobList) cronJobColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: 
metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Schedule", Type: "string", Description: batchv1beta1.CronJobSpec{}.SwaggerDoc()["schedule"]}, {Name: "Suspend", Type: "boolean", Description: batchv1beta1.CronJobSpec{}.SwaggerDoc()["suspend"]}, {Name: "Active", Type: "integer", Description: batchv1beta1.CronJobStatus{}.SwaggerDoc()["active"]}, {Name: "Last Schedule", Type: "string", Description: batchv1beta1.CronJobStatus{}.SwaggerDoc()["lastScheduleTime"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Containers", Type: "string", Priority: 1, Description: "Names of each container in the template."}, {Name: "Images", Type: "string", Priority: 1, Description: "Images referenced by each container in the template."}, {Name: "Selector", Type: "string", Priority: 1, Description: batchv1.JobSpec{}.SwaggerDoc()["selector"]}, } h.TableHandler(cronJobColumnDefinitions, printCronJob) h.TableHandler(cronJobColumnDefinitions, printCronJobList) serviceColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Type", Type: "string", Description: apiv1.ServiceSpec{}.SwaggerDoc()["type"]}, {Name: "Cluster-IP", Type: "string", Description: apiv1.ServiceSpec{}.SwaggerDoc()["clusterIP"]}, {Name: "External-IP", Type: "string", Description: apiv1.ServiceSpec{}.SwaggerDoc()["externalIPs"]}, {Name: "Port(s)", Type: "string", Description: apiv1.ServiceSpec{}.SwaggerDoc()["ports"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Selector", Type: "string", Priority: 1, Description: apiv1.ServiceSpec{}.SwaggerDoc()["selector"]}, } h.TableHandler(serviceColumnDefinitions, printService) h.TableHandler(serviceColumnDefinitions, printServiceList) ingressColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", 
Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Hosts", Type: "string", Description: "Hosts that incoming requests are matched against before the ingress rule"}, {Name: "Address", Type: "string", Description: "Address is a list containing ingress points for the load-balancer"}, {Name: "Ports", Type: "string", Description: "Ports of TLS configurations that open"}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(ingressColumnDefinitions, printIngress) h.TableHandler(ingressColumnDefinitions, printIngressList) statefulSetColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Ready", Type: "string", Description: "Number of the pod with ready state"}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Containers", Type: "string", Priority: 1, Description: "Names of each container in the template."}, {Name: "Images", Type: "string", Priority: 1, Description: "Images referenced by each container in the template."}, } h.TableHandler(statefulSetColumnDefinitions, printStatefulSet) h.TableHandler(statefulSetColumnDefinitions, printStatefulSetList) endpointColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Endpoints", Type: "string", Description: apiv1.Endpoints{}.SwaggerDoc()["subsets"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(endpointColumnDefinitions, printEndpoints) h.TableHandler(endpointColumnDefinitions, printEndpointsList) nodeColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Status", Type: "string", 
Description: "The status of the node"}, {Name: "Roles", Type: "string", Description: "The roles of the node"}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Version", Type: "string", Description: apiv1.NodeSystemInfo{}.SwaggerDoc()["kubeletVersion"]}, {Name: "Internal-IP", Type: "string", Priority: 1, Description: apiv1.NodeStatus{}.SwaggerDoc()["addresses"]}, {Name: "External-IP", Type: "string", Priority: 1, Description: apiv1.NodeStatus{}.SwaggerDoc()["addresses"]}, {Name: "OS-Image", Type: "string", Priority: 1, Description: apiv1.NodeSystemInfo{}.SwaggerDoc()["osImage"]}, {Name: "Kernel-Version", Type: "string", Priority: 1, Description: apiv1.NodeSystemInfo{}.SwaggerDoc()["kernelVersion"]}, {Name: "Container-Runtime", Type: "string", Priority: 1, Description: apiv1.NodeSystemInfo{}.SwaggerDoc()["containerRuntimeVersion"]}, } h.TableHandler(nodeColumnDefinitions, printNode) h.TableHandler(nodeColumnDefinitions, printNodeList) eventColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Last Seen", Type: "string", Description: apiv1.Event{}.SwaggerDoc()["lastTimestamp"]}, {Name: "Type", Type: "string", Description: apiv1.Event{}.SwaggerDoc()["type"]}, {Name: "Reason", Type: "string", Description: apiv1.Event{}.SwaggerDoc()["reason"]}, {Name: "Object", Type: "string", Description: apiv1.Event{}.SwaggerDoc()["involvedObject"]}, {Name: "Subobject", Type: "string", Priority: 1, Description: apiv1.Event{}.InvolvedObject.SwaggerDoc()["fieldPath"]}, {Name: "Source", Type: "string", Priority: 1, Description: apiv1.Event{}.SwaggerDoc()["source"]}, {Name: "Message", Type: "string", Description: apiv1.Event{}.SwaggerDoc()["message"]}, {Name: "First Seen", Type: "string", Priority: 1, Description: apiv1.Event{}.SwaggerDoc()["firstTimestamp"]}, {Name: "Count", Type: "string", Priority: 1, Description: apiv1.Event{}.SwaggerDoc()["count"]}, {Name: "Name", Type: "string", Priority: 1, Format: "name", 
Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, } h.TableHandler(eventColumnDefinitions, printEvent) h.TableHandler(eventColumnDefinitions, printEventList) namespaceColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Status", Type: "string", Description: "The status of the namespace"}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(namespaceColumnDefinitions, printNamespace) h.TableHandler(namespaceColumnDefinitions, printNamespaceList) secretColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Type", Type: "string", Description: apiv1.Secret{}.SwaggerDoc()["type"]}, {Name: "Data", Type: "string", Description: apiv1.Secret{}.SwaggerDoc()["data"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(secretColumnDefinitions, printSecret) h.TableHandler(secretColumnDefinitions, printSecretList) serviceAccountColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Secrets", Type: "string", Description: apiv1.ServiceAccount{}.SwaggerDoc()["secrets"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(serviceAccountColumnDefinitions, printServiceAccount) h.TableHandler(serviceAccountColumnDefinitions, printServiceAccountList) persistentVolumeColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Capacity", Type: "string", Description: apiv1.PersistentVolumeSpec{}.SwaggerDoc()["capacity"]}, {Name: "Access Modes", 
Type: "string", Description: apiv1.PersistentVolumeSpec{}.SwaggerDoc()["accessModes"]}, {Name: "Reclaim Policy", Type: "string", Description: apiv1.PersistentVolumeSpec{}.SwaggerDoc()["persistentVolumeReclaimPolicy"]}, {Name: "Status", Type: "string", Description: apiv1.PersistentVolumeStatus{}.SwaggerDoc()["phase"]}, {Name: "Claim", Type: "string", Description: apiv1.PersistentVolumeSpec{}.SwaggerDoc()["claimRef"]}, {Name: "StorageClass", Type: "string", Description: "StorageClass of the pv"}, {Name: "Reason", Type: "string", Description: apiv1.PersistentVolumeStatus{}.SwaggerDoc()["reason"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(persistentVolumeColumnDefinitions, printPersistentVolume) h.TableHandler(persistentVolumeColumnDefinitions, printPersistentVolumeList) persistentVolumeClaimColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Status", Type: "string", Description: apiv1.PersistentVolumeClaimStatus{}.SwaggerDoc()["phase"]}, {Name: "Volume", Type: "string", Description: apiv1.PersistentVolumeClaimSpec{}.SwaggerDoc()["volumeName"]}, {Name: "Capacity", Type: "string", Description: apiv1.PersistentVolumeClaimStatus{}.SwaggerDoc()["capacity"]}, {Name: "Access Modes", Type: "string", Description: apiv1.PersistentVolumeClaimStatus{}.SwaggerDoc()["accessModes"]}, {Name: "StorageClass", Type: "string", Description: "StorageClass of the pvc"}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(persistentVolumeClaimColumnDefinitions, printPersistentVolumeClaim) h.TableHandler(persistentVolumeClaimColumnDefinitions, printPersistentVolumeClaimList) componentStatusColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: 
metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Status", Type: "string", Description: "Status of the component conditions"}, {Name: "Message", Type: "string", Description: "Message of the component conditions"}, {Name: "Error", Type: "string", Description: "Error of the component conditions"}, } h.TableHandler(componentStatusColumnDefinitions, printComponentStatus) h.TableHandler(componentStatusColumnDefinitions, printComponentStatusList) deploymentColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Ready", Type: "string", Description: "Number of the pod with ready state"}, {Name: "Up-to-date", Type: "string", Description: extensionsv1beta1.DeploymentStatus{}.SwaggerDoc()["updatedReplicas"]}, {Name: "Available", Type: "string", Description: extensionsv1beta1.DeploymentStatus{}.SwaggerDoc()["availableReplicas"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Containers", Type: "string", Priority: 1, Description: "Names of each container in the template."}, {Name: "Images", Type: "string", Priority: 1, Description: "Images referenced by each container in the template."}, {Name: "Selector", Type: "string", Priority: 1, Description: extensionsv1beta1.DeploymentSpec{}.SwaggerDoc()["selector"]}, } h.TableHandler(deploymentColumnDefinitions, printDeployment) h.TableHandler(deploymentColumnDefinitions, printDeploymentList) horizontalPodAutoscalerColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Reference", Type: "string", Description: autoscalingv2beta1.HorizontalPodAutoscalerSpec{}.SwaggerDoc()["scaleTargetRef"]}, {Name: "Targets", Type: "string", Description: autoscalingv2beta1.HorizontalPodAutoscalerSpec{}.SwaggerDoc()["metrics"]}, {Name: "MinPods", Type: "string", 
Description: autoscalingv2beta1.HorizontalPodAutoscalerSpec{}.SwaggerDoc()["minReplicas"]}, {Name: "MaxPods", Type: "string", Description: autoscalingv2beta1.HorizontalPodAutoscalerSpec{}.SwaggerDoc()["maxReplicas"]}, {Name: "Replicas", Type: "string", Description: autoscalingv2beta1.HorizontalPodAutoscalerStatus{}.SwaggerDoc()["currentReplicas"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(horizontalPodAutoscalerColumnDefinitions, printHorizontalPodAutoscaler) h.TableHandler(horizontalPodAutoscalerColumnDefinitions, printHorizontalPodAutoscalerList) configMapColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Data", Type: "string", Description: apiv1.ConfigMap{}.SwaggerDoc()["data"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(configMapColumnDefinitions, printConfigMap) h.TableHandler(configMapColumnDefinitions, printConfigMapList) podSecurityPolicyColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Priv", Type: "string", Description: policyv1beta1.PodSecurityPolicySpec{}.SwaggerDoc()["privileged"]}, {Name: "Caps", Type: "string", Description: policyv1beta1.PodSecurityPolicySpec{}.SwaggerDoc()["allowedCapabilities"]}, {Name: "SELinux", Type: "string", Description: policyv1beta1.PodSecurityPolicySpec{}.SwaggerDoc()["seLinux"]}, {Name: "RunAsUser", Type: "string", Description: policyv1beta1.PodSecurityPolicySpec{}.SwaggerDoc()["runAsUser"]}, {Name: "FsGroup", Type: "string", Description: policyv1beta1.PodSecurityPolicySpec{}.SwaggerDoc()["fsGroup"]}, {Name: "SupGroup", Type: "string", Description: policyv1beta1.PodSecurityPolicySpec{}.SwaggerDoc()["supplementalGroups"]}, {Name: 
"ReadOnlyRootFs", Type: "string", Description: policyv1beta1.PodSecurityPolicySpec{}.SwaggerDoc()["readOnlyRootFilesystem"]}, {Name: "Volumes", Type: "string", Description: policyv1beta1.PodSecurityPolicySpec{}.SwaggerDoc()["volumes"]}, } h.TableHandler(podSecurityPolicyColumnDefinitions, printPodSecurityPolicy) h.TableHandler(podSecurityPolicyColumnDefinitions, printPodSecurityPolicyList) networkPolicyColumnDefinitioins := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Pod-Selector", Type: "string", Description: extensionsv1beta1.NetworkPolicySpec{}.SwaggerDoc()["podSelector"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(networkPolicyColumnDefinitioins, printNetworkPolicy) h.TableHandler(networkPolicyColumnDefinitioins, printNetworkPolicyList) roleBindingsColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Role", Type: "string", Priority: 1, Description: rbacv1beta1.RoleBinding{}.SwaggerDoc()["roleRef"]}, {Name: "Users", Type: "string", Priority: 1, Description: "Users in the roleBinding"}, {Name: "Groups", Type: "string", Priority: 1, Description: "Gruops in the roleBinding"}, {Name: "ServiceAccounts", Type: "string", Priority: 1, Description: "ServiceAccounts in the roleBinding"}, } h.TableHandler(roleBindingsColumnDefinitions, printRoleBinding) h.TableHandler(roleBindingsColumnDefinitions, printRoleBindingList) clusterRoleBindingsColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Age", Type: "string", Description: 
metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Role", Type: "string", Priority: 1, Description: rbacv1beta1.ClusterRoleBinding{}.SwaggerDoc()["roleRef"]}, {Name: "Users", Type: "string", Priority: 1, Description: "Users in the roleBinding"}, {Name: "Groups", Type: "string", Priority: 1, Description: "Gruops in the roleBinding"}, {Name: "ServiceAccounts", Type: "string", Priority: 1, Description: "ServiceAccounts in the roleBinding"}, } h.TableHandler(clusterRoleBindingsColumnDefinitions, printClusterRoleBinding) h.TableHandler(clusterRoleBindingsColumnDefinitions, printClusterRoleBindingList) certificateSigningRequestColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Requestor", Type: "string", Description: certificatesv1beta1.CertificateSigningRequestSpec{}.SwaggerDoc()["request"]}, {Name: "Condition", Type: "string", Description: certificatesv1beta1.CertificateSigningRequestStatus{}.SwaggerDoc()["conditions"]}, } h.TableHandler(certificateSigningRequestColumnDefinitions, printCertificateSigningRequest) h.TableHandler(certificateSigningRequestColumnDefinitions, printCertificateSigningRequestList) leaseColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Holder", Type: "string", Description: coordinationv1beta1.LeaseSpec{}.SwaggerDoc()["holderIdentity"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(leaseColumnDefinitions, printLease) h.TableHandler(leaseColumnDefinitions, printLeaseList) storageClassColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: 
metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Provisioner", Type: "string", Description: storagev1.StorageClass{}.SwaggerDoc()["provisioner"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(storageClassColumnDefinitions, printStorageClass) h.TableHandler(storageClassColumnDefinitions, printStorageClassList) statusColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Status", Type: "string", Description: metav1.Status{}.SwaggerDoc()["status"]}, {Name: "Reason", Type: "string", Description: metav1.Status{}.SwaggerDoc()["reason"]}, {Name: "Message", Type: "string", Description: metav1.Status{}.SwaggerDoc()["Message"]}, } h.TableHandler(statusColumnDefinitions, printStatus) controllerRevisionColumnDefinition := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Controller", Type: "string", Description: "Controller of the object"}, {Name: "Revision", Type: "string", Description: appsv1beta1.ControllerRevision{}.SwaggerDoc()["revision"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(controllerRevisionColumnDefinition, printControllerRevision) h.TableHandler(controllerRevisionColumnDefinition, printControllerRevisionList) resorceQuotaColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, {Name: "Request", Type: "string", Description: "Request represents a minimum amount of cpu/memory that a container may consume."}, {Name: "Limit", Type: "string", Description: "Limits control the maximum amount of cpu/memory that a container may use independent of contention on the node."}, } h.TableHandler(resorceQuotaColumnDefinitions, 
printResourceQuota) h.TableHandler(resorceQuotaColumnDefinitions, printResourceQuotaList) priorityClassColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Value", Type: "integer", Description: schedulingv1beta1.PriorityClass{}.SwaggerDoc()["value"]}, {Name: "Global-Default", Type: "boolean", Description: schedulingv1beta1.PriorityClass{}.SwaggerDoc()["globalDefault"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.TableHandler(priorityClassColumnDefinitions, printPriorityClass) h.TableHandler(priorityClassColumnDefinitions, printPriorityClassList) AddDefaultHandlers(h) } // AddDefaultHandlers adds handlers that can work with most Kubernetes objects. func AddDefaultHandlers(h printers.PrintHandler) { // types without defined columns objectMetaColumnDefinitions := []metav1beta1.TableColumnDefinition{ {Name: "Name", Type: "string", Format: "name", Description: metav1.ObjectMeta{}.SwaggerDoc()["name"]}, {Name: "Age", Type: "string", Description: metav1.ObjectMeta{}.SwaggerDoc()["creationTimestamp"]}, } h.DefaultTableHandler(objectMetaColumnDefinitions, printObjectMeta) } func printObjectMeta(obj runtime.Object, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { if meta.IsListType(obj) { rows := make([]metav1beta1.TableRow, 0, 16) err := meta.EachListItem(obj, func(obj runtime.Object) error { nestedRows, err := printObjectMeta(obj, options) if err != nil { return err } rows = append(rows, nestedRows...) 
return nil }) if err != nil { return nil, err } return rows, nil } rows := make([]metav1beta1.TableRow, 0, 1) m, err := meta.Accessor(obj) if err != nil { return nil, err } row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } row.Cells = append(row.Cells, m.GetName(), translateTimestampSince(m.GetCreationTimestamp())) rows = append(rows, row) return rows, nil } // Pass ports=nil for all ports. func formatEndpoints(endpoints *api.Endpoints, ports sets.String) string { if len(endpoints.Subsets) == 0 { return "<none>" } list := []string{} max := 3 more := false count := 0 for i := range endpoints.Subsets { ss := &endpoints.Subsets[i] if len(ss.Ports) == 0 { // It's possible to have headless services with no ports. for i := range ss.Addresses { if len(list) == max { more = true } if !more { list = append(list, ss.Addresses[i].IP) } count++ } } else { // "Normal" services with ports defined. for i := range ss.Ports { port := &ss.Ports[i] if ports == nil || ports.Has(port.Name) { for i := range ss.Addresses { if len(list) == max { more = true } addr := &ss.Addresses[i] if !more { hostPort := net.JoinHostPort(addr.IP, strconv.Itoa(int(port.Port))) list = append(list, hostPort) } count++ } } } } } ret := strings.Join(list, ",") if more { return fmt.Sprintf("%s + %d more...", ret, count-max) } return ret } // translateTimestampSince returns the elapsed time since timestamp in // human-readable approximation. func translateTimestampSince(timestamp metav1.Time) string { if timestamp.IsZero() { return "<unknown>" } return duration.HumanDuration(time.Since(timestamp.Time)) } // translateTimestampUntil returns the elapsed time until timestamp in // human-readable approximation. 
// translateTimestampUntil returns the elapsed time until timestamp in
// human-readable approximation.
func translateTimestampUntil(timestamp metav1.Time) string {
	if timestamp.IsZero() {
		return "<unknown>"
	}
	return duration.HumanDuration(time.Until(timestamp.Time))
}

// Shared row conditions attached to pod rows whose phase is terminal, so
// table consumers can recognize completed (succeeded or failed) pods.
var (
	podSuccessConditions = []metav1beta1.TableRowCondition{{Type: metav1beta1.RowCompleted, Status: metav1beta1.ConditionTrue, Reason: string(api.PodSucceeded), Message: "The pod has completed successfully."}}
	podFailedConditions  = []metav1beta1.TableRowCondition{{Type: metav1beta1.RowCompleted, Status: metav1beta1.ConditionTrue, Reason: string(api.PodFailed), Message: "The pod failed."}}
)

// printPodList renders one table row per pod in podList.
func printPodList(podList *api.PodList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(podList.Items))
	for i := range podList.Items {
		r, err := printPod(&podList.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printPod renders a single pod as a table row: name, ready-container
// fraction, a derived human-readable status ("reason"), restart count, and
// age. With options.Wide it also appends pod IP, node, nominated node, and
// readiness-gate summary columns.
func printPod(pod *api.Pod, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	restarts := 0
	totalContainers := len(pod.Spec.Containers)
	readyContainers := 0

	// Start from the phase; an explicit Status.Reason overrides it.
	reason := string(pod.Status.Phase)
	if pod.Status.Reason != "" {
		reason = pod.Status.Reason
	}

	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: pod},
	}

	// Mark terminal pods via the shared row conditions above.
	switch pod.Status.Phase {
	case api.PodSucceeded:
		row.Conditions = podSuccessConditions
	case api.PodFailed:
		row.Conditions = podFailedConditions
	}

	// If any init container has not completed successfully, report the pod
	// as still initializing and derive the reason from the first such
	// container. The unconditional break below means at most one non-passed
	// init container is inspected (the `continue` case skips passed ones).
	initializing := false
	for i := range pod.Status.InitContainerStatuses {
		container := pod.Status.InitContainerStatuses[i]
		restarts += int(container.RestartCount)
		switch {
		case container.State.Terminated != nil && container.State.Terminated.ExitCode == 0:
			continue
		case container.State.Terminated != nil:
			// initialization is failed
			if len(container.State.Terminated.Reason) == 0 {
				if container.State.Terminated.Signal != 0 {
					reason = fmt.Sprintf("Init:Signal:%d", container.State.Terminated.Signal)
				} else {
					reason = fmt.Sprintf("Init:ExitCode:%d", container.State.Terminated.ExitCode)
				}
			} else {
				reason = "Init:" + container.State.Terminated.Reason
			}
			initializing = true
		case container.State.Waiting != nil && len(container.State.Waiting.Reason) > 0 && container.State.Waiting.Reason != "PodInitializing":
			reason = "Init:" + container.State.Waiting.Reason
			initializing = true
		default:
			reason = fmt.Sprintf("Init:%d/%d", i, len(pod.Spec.InitContainers))
			initializing = true
		}
		break
	}
	if !initializing {
		// Init containers are done: recount restarts from the app
		// containers and refine the reason from their states (iterating in
		// reverse, so earlier containers win for waiting/terminated reasons).
		restarts = 0
		hasRunning := false
		for i := len(pod.Status.ContainerStatuses) - 1; i >= 0; i-- {
			container := pod.Status.ContainerStatuses[i]

			restarts += int(container.RestartCount)
			if container.State.Waiting != nil && container.State.Waiting.Reason != "" {
				reason = container.State.Waiting.Reason
			} else if container.State.Terminated != nil && container.State.Terminated.Reason != "" {
				reason = container.State.Terminated.Reason
			} else if container.State.Terminated != nil && container.State.Terminated.Reason == "" {
				if container.State.Terminated.Signal != 0 {
					reason = fmt.Sprintf("Signal:%d", container.State.Terminated.Signal)
				} else {
					reason = fmt.Sprintf("ExitCode:%d", container.State.Terminated.ExitCode)
				}
			} else if container.Ready && container.State.Running != nil {
				hasRunning = true
				readyContainers++
			}
		}

		// change pod status back to "Running" if there is at least one container still reporting as "Running" status
		if reason == "Completed" && hasRunning {
			reason = "Running"
		}
	}

	// Deletion overrides everything: unreachable-node pods show as Unknown,
	// all other deleting pods as Terminating.
	if pod.DeletionTimestamp != nil && pod.Status.Reason == node.NodeUnreachablePodReason {
		reason = "Unknown"
	} else if pod.DeletionTimestamp != nil {
		reason = "Terminating"
	}

	row.Cells = append(row.Cells, pod.Name, fmt.Sprintf("%d/%d", readyContainers, totalContainers), reason, int64(restarts), translateTimestampSince(pod.CreationTimestamp))
	if options.Wide {
		nodeName := pod.Spec.NodeName
		nominatedNodeName := pod.Status.NominatedNodeName
		podIP := pod.Status.PodIP
		if podIP == "" {
			podIP = "<none>"
		}
		if nodeName == "" {
			nodeName = "<none>"
		}
		if nominatedNodeName == "" {
			nominatedNodeName = "<none>"
		}

		// Summarize readiness gates as "<true>/<total>" when any are defined.
		readinessGates := "<none>"
		if len(pod.Spec.ReadinessGates) > 0 {
			trueConditions := 0
			for _, readinessGate := range pod.Spec.ReadinessGates {
				conditionType := readinessGate.ConditionType
				for _, condition := range pod.Status.Conditions {
					if condition.Type == conditionType {
						if condition.Status == api.ConditionTrue {
							trueConditions += 1
						}
						break
					}
				}
			}
			readinessGates = fmt.Sprintf("%d/%d", trueConditions, len(pod.Spec.ReadinessGates))
		}
		row.Cells = append(row.Cells, podIP, nodeName, nominatedNodeName, readinessGates)
	}

	return []metav1beta1.TableRow{row}, nil
}

// printPodTemplate renders a pod template row: name, container names,
// images, and the template's labels.
func printPodTemplate(obj *api.PodTemplate, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}
	names, images := layoutContainerCells(obj.Template.Spec.Containers)
	row.Cells = append(row.Cells, obj.Name, names, images, labels.FormatLabels(obj.Template.Labels))
	return []metav1beta1.TableRow{row}, nil
}

// printPodTemplateList renders one row per pod template in the list.
func printPodTemplateList(list *api.PodTemplateList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printPodTemplate(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printPodDisruptionBudget renders a PDB row; unset min-available /
// max-unavailable render as the literal "N/A".
func printPodDisruptionBudget(obj *policy.PodDisruptionBudget, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	var minAvailable string
	var maxUnavailable string
	if obj.Spec.MinAvailable != nil {
		minAvailable = obj.Spec.MinAvailable.String()
	} else {
		minAvailable = "N/A"
	}

	if obj.Spec.MaxUnavailable != nil {
		maxUnavailable = obj.Spec.MaxUnavailable.String()
	} else {
		maxUnavailable = "N/A"
	}

	row.Cells = append(row.Cells, obj.Name, minAvailable, maxUnavailable, int64(obj.Status.PodDisruptionsAllowed), translateTimestampSince(obj.CreationTimestamp))
	return []metav1beta1.TableRow{row}, nil
}

// printPodDisruptionBudgetList renders one row per PDB in the list.
func printPodDisruptionBudgetList(list *policy.PodDisruptionBudgetList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printPodDisruptionBudget(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// TODO(AdoHe): try to put wide output in a single method
// printReplicationController renders an RC row: name, desired/current/ready
// replica counts, and age; wide output adds containers, images, selector.
func printReplicationController(obj *api.ReplicationController, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	desiredReplicas := obj.Spec.Replicas
	currentReplicas := obj.Status.Replicas
	readyReplicas := obj.Status.ReadyReplicas

	row.Cells = append(row.Cells, obj.Name, int64(desiredReplicas), int64(currentReplicas), int64(readyReplicas), translateTimestampSince(obj.CreationTimestamp))
	if options.Wide {
		names, images := layoutContainerCells(obj.Spec.Template.Spec.Containers)
		row.Cells = append(row.Cells, names, images, labels.FormatLabels(obj.Spec.Selector))
	}
	return []metav1beta1.TableRow{row}, nil
}

// printReplicationControllerList renders one row per RC in the list.
func printReplicationControllerList(list *api.ReplicationControllerList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printReplicationController(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printReplicaSet renders a ReplicaSet row; the wide selector column uses a
// label selector (unlike the RC printer, which uses a plain label map).
func printReplicaSet(obj *apps.ReplicaSet, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	desiredReplicas := obj.Spec.Replicas
	currentReplicas := obj.Status.Replicas
	readyReplicas := obj.Status.ReadyReplicas

	row.Cells = append(row.Cells, obj.Name, int64(desiredReplicas), int64(currentReplicas), int64(readyReplicas), translateTimestampSince(obj.CreationTimestamp))
	if options.Wide {
		names, images := layoutContainerCells(obj.Spec.Template.Spec.Containers)
		row.Cells = append(row.Cells, names, images, metav1.FormatLabelSelector(obj.Spec.Selector))
	}
	return []metav1beta1.TableRow{row}, nil
}

// printReplicaSetList renders one row per ReplicaSet in the list.
func printReplicaSetList(list *apps.ReplicaSetList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printReplicaSet(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printJob renders a Job row: name, a completions summary
// ("succeeded/completions", or a parallelism-based form when completions is
// unset), run duration, and age.
func printJob(obj *batch.Job, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	var completions string
	if obj.Spec.Completions != nil {
		completions = fmt.Sprintf("%d/%d", obj.Status.Succeeded, *obj.Spec.Completions)
	} else {
		parallelism := int32(0)
		if obj.Spec.Parallelism != nil {
			parallelism = *obj.Spec.Parallelism
		}
		if parallelism > 1 {
			// For multiple running pods, denote it as "succeeded/1 of parallelism".
			completions = fmt.Sprintf("%d/1 of %d", obj.Status.Succeeded, parallelism)
		} else {
			completions = fmt.Sprintf("%d/1", obj.Status.Succeeded)
		}
	}
	// Duration is empty until the job starts, start..now while running, and
	// start..completion once done.
	// NOTE(review): time.Since(obj.Status.StartTime.Time) would be the
	// idiomatic spelling of time.Now().Sub(...) here.
	var jobDuration string
	switch {
	case obj.Status.StartTime == nil:
	case obj.Status.CompletionTime == nil:
		jobDuration = duration.HumanDuration(time.Now().Sub(obj.Status.StartTime.Time))
	default:
		jobDuration = duration.HumanDuration(obj.Status.CompletionTime.Sub(obj.Status.StartTime.Time))
	}

	row.Cells = append(row.Cells, obj.Name, completions, jobDuration, translateTimestampSince(obj.CreationTimestamp))
	if options.Wide {
		names, images := layoutContainerCells(obj.Spec.Template.Spec.Containers)
		row.Cells = append(row.Cells, names, images, metav1.FormatLabelSelector(obj.Spec.Selector))
	}
	return []metav1beta1.TableRow{row}, nil
}

// printJobList renders one row per Job in the list.
func printJobList(list *batch.JobList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printJob(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printCronJob renders a CronJob row: name, schedule, suspend flag, active
// job count, last schedule time, and age.
func printCronJob(obj *batch.CronJob, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	lastScheduleTime := "<none>"
	if obj.Status.LastScheduleTime != nil {
		lastScheduleTime = translateTimestampSince(*obj.Status.LastScheduleTime)
	}

	row.Cells = append(row.Cells, obj.Name, obj.Spec.Schedule, printBoolPtr(obj.Spec.Suspend), int64(len(obj.Status.Active)), lastScheduleTime, translateTimestampSince(obj.CreationTimestamp))
	if options.Wide {
		names, images := layoutContainerCells(obj.Spec.JobTemplate.Spec.Template.Spec.Containers)
		row.Cells = append(row.Cells, names, images, metav1.FormatLabelSelector(obj.Spec.JobTemplate.Spec.Selector))
	}
	return []metav1beta1.TableRow{row}, nil
}

// printCronJobList renders one row per CronJob in the list.
func printCronJobList(list *batch.CronJobList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printCronJob(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// loadBalancerStatusStringer behaves mostly like a string interface and converts the given status to a string.
// `wide` indicates whether the returned value is meant for --o=wide output. If not, it's clipped to 16 bytes.
func loadBalancerStatusStringer(s api.LoadBalancerStatus, wide bool) string {
	ingress := s.Ingress
	// sets.String deduplicates and sorts IPs/hostnames before joining.
	result := sets.NewString()
	for i := range ingress {
		if ingress[i].IP != "" {
			result.Insert(ingress[i].IP)
		} else if ingress[i].Hostname != "" {
			result.Insert(ingress[i].Hostname)
		}
	}

	r := strings.Join(result.List(), ",")
	// loadBalancerWidth is defined elsewhere in this file.
	if !wide && len(r) > loadBalancerWidth {
		r = r[0:(loadBalancerWidth-3)] + "..."
	}
	return r
}

// getServiceExternalIP returns the EXTERNAL-IP cell for a service based on
// its type: explicit external IPs, load-balancer ingress (merged with
// external IPs, or "<pending>" when none yet), or the external name.
func getServiceExternalIP(svc *api.Service, wide bool) string {
	switch svc.Spec.Type {
	case api.ServiceTypeClusterIP:
		if len(svc.Spec.ExternalIPs) > 0 {
			return strings.Join(svc.Spec.ExternalIPs, ",")
		}
		return "<none>"
	case api.ServiceTypeNodePort:
		if len(svc.Spec.ExternalIPs) > 0 {
			return strings.Join(svc.Spec.ExternalIPs, ",")
		}
		return "<none>"
	case api.ServiceTypeLoadBalancer:
		lbIps := loadBalancerStatusStringer(svc.Status.LoadBalancer, wide)
		if len(svc.Spec.ExternalIPs) > 0 {
			results := []string{}
			if len(lbIps) > 0 {
				results = append(results, strings.Split(lbIps, ",")...)
			}
			results = append(results, svc.Spec.ExternalIPs...)
			return strings.Join(results, ",")
		}
		if len(lbIps) > 0 {
			return lbIps
		}
		return "<pending>"
	case api.ServiceTypeExternalName:
		return svc.Spec.ExternalName
	}
	return "<unknown>"
}

// makePortString renders service ports as "port/proto" (or
// "port:nodePort/proto" when a node port is set), comma-joined.
func makePortString(ports []api.ServicePort) string {
	pieces := make([]string, len(ports))
	for ix := range ports {
		port := &ports[ix]
		pieces[ix] = fmt.Sprintf("%d/%s", port.Port, port.Protocol)
		if port.NodePort > 0 {
			pieces[ix] = fmt.Sprintf("%d:%d/%s", port.Port, port.NodePort, port.Protocol)
		}
	}
	return strings.Join(pieces, ",")
}

// printService renders a service row: name, type, cluster IP, external IP,
// ports, and age; wide output adds the selector.
func printService(obj *api.Service, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}
	svcType := obj.Spec.Type
	internalIP := obj.Spec.ClusterIP
	if len(internalIP) == 0 {
		internalIP = "<none>"
	}
	externalIP := getServiceExternalIP(obj, options.Wide)
	svcPorts := makePortString(obj.Spec.Ports)
	if len(svcPorts) == 0 {
		svcPorts = "<none>"
	}

	row.Cells = append(row.Cells, obj.Name, string(svcType), internalIP, externalIP, svcPorts, translateTimestampSince(obj.CreationTimestamp))
	if options.Wide {
		row.Cells = append(row.Cells, labels.FormatLabels(obj.Spec.Selector))
	}

	return []metav1beta1.TableRow{row}, nil
}

// printServiceList renders one row per service in the list.
func printServiceList(list *api.ServiceList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printService(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// backendStringer behaves just like a string interface and converts the given backend to a string.
func backendStringer(backend *extensions.IngressBackend) string {
	if backend == nil {
		return ""
	}
	return fmt.Sprintf("%v:%v", backend.ServiceName, backend.ServicePort.String())
}

// formatHosts joins up to three ingress rule hosts, appending a
// "+ N more..." suffix beyond that; "*" when no hosts are present.
func formatHosts(rules []extensions.IngressRule) string {
	list := []string{}
	max := 3
	more := false
	for _, rule := range rules {
		if len(list) == max {
			more = true
		}
		if !more && len(rule.Host) != 0 {
			list = append(list, rule.Host)
		}
	}
	if len(list) == 0 {
		return "*"
	}
	ret := strings.Join(list, ",")
	if more {
		return fmt.Sprintf("%s + %d more...", ret, len(rules)-max)
	}
	return ret
}

// formatPorts reports "80, 443" when the ingress has TLS configured,
// otherwise "80".
func formatPorts(tls []extensions.IngressTLS) string {
	if len(tls) != 0 {
		return "80, 443"
	}
	return "80"
}

// printIngress renders an ingress row: name, hosts, LB address, ports, age.
func printIngress(obj *extensions.Ingress, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}
	hosts := formatHosts(obj.Spec.Rules)
	address := loadBalancerStatusStringer(obj.Status.LoadBalancer, options.Wide)
	ports := formatPorts(obj.Spec.TLS)
	createTime := translateTimestampSince(obj.CreationTimestamp)
	row.Cells = append(row.Cells, obj.Name, hosts, address, ports, createTime)
	return []metav1beta1.TableRow{row}, nil
}

// printIngressList renders one row per ingress in the list.
func printIngressList(list *extensions.IngressList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printIngress(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printStatefulSet renders a StatefulSet row: name, "ready/desired", age;
// wide output adds container names and images.
func printStatefulSet(obj *apps.StatefulSet, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}
	desiredReplicas := obj.Spec.Replicas
	readyReplicas := obj.Status.ReadyReplicas
	createTime := translateTimestampSince(obj.CreationTimestamp)
	row.Cells = append(row.Cells, obj.Name, fmt.Sprintf("%d/%d", int64(readyReplicas), int64(desiredReplicas)), createTime)
	if options.Wide {
		names, images := layoutContainerCells(obj.Spec.Template.Spec.Containers)
		row.Cells = append(row.Cells, names, images)
	}
	return []metav1beta1.TableRow{row}, nil
}

// printStatefulSetList renders one row per StatefulSet in the list.
func printStatefulSetList(list *apps.StatefulSetList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printStatefulSet(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printDaemonSet renders a DaemonSet row: name, desired/current/ready/
// up-to-date/available counts, node selector, and age.
func printDaemonSet(obj *apps.DaemonSet, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	desiredScheduled := obj.Status.DesiredNumberScheduled
	currentScheduled := obj.Status.CurrentNumberScheduled
	numberReady := obj.Status.NumberReady
	numberUpdated := obj.Status.UpdatedNumberScheduled
	numberAvailable := obj.Status.NumberAvailable

	row.Cells = append(row.Cells, obj.Name, int64(desiredScheduled), int64(currentScheduled), int64(numberReady), int64(numberUpdated), int64(numberAvailable), labels.FormatLabels(obj.Spec.Template.Spec.NodeSelector), translateTimestampSince(obj.CreationTimestamp))
	if options.Wide {
		names, images := layoutContainerCells(obj.Spec.Template.Spec.Containers)
		row.Cells = append(row.Cells, names, images, metav1.FormatLabelSelector(obj.Spec.Selector))
	}
	return []metav1beta1.TableRow{row}, nil
}

// printDaemonSetList renders one row per DaemonSet in the list.
func printDaemonSetList(list *apps.DaemonSetList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printDaemonSet(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printEndpoints renders an Endpoints row: name, formatted endpoint
// addresses (all ports), and age.
func printEndpoints(obj *api.Endpoints, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}
	row.Cells = append(row.Cells, obj.Name, formatEndpoints(obj, nil), translateTimestampSince(obj.CreationTimestamp))
	return []metav1beta1.TableRow{row}, nil
}

// printEndpointsList renders one row per Endpoints object in the list.
func printEndpointsList(list *api.EndpointsList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printEndpoints(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printNamespace renders a namespace row: name, phase, and age.
func printNamespace(obj *api.Namespace, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}
	row.Cells = append(row.Cells, obj.Name, string(obj.Status.Phase), translateTimestampSince(obj.CreationTimestamp))
	return []metav1beta1.TableRow{row}, nil
}

// printNamespaceList renders one row per namespace in the list.
func printNamespaceList(list *api.NamespaceList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printNamespace(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printSecret renders a secret row: name, type, number of data keys, age.
func printSecret(obj *api.Secret, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}
	row.Cells = append(row.Cells, obj.Name, string(obj.Type), int64(len(obj.Data)), translateTimestampSince(obj.CreationTimestamp))
	return []metav1beta1.TableRow{row}, nil
}

// printSecretList renders one row per secret in the list.
func printSecretList(list *api.SecretList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printSecret(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printServiceAccount renders a service account row: name, number of
// mountable secrets, and age.
func printServiceAccount(obj *api.ServiceAccount, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}
	row.Cells = append(row.Cells, obj.Name, int64(len(obj.Secrets)), translateTimestampSince(obj.CreationTimestamp))
	return []metav1beta1.TableRow{row}, nil
}

// printServiceAccountList renders one row per service account in the list.
func printServiceAccountList(list *api.ServiceAccountList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printServiceAccount(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printNode renders a node row: name, condition-derived status (plus
// "SchedulingDisabled" when cordoned), roles, age, and kubelet version;
// wide output adds internal/external IPs, OS image, kernel, and runtime.
func printNode(obj *api.Node, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	conditionMap := make(map[api.NodeConditionType]*api.NodeCondition)
	// Only the Ready condition contributes to the STATUS column.
	NodeAllConditions := []api.NodeConditionType{api.NodeReady}
	for i := range obj.Status.Conditions {
		// cond is a fresh per-iteration copy, so taking its address is safe.
		cond := obj.Status.Conditions[i]
		conditionMap[cond.Type] = &cond
	}
	var status []string
	for _, validCondition := range NodeAllConditions {
		if condition, ok := conditionMap[validCondition]; ok {
			if condition.Status == api.ConditionTrue {
				status = append(status, string(condition.Type))
			} else {
				// e.g. "NotReady" when the condition is False/Unknown.
				status = append(status, "Not"+string(condition.Type))
			}
		}
	}
	if len(status) == 0 {
		status = append(status, "Unknown")
	}
	if obj.Spec.Unschedulable {
		status = append(status, "SchedulingDisabled")
	}

	roles := strings.Join(findNodeRoles(obj), ",")
	if len(roles) == 0 {
		roles = "<none>"
	}

	row.Cells = append(row.Cells, obj.Name, strings.Join(status, ","), roles, translateTimestampSince(obj.CreationTimestamp), obj.Status.NodeInfo.KubeletVersion)
	if options.Wide {
		osImage, kernelVersion, crVersion := obj.Status.NodeInfo.OSImage, obj.Status.NodeInfo.KernelVersion, obj.Status.NodeInfo.ContainerRuntimeVersion
		if osImage == "" {
			osImage = "<unknown>"
		}
		if kernelVersion == "" {
			kernelVersion = "<unknown>"
		}
		if crVersion == "" {
			crVersion = "<unknown>"
		}
		row.Cells = append(row.Cells, getNodeInternalIP(obj), getNodeExternalIP(obj), osImage, kernelVersion, crVersion)
	}

	return []metav1beta1.TableRow{row}, nil
}

// Returns first external ip of the node or "<none>" if none is found.
func getNodeExternalIP(node *api.Node) string {
	for _, address := range node.Status.Addresses {
		if address.Type == api.NodeExternalIP {
			return address.Address
		}
	}

	return "<none>"
}

// Returns the internal IP of the node or "<none>" if none is found.
func getNodeInternalIP(node *api.Node) string {
	for _, address := range node.Status.Addresses {
		if address.Type == api.NodeInternalIP {
			return address.Address
		}
	}

	return "<none>"
}

// findNodeRoles returns the roles of a given node.
// The roles are determined by looking for:
// * a node-role.kubernetes.io/<role>="" label
// * a kubernetes.io/role="<role>" label
func findNodeRoles(node *api.Node) []string {
	roles := sets.NewString()
	for k, v := range node.Labels {
		switch {
		case strings.HasPrefix(k, labelNodeRolePrefix):
			if role := strings.TrimPrefix(k, labelNodeRolePrefix); len(role) > 0 {
				roles.Insert(role)
			}

		case k == nodeLabelRole && v != "":
			roles.Insert(v)
		}
	}
	return roles.List()
}

// printNodeList renders one row per node in the list.
func printNodeList(list *api.NodeList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printNode(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printPersistentVolume renders a PV row: name, capacity, access modes,
// reclaim policy, phase (forced to "Terminating" while deleting), bound
// claim ("namespace/name"), storage class, status reason, and age.
func printPersistentVolume(obj *api.PersistentVolume, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	claimRefUID := ""
	if obj.Spec.ClaimRef != nil {
		claimRefUID += obj.Spec.ClaimRef.Namespace
		claimRefUID += "/"
		claimRefUID += obj.Spec.ClaimRef.Name
	}

	modesStr := helper.GetAccessModesAsString(obj.Spec.AccessModes)
	reclaimPolicyStr := string(obj.Spec.PersistentVolumeReclaimPolicy)

	aQty := obj.Spec.Capacity[api.ResourceStorage]
	aSize := aQty.String()

	phase := obj.Status.Phase
	if obj.ObjectMeta.DeletionTimestamp != nil {
		phase = "Terminating"
	}

	row.Cells = append(row.Cells, obj.Name, aSize, modesStr, reclaimPolicyStr, string(phase), claimRefUID, helper.GetPersistentVolumeClass(obj), obj.Status.Reason, translateTimestampSince(obj.CreationTimestamp))
	return []metav1beta1.TableRow{row}, nil
}

// printPersistentVolumeList renders one row per PV in the list.
func printPersistentVolumeList(list *api.PersistentVolumeList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printPersistentVolume(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printPersistentVolumeClaim renders a PVC row: name, phase (forced to
// "Terminating" while deleting), bound volume, capacity and access modes
// (only shown once bound), storage class, and age.
func printPersistentVolumeClaim(obj *api.PersistentVolumeClaim, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	phase := obj.Status.Phase
	if obj.ObjectMeta.DeletionTimestamp != nil {
		phase = "Terminating"
	}

	storage := obj.Spec.Resources.Requests[api.ResourceStorage]
	capacity := ""
	accessModes := ""
	if obj.Spec.VolumeName != "" {
		// Bound claims report the actual (status) modes and capacity.
		accessModes = helper.GetAccessModesAsString(obj.Status.AccessModes)
		storage = obj.Status.Capacity[api.ResourceStorage]
		capacity = storage.String()
	}

	row.Cells = append(row.Cells, obj.Name, string(phase), obj.Spec.VolumeName, capacity, accessModes, helper.GetPersistentVolumeClaimClass(obj), translateTimestampSince(obj.CreationTimestamp))
	return []metav1beta1.TableRow{row}, nil
}

// printPersistentVolumeClaimList renders one row per PVC in the list.
func printPersistentVolumeClaimList(list *api.PersistentVolumeClaimList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printPersistentVolumeClaim(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printEvent renders an event row: timestamps (absolute or relative
// depending on options), type, reason, involved-object target, and message;
// wide output adds subobject path, source, first-seen, count, and name.
func printEvent(obj *api.Event, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	// While watching event, we should print absolute time.
	var firstTimestamp, lastTimestamp string
	if options.AbsoluteTimestamps {
		firstTimestamp = obj.FirstTimestamp.String()
		lastTimestamp = obj.LastTimestamp.String()
	} else {
		firstTimestamp = translateTimestampSince(obj.FirstTimestamp)
		lastTimestamp = translateTimestampSince(obj.LastTimestamp)
	}

	// Target is "kind/name" (lowercased kind), or just the kind when the
	// involved object has no name.
	var target string
	if len(obj.InvolvedObject.Name) > 0 {
		target = fmt.Sprintf("%s/%s", strings.ToLower(obj.InvolvedObject.Kind), obj.InvolvedObject.Name)
	} else {
		target = strings.ToLower(obj.InvolvedObject.Kind)
	}
	if options.Wide {
		row.Cells = append(row.Cells,
			lastTimestamp,
			obj.Type,
			obj.Reason,
			target,
			obj.InvolvedObject.FieldPath,
			formatEventSource(obj.Source),
			strings.TrimSpace(obj.Message),
			firstTimestamp,
			int64(obj.Count),
			obj.Name,
		)
	} else {
		row.Cells = append(row.Cells,
			lastTimestamp,
			obj.Type,
			obj.Reason,
			target,
			strings.TrimSpace(obj.Message),
		)
	}

	return []metav1beta1.TableRow{row}, nil
}

// Sorts and prints the EventList in a human-friendly format.
// NOTE(review): no sorting is visible in this function body — ordering, if
// any, must come from the caller; confirm before relying on the comment.
func printEventList(list *api.EventList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	rows := make([]metav1beta1.TableRow, 0, len(list.Items))
	for i := range list.Items {
		r, err := printEvent(&list.Items[i], options)
		if err != nil {
			return nil, err
		}
		rows = append(rows, r...)
	}
	return rows, nil
}

// printRoleBinding renders a role binding row: name and age; wide output
// adds the role reference ("kind/name") plus users, groups, and service
// accounts bound by the subjects.
func printRoleBinding(obj *rbac.RoleBinding, options printers.PrintOptions) ([]metav1beta1.TableRow, error) {
	row := metav1beta1.TableRow{
		Object: runtime.RawExtension{Object: obj},
	}

	row.Cells = append(row.Cells, obj.Name, translateTimestampSince(obj.CreationTimestamp))
	if options.Wide {
		roleRef := fmt.Sprintf("%s/%s", obj.RoleRef.Kind, obj.RoleRef.Name)
		users, groups, sas, _ := rbac.SubjectsStrings(obj.Subjects)
		row.Cells = append(row.Cells, roleRef, strings.Join(users, ", "), strings.Join(groups, ", "), strings.Join(sas, ", "))
	}
	return []metav1beta1.TableRow{row}, nil
}

// Prints the RoleBinding in a human-friendly format.
func printRoleBindingList(list *rbac.RoleBindingList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printRoleBinding(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) } return rows, nil } func printClusterRoleBinding(obj *rbac.ClusterRoleBinding, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } row.Cells = append(row.Cells, obj.Name, translateTimestampSince(obj.CreationTimestamp)) if options.Wide { roleRef := fmt.Sprintf("%s/%s", obj.RoleRef.Kind, obj.RoleRef.Name) users, groups, sas, _ := rbac.SubjectsStrings(obj.Subjects) row.Cells = append(row.Cells, roleRef, strings.Join(users, ", "), strings.Join(groups, ", "), strings.Join(sas, ", ")) } return []metav1beta1.TableRow{row}, nil } // Prints the ClusterRoleBinding in a human-friendly format. func printClusterRoleBindingList(list *rbac.ClusterRoleBindingList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printClusterRoleBinding(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) 
} return rows, nil } func printCertificateSigningRequest(obj *certificates.CertificateSigningRequest, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } status, err := extractCSRStatus(obj) if err != nil { return nil, err } row.Cells = append(row.Cells, obj.Name, translateTimestampSince(obj.CreationTimestamp), obj.Spec.Username, status) return []metav1beta1.TableRow{row}, nil } func extractCSRStatus(csr *certificates.CertificateSigningRequest) (string, error) { var approved, denied bool for _, c := range csr.Status.Conditions { switch c.Type { case certificates.CertificateApproved: approved = true case certificates.CertificateDenied: denied = true default: return "", fmt.Errorf("unknown csr condition %q", c) } } var status string // must be in order of presidence if denied { status += "Denied" } else if approved { status += "Approved" } else { status += "Pending" } if len(csr.Status.Certificate) > 0 { status += ",Issued" } return status, nil } func printCertificateSigningRequestList(list *certificates.CertificateSigningRequestList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printCertificateSigningRequest(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) 
} return rows, nil } func printComponentStatus(obj *api.ComponentStatus, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } status := "Unknown" message := "" error := "" for _, condition := range obj.Conditions { if condition.Type == api.ComponentHealthy { if condition.Status == api.ConditionTrue { status = "Healthy" } else { status = "Unhealthy" } message = condition.Message error = condition.Error break } } row.Cells = append(row.Cells, obj.Name, status, message, error) return []metav1beta1.TableRow{row}, nil } func printComponentStatusList(list *api.ComponentStatusList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printComponentStatus(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) } return rows, nil } func printDeployment(obj *apps.Deployment, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } desiredReplicas := obj.Spec.Replicas updatedReplicas := obj.Status.UpdatedReplicas readyReplicas := obj.Status.ReadyReplicas availableReplicas := obj.Status.AvailableReplicas age := translateTimestampSince(obj.CreationTimestamp) containers := obj.Spec.Template.Spec.Containers selector, err := metav1.LabelSelectorAsSelector(obj.Spec.Selector) if err != nil { // this shouldn't happen if LabelSelector passed validation return nil, err } row.Cells = append(row.Cells, obj.Name, fmt.Sprintf("%d/%d", int64(readyReplicas), int64(desiredReplicas)), int64(updatedReplicas), int64(availableReplicas), age) if options.Wide { containers, images := layoutContainerCells(containers) row.Cells = append(row.Cells, containers, images, selector.String()) } return []metav1beta1.TableRow{row}, nil } func printDeploymentList(list *apps.DeploymentList, options 
printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printDeployment(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) } return rows, nil } func formatHPAMetrics(specs []autoscaling.MetricSpec, statuses []autoscaling.MetricStatus) string { if len(specs) == 0 { return "<none>" } list := []string{} max := 2 more := false count := 0 for i, spec := range specs { switch spec.Type { case autoscaling.ExternalMetricSourceType: if spec.External.Target.AverageValue != nil { current := "<unknown>" if len(statuses) > i && statuses[i].External != nil && &statuses[i].External.Current.AverageValue != nil { current = statuses[i].External.Current.AverageValue.String() } list = append(list, fmt.Sprintf("%s/%s (avg)", current, spec.External.Target.AverageValue.String())) } else { current := "<unknown>" if len(statuses) > i && statuses[i].External != nil { current = statuses[i].External.Current.Value.String() } list = append(list, fmt.Sprintf("%s/%s", current, spec.External.Target.Value.String())) } case autoscaling.PodsMetricSourceType: current := "<unknown>" if len(statuses) > i && statuses[i].Pods != nil { current = statuses[i].Pods.Current.AverageValue.String() } list = append(list, fmt.Sprintf("%s/%s", current, spec.Pods.Target.AverageValue.String())) case autoscaling.ObjectMetricSourceType: current := "<unknown>" if len(statuses) > i && statuses[i].Object != nil { current = statuses[i].Object.Current.Value.String() } list = append(list, fmt.Sprintf("%s/%s", current, spec.Object.Target.Value.String())) case autoscaling.ResourceMetricSourceType: if spec.Resource.Target.AverageValue != nil { current := "<unknown>" if len(statuses) > i && statuses[i].Resource != nil { current = statuses[i].Resource.Current.AverageValue.String() } list = append(list, fmt.Sprintf("%s/%s", current, spec.Resource.Target.AverageValue.String())) } else { current := 
"<unknown>" if len(statuses) > i && statuses[i].Resource != nil && statuses[i].Resource.Current.AverageUtilization != nil { current = fmt.Sprintf("%d%%", *statuses[i].Resource.Current.AverageUtilization) } target := "<auto>" if spec.Resource.Target.AverageUtilization != nil { target = fmt.Sprintf("%d%%", *spec.Resource.Target.AverageUtilization) } list = append(list, fmt.Sprintf("%s/%s", current, target)) } default: list = append(list, "<unknown type>") } count++ } if count > max { list = list[:max] more = true } ret := strings.Join(list, ", ") if more { return fmt.Sprintf("%s + %d more...", ret, count-max) } return ret } func printHorizontalPodAutoscaler(obj *autoscaling.HorizontalPodAutoscaler, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } reference := fmt.Sprintf("%s/%s", obj.Spec.ScaleTargetRef.Kind, obj.Spec.ScaleTargetRef.Name) minPods := "<unset>" metrics := formatHPAMetrics(obj.Spec.Metrics, obj.Status.CurrentMetrics) if obj.Spec.MinReplicas != nil { minPods = fmt.Sprintf("%d", *obj.Spec.MinReplicas) } maxPods := obj.Spec.MaxReplicas currentReplicas := obj.Status.CurrentReplicas row.Cells = append(row.Cells, obj.Name, reference, metrics, minPods, int64(maxPods), int64(currentReplicas), translateTimestampSince(obj.CreationTimestamp)) return []metav1beta1.TableRow{row}, nil } func printHorizontalPodAutoscalerList(list *autoscaling.HorizontalPodAutoscalerList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printHorizontalPodAutoscaler(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) 
} return rows, nil } func printConfigMap(obj *api.ConfigMap, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } row.Cells = append(row.Cells, obj.Name, int64(len(obj.Data)), translateTimestampSince(obj.CreationTimestamp)) return []metav1beta1.TableRow{row}, nil } func printConfigMapList(list *api.ConfigMapList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printConfigMap(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) } return rows, nil } func printPodSecurityPolicy(obj *policy.PodSecurityPolicy, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } capabilities := make([]string, len(obj.Spec.AllowedCapabilities)) for i, c := range obj.Spec.AllowedCapabilities { capabilities[i] = string(c) } volumes := make([]string, len(obj.Spec.Volumes)) for i, v := range obj.Spec.Volumes { volumes[i] = string(v) } row.Cells = append(row.Cells, obj.Name, fmt.Sprintf("%v", obj.Spec.Privileged), strings.Join(capabilities, ","), string(obj.Spec.SELinux.Rule), string(obj.Spec.RunAsUser.Rule), string(obj.Spec.FSGroup.Rule), string(obj.Spec.SupplementalGroups.Rule), obj.Spec.ReadOnlyRootFilesystem, strings.Join(volumes, ",")) return []metav1beta1.TableRow{row}, nil } func printPodSecurityPolicyList(list *policy.PodSecurityPolicyList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printPodSecurityPolicy(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) 
} return rows, nil } func printNetworkPolicy(obj *networking.NetworkPolicy, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } row.Cells = append(row.Cells, obj.Name, metav1.FormatLabelSelector(&obj.Spec.PodSelector), translateTimestampSince(obj.CreationTimestamp)) return []metav1beta1.TableRow{row}, nil } func printNetworkPolicyList(list *networking.NetworkPolicyList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printNetworkPolicy(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) } return rows, nil } func printStorageClass(obj *storage.StorageClass, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } name := obj.Name if storageutil.IsDefaultAnnotation(obj.ObjectMeta) { name += " (default)" } provtype := obj.Provisioner row.Cells = append(row.Cells, name, provtype, translateTimestampSince(obj.CreationTimestamp)) return []metav1beta1.TableRow{row}, nil } func printStorageClassList(list *storage.StorageClassList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printStorageClass(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) 
} return rows, nil } func printLease(obj *coordination.Lease, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } var holderIdentity string if obj.Spec.HolderIdentity != nil { holderIdentity = *obj.Spec.HolderIdentity } row.Cells = append(row.Cells, obj.Name, holderIdentity, translateTimestampSince(obj.CreationTimestamp)) return []metav1beta1.TableRow{row}, nil } func printLeaseList(list *coordination.LeaseList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printLease(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) } return rows, nil } func printStatus(obj *metav1.Status, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } row.Cells = append(row.Cells, obj.Status, obj.Reason, obj.Message) return []metav1beta1.TableRow{row}, nil } // Lay out all the containers on one line if use wide output. 
func layoutContainerCells(containers []api.Container) (names string, images string) { var namesBuffer bytes.Buffer var imagesBuffer bytes.Buffer for i, container := range containers { namesBuffer.WriteString(container.Name) imagesBuffer.WriteString(container.Image) if i != len(containers)-1 { namesBuffer.WriteString(",") imagesBuffer.WriteString(",") } } return namesBuffer.String(), imagesBuffer.String() } // formatEventSource formats EventSource as a comma separated string excluding Host when empty func formatEventSource(es api.EventSource) string { EventSourceString := []string{es.Component}<|fim▁hole|>} func printControllerRevision(obj *apps.ControllerRevision, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } controllerRef := metav1.GetControllerOf(obj) controllerName := "<none>" if controllerRef != nil { withKind := true gv, err := schema.ParseGroupVersion(controllerRef.APIVersion) if err != nil { return nil, err } gvk := gv.WithKind(controllerRef.Kind) controllerName = printers.FormatResourceName(gvk.GroupKind(), controllerRef.Name, withKind) } revision := obj.Revision age := translateTimestampSince(obj.CreationTimestamp) row.Cells = append(row.Cells, obj.Name, controllerName, revision, age) return []metav1beta1.TableRow{row}, nil } func printControllerRevisionList(list *apps.ControllerRevisionList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printControllerRevision(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) 
} return rows, nil } func printResourceQuota(resourceQuota *api.ResourceQuota, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: resourceQuota}, } resources := make([]api.ResourceName, 0, len(resourceQuota.Status.Hard)) for resource := range resourceQuota.Status.Hard { resources = append(resources, resource) } sort.Sort(SortableResourceNames(resources)) requestColumn := bytes.NewBuffer([]byte{}) limitColumn := bytes.NewBuffer([]byte{}) for i := range resources { w := requestColumn resource := resources[i] usedQuantity := resourceQuota.Status.Used[resource] hardQuantity := resourceQuota.Status.Hard[resource] // use limitColumn writer if a resource name prefixed with "limits" is found if pieces := strings.Split(resource.String(), "."); len(pieces) > 1 && pieces[0] == "limits" { w = limitColumn } fmt.Fprintf(w, "%s: %s/%s, ", resource, usedQuantity.String(), hardQuantity.String()) } age := translateTimestampSince(resourceQuota.CreationTimestamp) row.Cells = append(row.Cells, resourceQuota.Name, age, strings.TrimSuffix(requestColumn.String(), ", "), strings.TrimSuffix(limitColumn.String(), ", ")) return []metav1beta1.TableRow{row}, nil } func printResourceQuotaList(list *api.ResourceQuotaList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printResourceQuota(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) 
} return rows, nil } func printPriorityClass(obj *scheduling.PriorityClass, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { row := metav1beta1.TableRow{ Object: runtime.RawExtension{Object: obj}, } name := obj.Name value := obj.Value globalDefault := obj.GlobalDefault row.Cells = append(row.Cells, name, int64(value), globalDefault, translateTimestampSince(obj.CreationTimestamp)) return []metav1beta1.TableRow{row}, nil } func printPriorityClassList(list *scheduling.PriorityClassList, options printers.PrintOptions) ([]metav1beta1.TableRow, error) { rows := make([]metav1beta1.TableRow, 0, len(list.Items)) for i := range list.Items { r, err := printPriorityClass(&list.Items[i], options) if err != nil { return nil, err } rows = append(rows, r...) } return rows, nil } func printBoolPtr(value *bool) string { if value != nil { return printBool(*value) } return "<unset>" } func printBool(value bool) string { if value { return "True" } return "False" } type SortableResourceNames []api.ResourceName func (list SortableResourceNames) Len() int { return len(list) } func (list SortableResourceNames) Swap(i, j int) { list[i], list[j] = list[j], list[i] } func (list SortableResourceNames) Less(i, j int) bool { return list[i] < list[j] }<|fim▁end|>
if len(es.Host) > 0 { EventSourceString = append(EventSourceString, es.Host) } return strings.Join(EventSourceString, ", ")
<|file_name|>client_activity.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import from __future__ import print_function from typing import Any from argparse import ArgumentParser from django.core.management.base import BaseCommand from django.db.models import Count, QuerySet from zerver.models import UserActivity, UserProfile, Realm, \ get_realm, get_user_profile_by_email import datetime class Command(BaseCommand): help = """Report rough client activity globally, for a realm, or for a user Usage examples: ./manage.py client_activity ./manage.py client_activity zulip.com ./manage.py client_activity [email protected]""" def add_arguments(self, parser): # type: (ArgumentParser) -> None parser.add_argument('arg', metavar='<arg>', type=str, nargs='?', default=None, help="realm or user to estimate client activity for") def compute_activity(self, user_activity_objects): # type: (QuerySet) -> None # Report data from the past week. # # This is a rough report of client activity because we inconsistently # register activity from various clients; think of it as telling you # approximately how many people from a group have used a particular # client recently. For example, this might be useful to get a sense of # how popular different versions of a desktop client are. # # Importantly, this does NOT tell you anything about the relative # volumes of requests from clients. threshold = datetime.datetime.now() - datetime.timedelta(days=7) client_counts = user_activity_objects.filter( last_visit__gt=threshold).values("client__name").annotate( count=Count('client__name')) total = 0 counts = [] for client_type in client_counts: count = client_type["count"] client = client_type["client__name"] total += count counts.append((count, client)) counts.sort() for count in counts: print("%25s %15d" % (count[1], count[0])) print("Total:", total)<|fim▁hole|> # type: (*Any, **str) -> None if options['arg'] is None: # Report global activity. 
self.compute_activity(UserActivity.objects.all()) else: arg = options['arg'] try: # Report activity for a user. user_profile = get_user_profile_by_email(arg) self.compute_activity(UserActivity.objects.filter( user_profile=user_profile)) except UserProfile.DoesNotExist: try: # Report activity for a realm. realm = get_realm(arg) self.compute_activity(UserActivity.objects.filter( user_profile__realm=realm)) except Realm.DoesNotExist: print("Unknown user or domain %s" % (arg,)) exit(1)<|fim▁end|>
def handle(self, *args, **options):
<|file_name|>_guajacum.py<|end_file_name|><|fim▁begin|># -*- Mode:Python -*- ##########################################################################<|fim▁hole|># # # Guacamole Tree printer # # # # Copyright 2014 Janek Bevendorff # # VR Systems Group Bauhaus University Weimar # # # # AVANGO is free software: you can redistribute it and/or modify # # it under the terms of the GNU Lesser General Public License as # # published by the Free Software Foundation, version 3. # # # # AVANGO is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU General Public License for more details. # # # # You should have received a copy of the GNU Lesser General Public # # License along with AVANGO. If not, see <http://www.gnu.org/licenses/>. # # # # USAGE: # # For integrating the tree printer into your project, just import # # this module. It will automatically monkey patch itself # # into your scenegraph and tree node objects. Each of those objects # # will be extended by a print_tree() and print_fields() method. # # For a list of possible parameters read the pyDoc block of # # GuajacumTreePrinter.printTree(). # # # ########################################################################## import re import sys import avango.gua class GuajacumTreePrinter(): """ Recursively print the scene graph or subtrees of certain nodes. This class will be hooked into avango.gua._gua.SceneGraph and avango.gua._gua.Node to provide a printTree() method for SceneGraph and Node objects. """ def __init__(self, graph): self._root = graph def printTree(self, args): """ Print Avango scene graph recursively. @param args: dict of arguments for the tree generation. 
Possible keys are: - int max_depth: reduce maximum tree depth (-1 means full tree traversal) - str exclude_pattern: regular expression to exclude certain nodes by name - bool print_full_path: print full path for each node (default: False) - bool print_depth: print depth in tree for each node (default: False) - bool shorten_sub_trees: shorten subtrees with more than n child nodes (-1 means full tree traversal) - str group_by_name: regular expression for grouping child nodes together - bool print_memory_addr: show the memory address for each node (default: False) - bool print_field_names: show field names for each node - bool print_field_values: show values of fields for each node (implies print_field_names) @type args: dict @throws Exception: Invalid tree structure """ # check given arguments for i in list(args.keys()): if i not in self._treeOpts: print(self._colorize('error', "Invalid argument '" + i + "'"), file=sys.stderr) return joined_args = dict(list(self._treeOpts.items()) + list(args.items())) _root = self._root if hasattr(self._root, 'Root'): _root = self._root.Root.value elif hasattr(self._root, 'Children'): _root = self._root else: raise Exception( "Invalid tree structure, missing attributes 'Root' or 'Children'") self.__printRecursively(_root, 0, joined_args) def __printRecursively(self, node, cur_depth, args, cur_path=[], is_grouped=False): # return if current node name matches user-specified exclude pattern if None != args['exclude_pattern'] and re.search( args['exclude_pattern'], node.Name.value): return # push current basename to path stack cur_path.append(node.Name.value) obj_name = str(node) # remove memory address from string representation if not needed if not args['print_memory_addr']: obj_name = re.sub(' object at 0x[0-9a-zA-Z]+>$', '>', obj_name) print(self._indent( cur_depth, 'Name: %s%s Obj: %s%s%s' % (self._colorize('important', '"' + node.Name.value + '"'), self._colorize('bold', ' (Group)') if is_grouped else '', 
self._colorize('important', obj_name, ), ' Path: "' + '/'.join(cur_path).replace('//', '/', 1) + '"' if args['print_full_path'] else '', ' Depth: ' + str(cur_depth) if args['print_depth'] else ''))) if (args['print_field_values'] or args['print_field_names'] ) and node.get_num_fields(): print(self._indent(cur_depth + 1, self._colorize('bold', 'Fields:'))) num_fields = node.get_num_fields() for i in range(num_fields): if args['print_field_values']: print(self._indent(cur_depth + 2, '%s: %s = %s' % (node.get_field_name(i), node.get_field(i).__class__.__name__, str(node.get_field(i).value)))) else: print(self._indent(cur_depth + 2, '%s: %s' % (node.get_field_name(i), node.get_field(i).__class__.__name__))) # if it's a leaf or max_depth is reached, pop current level from path stack and abort recursion if 0 == len(node.Children.value) or cur_depth == args['max_depth']: if len(node.Children.value): print(self._indent(cur_depth + 1, self._colorize( 'bold', 'Node has children...'))) cur_path.pop() return counter = 0 used_name_count = 0 for i in node.Children.value: # group by names if option 'group_by_name' is set name_matches = False if None != args['group_by_name'] and re.search( args['group_by_name'], i.Name.value): name_matches = True used_name_count += 1 if 1 != used_name_count: continue # cut off sub trees if shorten_sub_trees is set if -1 < args['shorten_sub_trees' ] and counter >= args['shorten_sub_trees']: print(self._indent(cur_depth, \ self._colorize('bold', 'Shortened sub tree (' + str(len(node.Children.value) - counter) + ' more...)'))) break self.__printRecursively(i, cur_depth + 1, args, cur_path, used_name_count and name_matches) counter += 1 if 1 < used_name_count: print(self._indent(cur_depth, self._colorize( 'bold', 'Grouped children: ' + str(used_name_count)))) # go up the tree stack cur_path.pop() def _indent(self, depth, text): """ Indent a line to a certain depth. 
""" if 0 >= depth: return text return '| ' * (depth - 1) + '|___ ' + text def _colorize(self, color, text): """ Wrap text in ANSI escape codes (terminal color codes). Possible values for color: important, error, bold """ color_codes = { 'important': '\033[1;32m', 'error': '\033[1;93m', 'bold': '\033[1m', 'none': '\033[0m', } if color not in color_codes or 'none' == color: return text return color_codes[color] + text + color_codes['none'] # possible tree formatting user options _treeOpts = { 'max_depth': -1, 'exclude_pattern': None, 'print_full_path': False, 'print_depth': False, 'shorten_sub_trees': -1, 'group_by_name': None, 'print_memory_addr': False, 'print_field_names': False, 'print_field_values': False, } def _printTree(self, **args): e314 = GuajacumTreePrinter(self) e314.printTree(args) def _printFields(self): e314 = GuajacumTreePrinter(self) args = {'print_field_values': True, 'max_depth': 0} e314.printTree(args) # now put some antioxidant on our guacamole avango.gua._gua.SceneGraph.print_tree = _printTree avango.gua._gua.Node.print_tree = _printTree avango.gua._gua.SceneGraph.print_fields = _printFields avango.gua._gua.Node.print_fields = _printFields<|fim▁end|>
<|file_name|>t2t_trainer_test.py<|end_file_name|><|fim▁begin|># coding=utf-8 # Copyright 2018 The Tensor2Tensor Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for t2t_trainer.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensor2tensor.bin import t2t_trainer from tensor2tensor.utils import trainer_lib_test import tensorflow as tf FLAGS = tf.flags.FLAGS class TrainerTest(tf.test.TestCase): @classmethod def setUpClass(cls): trainer_lib_test.TrainerLibTest.setUpClass()<|fim▁hole|> def testTrain(self): FLAGS.problem = "tiny_algo" FLAGS.model = "transformer" FLAGS.hparams_set = "transformer_tiny" FLAGS.train_steps = 1 FLAGS.eval_steps = 1 FLAGS.output_dir = tf.test.get_temp_dir() FLAGS.data_dir = tf.test.get_temp_dir() t2t_trainer.main(None) if __name__ == "__main__": tf.test.main()<|fim▁end|>
<|file_name|>multithreaded.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> use std::default::Default; use std::sync::Arc; use std::thread::spawn; use irc::client::prelude::*; fn main() { let config = Config { nickname: Some(format!("pickles")), server: Some(format!("irc.fyrechat.net")), channels: Some(vec![format!("#vana")]), .. Default::default() }; let server = Arc::new(IrcServer::from_config(config).unwrap()); server.identify().unwrap(); let server = server.clone(); let _ = spawn(move || { for msg in server.iter() { print!("{}", msg.unwrap().into_string()); } }).join(); // You might not want to join here for actual multi-threading. }<|fim▁end|>
extern crate irc;
<|file_name|>btrfs_unit_test.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The Serviced Authors. // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // +build unit package btrfs import ( "fmt" "github.com/stretchr/testify/assert" "math" "testing" ) type ParseDFTest struct { label string in []string out []BtrfsDFData outmsg string err error errmsg string } /* type BtrfsDFData struct { DataType string Level string Total uint64 Used uint64 }*/ const ( GiB = uint64(1024 * 1024 * 1024) MiB = uint64(1024 * 1024) KiB = uint64(1024) B = uint64(1) ) var parsedftests = []ParseDFTest{ { label: "output from btrfs fi df (v3.12)", in: []string{ "Data, single: total=9.00GiB, used=8.67GiB", "System, DUP: total=32.00MiB, used=16.00KiB", "Metadata, DUP: total=1.00GiB, used=466.88MiB", }, out: []BtrfsDFData{ {DataType: "Data", Level: "single", Total: toBytes(9.00, GiB), Used: toBytes(8.67, GiB)}, {DataType: "System", Level: "DUP", Total: toBytes(32.00, MiB), Used: toBytes(16.00, KiB)}, {DataType: "Metadata", Level: "DUP", Total: toBytes(1.00, GiB), Used: toBytes(466.88, MiB)}, }, outmsg: "output did not match expectation", err: nil, errmsg: "error was not nil", }, { label: "empty lines should be tolerated in input", in: []string{ "Data, single: total=9.00GiB, used=8.67GiB", "System, DUP: total=32.00MiB, used=16.00KiB", "", "Metadata, DUP: total=1.00GiB, used=466.88MiB", "", }, out: []BtrfsDFData{ {DataType: "Data", Level: "single", Total: toBytes(9.00, GiB), Used: toBytes(8.67, GiB)}, 
{DataType: "System", Level: "DUP", Total: toBytes(32.00, MiB), Used: toBytes(16.00, KiB)}, {DataType: "Metadata", Level: "DUP", Total: toBytes(1.00, GiB), Used: toBytes(466.88, MiB)}, }, outmsg: "output did not match expectation", err: nil, errmsg: "error was not nil", }, { label: "output from btrfs fi df (v3.17)", in: []string{ "System, DUP: total=8.00MiB, used=16.00KiB", "System, single: total=4.00MiB, used=0.00B", "Metadata, DUP: total=51.19MiB, used=112.00KiB", "Metadata, single: total=8.00MiB, used=0.00B", "GlobalReserve, single: total=16.00MiB, used=0.00B", }, out: []BtrfsDFData{ {DataType: "System", Level: "DUP", Total: toBytes(8.00, MiB), Used: toBytes(16.00, KiB)}, {DataType: "System", Level: "single", Total: toBytes(4.00, MiB), Used: toBytes(0.00, B)}, {DataType: "Metadata", Level: "DUP", Total: toBytes(51.19, MiB), Used: toBytes(112.00, KiB)}, {DataType: "Metadata", Level: "single", Total: toBytes(8.00, MiB), Used: toBytes(0.00, B)}, {DataType: "GlobalReserve", Level: "single", Total: toBytes(16.00, MiB), Used: toBytes(0.00, B)}, }, outmsg: "output did not match expectation", err: nil, errmsg: "error was not nil", }, { label: "output from btrfs fi df --raw (v3.17)", in: []string{ "System, DUP: total=8388608, used=16384", "System, single: total=4194304, used=0", "Metadata, DUP: total=53673984, used=114688", "Metadata, single: total=8388608, used=0", "GlobalReserve, single: total=16777216, used=0", }, out: []BtrfsDFData{ {DataType: "System", Level: "DUP", Total: uint64(8388608), Used: uint64(16384)}, {DataType: "System", Level: "single", Total: uint64(4194304), Used: uint64(0)}, {DataType: "Metadata", Level: "DUP", Total: uint64(53673984), Used: uint64(114688)}, {DataType: "Metadata", Level: "single", Total: uint64(8388608), Used: uint64(0)}, {DataType: "GlobalReserve", Level: "single", Total: uint64(16777216), Used: uint64(0)}, }, outmsg: "output did not match expectation", err: nil, errmsg: "error was not nil", }, { label: "bad line in output (wrong 
number of fields)", in: []string{ "Data, single: total=9.00GiB, used=8.67GiB", "System, DUP: total=32.00MiB, used=16.00KiB", "Metadata, DUP: total=1.00GiB, used=466.88MiB", "arglebarglefoo", }, out: []BtrfsDFData{}, outmsg: "output should be empty", err: fmt.Errorf("Wrong number of fields (%d, expected 4) in line %q", 1, "arglebarglefoo"), errmsg: "error did not match expectation", }, { label: "bad line in output (bad first field name)", in: []string{ "NotReal, single: total=9.00GiB, used=8.67GiB", "System, DUP: total=32.00MiB, used=16.00KiB", "Metadata, DUP: total=1.00GiB, used=466.88MiB", }, out: []BtrfsDFData{}, outmsg: "output should be empty", err: fmt.Errorf("Unrecognized field %q in line %q", "NotReal", "NotReal, single: total=9.00GiB, used=8.67GiB"), errmsg: "error did not match expectation", }, { label: "bad line in output (wrong number of fields - too many)", in: []string{ "Data, single: total=9.00GiB, used=8.67GiB, extraField=1.23KiB", "System, DUP: total=32.00MiB, used=16.00KiB", "Metadata, DUP: total=1.00GiB, used=466.88MiB", }, out: []BtrfsDFData{}, outmsg: "output should be empty", err: fmt.Errorf("Wrong number of fields (%d, expected 4) in line %q", 5, "Data, single: total=9.00GiB, used=8.67GiB, extraField=1.23KiB"), errmsg: "error did not match expectation", }, { label: "bad line in output (wrong number of fields - too few)", in: []string{ "Data, single: total=9.00GiB", "System, DUP: total=32.00MiB, used=16.00KiB", "Metadata, DUP: total=1.00GiB, used=466.88MiB", }, out: []BtrfsDFData{}, outmsg: "output should be empty", err: fmt.Errorf("Wrong number of fields (%d, expected 4) in line %q", 3, "Data, single: total=9.00GiB"), errmsg: "error did not match expectation", }, { label: "bad line in output (total field missing)", in: []string{ "Data, single: nottotal=9.00GiB, used=8.67GiB", "System, DUP: total=32.00MiB, used=16.00KiB", "Metadata, DUP: total=1.00GiB, used=466.88MiB", }, out: []BtrfsDFData{}, outmsg: "output should be empty", err: 
fmt.Errorf("expected total field, not found in line %q", "Data, single: nottotal=9.00GiB, used=8.67GiB"), errmsg: "error did not match expectation", }, { label: "bad line in output (used field missing)", in: []string{ "Data, single: total=9.00GiB, notused=8.67GiB", "System, DUP: total=32.00MiB, used=16.00KiB", "Metadata, DUP: total=1.00GiB, used=466.88MiB", }, out: []BtrfsDFData{}, outmsg: "output should be empty", err: fmt.Errorf("expected used field, not found in line %q", "Data, single: total=9.00GiB, notused=8.67GiB"), errmsg: "error did not match expectation", }, { label: "too few lines in output", in: []string{ "System, DUP: total=32.00MiB, used=16.00KiB", "Metadata, DUP: total=1.00GiB, used=466.88MiB", }, out: []BtrfsDFData{}, outmsg: "output should be empty", err: fmt.Errorf("insufficient output: %v", "System, DUP: total=32.00MiB, used=16.00KiB\nMetadata, DUP: total=1.00GiB, used=466.88MiB"), errmsg: "error did not match expectation", }, { label: "too few lines in output - with blank lines", in: []string{ "System, DUP: total=32.00MiB, used=16.00KiB", "", "Metadata, DUP: total=1.00GiB, used=466.88MiB", }, out: []BtrfsDFData{}, outmsg: "output should be empty", err: fmt.Errorf("insufficient output: %v", "System, DUP: total=32.00MiB, used=16.00KiB\nMetadata, DUP: total=1.00GiB, used=466.88MiB"), errmsg: "error did not match expectation", }, } func toBytes(value float64, multiplier uint64) uint64 { return uint64(math.Floor(value * float64(multiplier))) } func TestStub(t *testing.T) { assert.True(t, true, "Test environment set up properly.") } func TestParseDF(t *testing.T) { for _, tc := range parsedftests {<|fim▁hole|> result, err := parseDF(tc.in) assert.Equal(t, err, tc.err, fmt.Sprintf("%s: %s", tc.label, tc.errmsg)) assert.Equal(t, result, tc.out, fmt.Sprintf("%s: %s", tc.label, tc.outmsg)) } }<|fim▁end|>
<|file_name|>compiler_fill_image.cpp<|end_file_name|><|fim▁begin|>#include "utest_helper.hpp" static void compiler_fill_image(void) { const size_t w = 512; const size_t h = 512;<|fim▁hole|> format.image_channel_order = CL_RGBA; format.image_channel_data_type = CL_UNSIGNED_INT8; // Setup kernel and images OCL_CREATE_KERNEL("test_fill_image"); OCL_CREATE_IMAGE2D(buf[0], 0, &format, w, h, 0, NULL); // Run the kernel OCL_SET_ARG(0, sizeof(cl_mem), &buf[0]); OCL_SET_ARG(1, sizeof(color), &color); globals[0] = w; globals[1] = h; locals[0] = 16; locals[1] = 16; OCL_NDRANGE(2); // Check result OCL_MAP_BUFFER(0); for (uint32_t j = 0; j < h; ++j) for (uint32_t i = 0; i < w; i++) OCL_ASSERT(((uint32_t*)buf_data[0])[j * w + i] == 0x78563412); OCL_UNMAP_BUFFER(0); } MAKE_UTEST_FROM_FUNCTION(compiler_fill_image);<|fim▁end|>
uint32_t color = 0x12345678; cl_image_format format;
<|file_name|>index.spec.ts<|end_file_name|><|fim▁begin|>import { _test } from './testlib'; import * as expect from 'expect'; import { join, resolve, sep as pathSep } from 'path'; import { tmpdir } from 'os'; import semver = require('semver'); import { BIN_PATH_JS, nodeSupportsEsmHooks, ts, tsSupportsShowConfig, tsSupportsTsconfigInheritanceViaNodePackages, } from './helpers'; import { lstatSync, mkdtempSync } from 'fs'; import { npath } from '@yarnpkg/fslib'; import type _createRequire from 'create-require'; import { pathToFileURL } from 'url'; import { createExec } from './exec-helpers'; import { BIN_CWD_PATH, BIN_PATH, BIN_SCRIPT_PATH, DIST_DIR, ROOT_DIR, TEST_DIR, testsDirRequire, tsNodeTypes, xfs, contextTsNodeUnderTest, CMD_TS_NODE_WITH_PROJECT_FLAG, CMD_TS_NODE_WITHOUT_PROJECT_FLAG, CMD_ESM_LOADER_WITHOUT_PROJECT, EXPERIMENTAL_MODULES_FLAG, } from './helpers'; const exec = createExec({ cwd: TEST_DIR, }); const test = _test.context(contextTsNodeUnderTest); test.suite('ts-node', (test) => { test('should export the correct version', (t) => { expect(t.context.tsNodeUnderTest.VERSION).toBe( require('../../package.json').version ); }); test('should export all CJS entrypoints', () => { // Ensure our package.json "exports" declaration allows `require()`ing all our entrypoints // https://github.com/TypeStrong/ts-node/pull/1026 testsDirRequire.resolve('ts-node'); // only reliably way to ask node for the root path of a dependency is Path.resolve(require.resolve('ts-node/package'), '..') testsDirRequire.resolve('ts-node/package'); testsDirRequire.resolve('ts-node/package.json'); // All bin entrypoints for people who need to augment our CLI: `node -r otherstuff ./node_modules/ts-node/dist/bin` testsDirRequire.resolve('ts-node/dist/bin'); testsDirRequire.resolve('ts-node/dist/bin.js'); testsDirRequire.resolve('ts-node/dist/bin-transpile'); testsDirRequire.resolve('ts-node/dist/bin-transpile.js'); testsDirRequire.resolve('ts-node/dist/bin-script'); 
testsDirRequire.resolve('ts-node/dist/bin-script.js'); testsDirRequire.resolve('ts-node/dist/bin-cwd'); testsDirRequire.resolve('ts-node/dist/bin-cwd.js'); // Must be `require()`able obviously testsDirRequire.resolve('ts-node/register'); testsDirRequire.resolve('ts-node/register/files'); testsDirRequire.resolve('ts-node/register/transpile-only'); testsDirRequire.resolve('ts-node/register/type-check'); // `node --loader ts-node/esm` testsDirRequire.resolve('ts-node/esm'); testsDirRequire.resolve('ts-node/esm.mjs'); testsDirRequire.resolve('ts-node/esm/transpile-only'); testsDirRequire.resolve('ts-node/esm/transpile-only.mjs'); testsDirRequire.resolve('ts-node/transpilers/swc'); testsDirRequire.resolve('ts-node/transpilers/swc-experimental'); testsDirRequire.resolve('ts-node/node10/tsconfig.json'); testsDirRequire.resolve('ts-node/node12/tsconfig.json'); testsDirRequire.resolve('ts-node/node14/tsconfig.json'); testsDirRequire.resolve('ts-node/node16/tsconfig.json'); }); test('should not load typescript outside of loadConfig', async () => { const { err, stdout } = await exec( `node -e "require('ts-node'); console.dir(Object.keys(require.cache).filter(k => k.includes('node_modules/typescript')).length)"` ); expect(err).toBe(null); expect(stdout).toBe('0\n'); }); test.suite('cli', (test) => { test('should execute cli', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} hello-world` ); expect(err).toBe(null); expect(stdout).toBe('Hello, world!\n'); }); test('shows usage via --help', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITHOUT_PROJECT_FLAG} --help` ); expect(err).toBe(null); expect(stdout).toMatch(/Usage: ts-node /); }); test('shows version via -v', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITHOUT_PROJECT_FLAG} -v` ); expect(err).toBe(null); expect(stdout.trim()).toBe( 'v' + testsDirRequire('ts-node/package').version ); }); test('shows version of compiler via -vv', async () => { const { 
err, stdout } = await exec( `${CMD_TS_NODE_WITHOUT_PROJECT_FLAG} -vv` ); expect(err).toBe(null); expect(stdout.trim()).toBe( `ts-node v${testsDirRequire('ts-node/package').version}\n` + `node ${process.version}\n` + `compiler v${testsDirRequire('typescript/package').version}` ); }); test('should register via cli', async () => { const { err, stdout } = await exec( `node -r ts-node/register hello-world.ts`, { cwd: TEST_DIR, } ); expect(err).toBe(null); expect(stdout).toBe('Hello, world!\n'); }); test('should execute cli with absolute path', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} "${join(TEST_DIR, 'hello-world')}"` ); expect(err).toBe(null); expect(stdout).toBe('Hello, world!\n'); }); test('should print scripts', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} -pe "import { example } from './complex/index';example()"` ); expect(err).toBe(null); expect(stdout).toBe('example\n'); }); test('should provide registered information globally', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} env` ); expect(err).toBe(null); expect(stdout).toBe('object\n'); }); test('should provide registered information on register', async () => { const { err, stdout } = await exec(`node -r ts-node/register env.ts`, { cwd: TEST_DIR, }); expect(err).toBe(null); expect(stdout).toBe('object\n'); }); test('should allow js', async () => { const { err, stdout } = await exec( [ CMD_TS_NODE_WITH_PROJECT_FLAG, '-O "{\\"allowJs\\":true}"', '-pe "import { main } from \'./allow-js/run\';main()"', ].join(' ') ); expect(err).toBe(null); expect(stdout).toBe('hello world\n'); }); test('should include jsx when `allow-js` true', async () => { const { err, stdout } = await exec( [ CMD_TS_NODE_WITH_PROJECT_FLAG, '-O "{\\"allowJs\\":true}"', '-pe "import { Foo2 } from \'./allow-js/with-jsx\'; Foo2.sayHi()"', ].join(' ') ); expect(err).toBe(null); expect(stdout).toBe('hello world\n'); }); test('should 
eval code', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} -e "import * as m from './module';console.log(m.example('test'))"` ); expect(err).toBe(null); expect(stdout).toBe('TEST\n'); }); test('should import empty files', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} -e "import './empty'"` ); expect(err).toBe(null); expect(stdout).toBe(''); }); test('should throw errors', async () => { const { err } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} -e "import * as m from './module';console.log(m.example(123))"` ); if (err === null) { throw new Error('Command was expected to fail, but it succeeded.'); } expect(err.message).toMatch( new RegExp( "TS2345: Argument of type '(?:number|123)' " + "is not assignable to parameter of type 'string'\\." ) ); }); test('should be able to ignore diagnostic', async () => { const { err } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} --ignore-diagnostics 2345 -e "import * as m from './module';console.log(m.example(123))"` ); if (err === null) { throw new Error('Command was expected to fail, but it succeeded.'); } expect(err.message).toMatch( /TypeError: (?:(?:undefined|foo\.toUpperCase) is not a function|.*has no method \'toUpperCase\')/ ); }); test('should work with source maps', async () => { const { err } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} "throw error"` ); if (err === null) { throw new Error('Command was expected to fail, but it succeeded.'); } expect(err.message).toMatch( [ `${join(TEST_DIR, 'throw error.ts')}:100`, " bar() { throw new Error('this is a demo'); }", ' ^', 'Error: this is a demo', ].join('\n') ); }); test('should work with source maps in --transpile-only mode', async () => { const { err } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} --transpile-only "throw error"` ); if (err === null) { throw new Error('Command was expected to fail, but it succeeded.'); } expect(err.message).toMatch( [ `${join(TEST_DIR, 'throw 
error.ts')}:100`, " bar() { throw new Error('this is a demo'); }", ' ^', 'Error: this is a demo', ].join('\n') ); }); test('eval should work with source maps', async () => { const { err } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} -pe "import './throw error'"` ); if (err === null) { throw new Error('Command was expected to fail, but it succeeded.'); } expect(err.message).toMatch( [ `${join(TEST_DIR, 'throw error.ts')}:100`, " bar() { throw new Error('this is a demo'); }", ' ^', ].join('\n') ); }); test('should support transpile only mode', async () => { const { err } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} --transpile-only -pe "x"` ); if (err === null) { throw new Error('Command was expected to fail, but it succeeded.'); } expect(err.message).toMatch('ReferenceError: x is not defined'); }); test('should throw error even in transpileOnly mode', async () => { const { err } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} --transpile-only -pe "console."` ); if (err === null) { throw new Error('Command was expected to fail, but it succeeded.'); } expect(err.message).toMatch('error TS1003: Identifier expected'); }); for (const flavor of [ '--transpiler ts-node/transpilers/swc transpile-only-swc', '--transpiler ts-node/transpilers/swc-experimental transpile-only-swc', '--swc transpile-only-swc', 'transpile-only-swc-via-tsconfig', 'transpile-only-swc-shorthand-via-tsconfig', ]) { test(`should support swc and third-party transpilers: ${flavor}`, async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITHOUT_PROJECT_FLAG} ${flavor}`, { env: { ...process.env, NODE_OPTIONS: `${ process.env.NODE_OPTIONS || '' } --require ${require.resolve('../../tests/spy-swc-transpiler')}`, }, } ); expect(err).toBe(null); expect(stdout).toMatch( 'Hello World! 
swc transpiler invocation count: 1\n' ); }); } test.suite('should support `traceResolution` compiler option', (test) => { test('prints traces before running code when enabled', async () => { const { err, stdout } = await exec( `${BIN_PATH} --compiler-options="{ \\"traceResolution\\": true }" -e "console.log('ok')"` ); expect(err).toBeNull(); expect(stdout).toContain('======== Resolving module'); expect(stdout.endsWith('ok\n')).toBe(true); }); test('does NOT print traces when not enabled', async () => { const { err, stdout } = await exec( `${BIN_PATH} -e "console.log('ok')"` ); expect(err).toBeNull(); expect(stdout).not.toContain('======== Resolving module'); expect(stdout.endsWith('ok\n')).toBe(true); }); }); if (nodeSupportsEsmHooks) { test('swc transpiler supports native ESM emit', async () => { const { err, stdout } = await exec( `${CMD_ESM_LOADER_WITHOUT_PROJECT} ./index.ts`, { cwd: resolve(TEST_DIR, 'transpile-only-swc-native-esm'), } ); expect(err).toBe(null); expect(stdout).toMatch('Hello file://'); }); } test('should pipe into `ts-node` and evaluate', async () => { const execPromise = exec(CMD_TS_NODE_WITH_PROJECT_FLAG); execPromise.child.stdin!.end("console.log('hello')"); const { err, stdout } = await execPromise; expect(err).toBe(null); expect(stdout).toBe('hello\n'); }); test('should pipe into `ts-node`', async () => { const execPromise = exec(`${CMD_TS_NODE_WITH_PROJECT_FLAG} -p`); execPromise.child.stdin!.end('true'); const { err, stdout } = await execPromise; expect(err).toBe(null); expect(stdout).toBe('true\n'); }); test('should pipe into an eval script', async () => { const execPromise = exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} --transpile-only -pe "process.stdin.isTTY"` ); execPromise.child.stdin!.end('true'); const { err, stdout } = await execPromise; expect(err).toBe(null); expect(stdout).toBe('undefined\n'); }); test('should support require flags', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} -r 
./hello-world -pe "console.log('success')"` ); expect(err).toBe(null); expect(stdout).toBe('Hello, world!\nsuccess\nundefined\n'); }); test('should support require from node modules', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} -r typescript -e "console.log('success')"` ); expect(err).toBe(null); expect(stdout).toBe('success\n'); }); test('should use source maps with react tsx', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} "throw error react tsx.tsx"` ); expect(err).not.toBe(null); expect(err!.message).toMatch( [ `${join(TEST_DIR, './throw error react tsx.tsx')}:100`, " bar() { throw new Error('this is a demo'); }", ' ^', 'Error: this is a demo', ].join('\n') ); }); test('should use source maps with react tsx in --transpile-only mode', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} --transpile-only "throw error react tsx.tsx"` ); expect(err).not.toBe(null); expect(err!.message).toMatch( [ `${join(TEST_DIR, './throw error react tsx.tsx')}:100`, " bar() { throw new Error('this is a demo'); }", ' ^', 'Error: this is a demo', ].join('\n') ); }); test('should allow custom typings', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} custom-types` ); // This error comes from *node*, meaning TypeScript respected the custom types (good) but *node* could not find the non-existent module (expected) expect(err?.message).toMatch( /Error: Cannot find module 'does-not-exist'/ ); }); test('should preserve `ts-node` context with child process', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} child-process` ); expect(err).toBe(null); expect(stdout).toBe('Hello, world!\n'); }); test('should import js before ts by default', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} import-order/compiled` ); expect(err).toBe(null); expect(stdout).toBe('Hello, JavaScript!\n'); }); 
const preferTsExtsEntrypoint = semver.gte(process.version, '12.0.0') ? 'import-order/compiled' : 'import-order/require-compiled'; test('should import ts before js when --prefer-ts-exts flag is present', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} --prefer-ts-exts ${preferTsExtsEntrypoint}` ); expect(err).toBe(null); expect(stdout).toBe('Hello, TypeScript!\n'); }); test('should import ts before js when TS_NODE_PREFER_TS_EXTS env is present', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} ${preferTsExtsEntrypoint}`, { env: { ...process.env, TS_NODE_PREFER_TS_EXTS: 'true' }, } ); expect(err).toBe(null); expect(stdout).toBe('Hello, TypeScript!\n'); }); test('should ignore .d.ts files', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} import-order/importer` ); expect(err).toBe(null); expect(stdout).toBe('Hello, World!\n'); }); test.suite('issue #884', (test) => { // TODO disabled because it consistently fails on Windows on TS 2.7 test.skipIf( process.platform === 'win32' && semver.satisfies(ts.version, '2.7') ); test('should compile', async (t) => { const { err, stdout } = await exec( `"${BIN_PATH}" --project issue-884/tsconfig.json issue-884` ); expect(err).toBe(null); expect(stdout).toBe(''); }); }); test.suite('issue #986', (test) => { test('should not compile', async () => { const { err, stdout, stderr } = await exec( `"${BIN_PATH}" --project issue-986/tsconfig.json issue-986` ); expect(err).not.toBe(null); expect(stderr).toMatch("Cannot find name 'TEST'"); // TypeScript error. expect(stdout).toBe(''); }); test('should compile with `--files`', async () => { const { err, stdout, stderr } = await exec( `"${BIN_PATH}" --files --project issue-986/tsconfig.json issue-986` ); expect(err).not.toBe(null); expect(stderr).toMatch('ReferenceError: TEST is not defined'); // Runtime error. 
expect(stdout).toBe(''); }); }); if (semver.gte(ts.version, '2.7.0')) { test('should locate tsconfig relative to entry-point by default', async () => { const { err, stdout } = await exec(`${BIN_PATH} ../a/index`, { cwd: join(TEST_DIR, 'cwd-and-script-mode/b'), }); expect(err).toBe(null); expect(stdout).toMatch(/plugin-a/); }); test('should locate tsconfig relative to entry-point via ts-node-script', async () => { const { err, stdout } = await exec(`${BIN_SCRIPT_PATH} ../a/index`, { cwd: join(TEST_DIR, 'cwd-and-script-mode/b'), }); expect(err).toBe(null); expect(stdout).toMatch(/plugin-a/); }); test('should locate tsconfig relative to entry-point with --script-mode', async () => { const { err, stdout } = await exec( `${BIN_PATH} --script-mode ../a/index`, { cwd: join(TEST_DIR, 'cwd-and-script-mode/b'), } ); expect(err).toBe(null); expect(stdout).toMatch(/plugin-a/); }); test('should locate tsconfig relative to cwd via ts-node-cwd', async () => { const { err, stdout } = await exec(`${BIN_CWD_PATH} ../a/index`, { cwd: join(TEST_DIR, 'cwd-and-script-mode/b'), }); expect(err).toBe(null); expect(stdout).toMatch(/plugin-b/); }); test('should locate tsconfig relative to cwd in --cwd-mode', async () => { const { err, stdout } = await exec( `${BIN_PATH} --cwd-mode ../a/index`, { cwd: join(TEST_DIR, 'cwd-and-script-mode/b') } ); expect(err).toBe(null); expect(stdout).toMatch(/plugin-b/); }); test('should locate tsconfig relative to realpath, not symlink, when entrypoint is a symlink', async (t) => { if ( lstatSync( join(TEST_DIR, 'main-realpath/symlink/symlink.tsx') ).isSymbolicLink() ) { const { err, stdout } = await exec( `${BIN_PATH} main-realpath/symlink/symlink.tsx` ); expect(err).toBe(null); expect(stdout).toBe(''); } else { t.log('Skipping'); return; } }); } test.suite('should read ts-node options from tsconfig.json', (test) => { const BIN_EXEC = `"${BIN_PATH}" --project tsconfig-options/tsconfig.json`; test('should override compiler options from env', async () => { 
const { err, stdout } = await exec( `${BIN_EXEC} tsconfig-options/log-options1.js`, { env: { ...process.env, TS_NODE_COMPILER_OPTIONS: '{"typeRoots": ["env-typeroots"]}', }, } ); expect(err).toBe(null); const { config } = JSON.parse(stdout); expect(config.options.typeRoots).toEqual([ join(TEST_DIR, './tsconfig-options/env-typeroots').replace( /\\/g, '/' ), ]); }); test('should use options from `tsconfig.json`', async () => { const { err, stdout } = await exec( `${BIN_EXEC} tsconfig-options/log-options1.js` ); expect(err).toBe(null);<|fim▁hole|> '/' ), ]); expect(config.options.types).toEqual(['tsconfig-tsnode-types']); expect(options.pretty).toBe(undefined); expect(options.skipIgnore).toBe(false); expect(options.transpileOnly).toBe(true); expect(options.require).toEqual([ join(TEST_DIR, './tsconfig-options/required1.js'), ]); }); test('should ignore empty strings in the array options', async () => { const { err, stdout } = await exec( `${BIN_EXEC} tsconfig-options/log-options1.js`, { env: { ...process.env, TS_NODE_IGNORE: '', }, } ); expect(err).toBe(null); const { options } = JSON.parse(stdout); expect(options.ignore).toEqual([]); }); test('should have flags override / merge with `tsconfig.json`', async () => { const { err, stdout } = await exec( `${BIN_EXEC} --skip-ignore --compiler-options "{\\"types\\":[\\"flags-types\\"]}" --require ./tsconfig-options/required2.js tsconfig-options/log-options2.js` ); expect(err).toBe(null); const { options, config } = JSON.parse(stdout); expect(config.options.typeRoots).toEqual([ join(TEST_DIR, './tsconfig-options/tsconfig-typeroots').replace( /\\/g, '/' ), ]); expect(config.options.types).toEqual(['flags-types']); expect(options.pretty).toBe(undefined); expect(options.skipIgnore).toBe(true); expect(options.transpileOnly).toBe(true); expect(options.require).toEqual([ join(TEST_DIR, './tsconfig-options/required1.js'), './tsconfig-options/required2.js', ]); }); test('should have `tsconfig.json` override environment', async () => 
{ const { err, stdout } = await exec( `${BIN_EXEC} tsconfig-options/log-options1.js`, { env: { ...process.env, TS_NODE_PRETTY: 'true', TS_NODE_SKIP_IGNORE: 'true', }, } ); expect(err).toBe(null); const { options, config } = JSON.parse(stdout); expect(config.options.typeRoots).toEqual([ join(TEST_DIR, './tsconfig-options/tsconfig-typeroots').replace( /\\/g, '/' ), ]); expect(config.options.types).toEqual(['tsconfig-tsnode-types']); expect(options.pretty).toBe(true); expect(options.skipIgnore).toBe(false); expect(options.transpileOnly).toBe(true); expect(options.require).toEqual([ join(TEST_DIR, './tsconfig-options/required1.js'), ]); }); if (tsSupportsTsconfigInheritanceViaNodePackages) { test('should pull ts-node options from extended `tsconfig.json`', async () => { const { err, stdout } = await exec( `${BIN_PATH} --show-config --project ./tsconfig-extends/tsconfig.json` ); expect(err).toBe(null); const config = JSON.parse(stdout); expect(config['ts-node'].require).toEqual([ resolve(TEST_DIR, 'tsconfig-extends/other/require-hook.js'), ]); expect(config['ts-node'].scopeDir).toBe( resolve(TEST_DIR, 'tsconfig-extends/other/scopedir') ); expect(config['ts-node'].preferTsExts).toBe(true); }); } }); test.suite( 'should use implicit @tsconfig/bases config when one is not loaded from disk', (_test) => { const test = _test.context(async (t) => ({ tempDir: mkdtempSync(join(tmpdir(), 'ts-node-spec')), })); if ( semver.gte(ts.version, '3.5.0') && semver.gte(process.versions.node, '14.0.0') ) { const libAndTarget = semver.gte(process.versions.node, '16.0.0') ? 
'es2021' : 'es2020'; test('implicitly uses @tsconfig/node14 or @tsconfig/node16 compilerOptions when both TS and node versions support it', async (t) => { // node14 and node16 configs are identical, hence the "or" const { context: { tempDir }, } = t; const { err: err1, stdout: stdout1, stderr: stderr1, } = await exec(`${BIN_PATH} --showConfig`, { cwd: tempDir }); expect(err1).toBe(null); t.like(JSON.parse(stdout1), { compilerOptions: { target: libAndTarget, lib: [libAndTarget], }, }); const { err: err2, stdout: stdout2, stderr: stderr2, } = await exec(`${BIN_PATH} -pe 10n`, { cwd: tempDir }); expect(err2).toBe(null); expect(stdout2).toBe('10n\n'); }); } else { test('implicitly uses @tsconfig/* lower than node14 (node12) when either TS or node versions do not support @tsconfig/node14', async ({ context: { tempDir }, }) => { const { err, stdout, stderr } = await exec(`${BIN_PATH} -pe 10n`, { cwd: tempDir, }); expect(err).not.toBe(null); expect(stderr).toMatch( /BigInt literals are not available when targeting lower than|error TS2304: Cannot find name 'n'/ ); }); } test('implicitly loads @types/node even when not installed within local directory', async ({ context: { tempDir }, }) => { const { err, stdout, stderr } = await exec( `${BIN_PATH} -pe process.env.foo`, { cwd: tempDir, env: { ...process.env, foo: 'hello world' }, } ); expect(err).toBe(null); expect(stdout).toBe('hello world\n'); }); test('implicitly loads local @types/node', async ({ context: { tempDir }, }) => { await xfs.copyPromise( npath.toPortablePath(tempDir), npath.toPortablePath(join(TEST_DIR, 'local-types-node')) ); const { err, stdout, stderr } = await exec( `${BIN_PATH} -pe process.env.foo`, { cwd: tempDir, env: { ...process.env, foo: 'hello world' }, } ); expect(err).not.toBe(null); expect(stderr).toMatch( "Property 'env' does not exist on type 'LocalNodeTypes_Process'" ); }); } ); test.suite( 'should bundle @tsconfig/bases to be used in your own tsconfigs', (test) => { 
test.runIf(tsSupportsTsconfigInheritanceViaNodePackages); const macro = test.macro((nodeVersion: string) => async (t) => { const config = require(`@tsconfig/${nodeVersion}/tsconfig.json`); const { err, stdout, stderr } = await exec( `${BIN_PATH} --showConfig -e 10n`, { cwd: join(TEST_DIR, 'tsconfig-bases', nodeVersion), } ); expect(err).toBe(null); t.like(JSON.parse(stdout), { compilerOptions: { target: config.compilerOptions.target, lib: config.compilerOptions.lib, }, }); }); test(`ts-node/node10/tsconfig.json`, macro, 'node10'); test(`ts-node/node12/tsconfig.json`, macro, 'node12'); test(`ts-node/node14/tsconfig.json`, macro, 'node14'); test(`ts-node/node16/tsconfig.json`, macro, 'node16'); } ); test.suite('compiler host', (test) => { test('should execute cli', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} --compiler-host hello-world` ); expect(err).toBe(null); expect(stdout).toBe('Hello, world!\n'); }); }); test('should transpile files inside a node_modules directory when not ignored', async () => { const { err, stdout, stderr } = await exec( `${CMD_TS_NODE_WITHOUT_PROJECT_FLAG} from-node-modules/from-node-modules` ); if (err) throw new Error( `Unexpected error: ${err}\nstdout:\n${stdout}\nstderr:\n${stderr}` ); expect(JSON.parse(stdout)).toEqual({ external: { tsmri: { name: 'typescript-module-required-internally' }, jsmri: { name: 'javascript-module-required-internally' }, tsmii: { name: 'typescript-module-imported-internally' }, jsmii: { name: 'javascript-module-imported-internally' }, }, tsmie: { name: 'typescript-module-imported-externally' }, jsmie: { name: 'javascript-module-imported-externally' }, tsmre: { name: 'typescript-module-required-externally' }, jsmre: { name: 'javascript-module-required-externally' }, }); }); test.suite('should respect maxNodeModulesJsDepth', (test) => { test('for unscoped modules', async () => { const { err, stdout, stderr } = await exec( `${CMD_TS_NODE_WITHOUT_PROJECT_FLAG} 
maxnodemodulesjsdepth` ); expect(err).not.toBe(null); expect(stderr.replace(/\r\n/g, '\n')).toMatch( 'TSError: ⨯ Unable to compile TypeScript:\n' + "maxnodemodulesjsdepth/other.ts(4,7): error TS2322: Type 'string' is not assignable to type 'boolean'.\n" + '\n' ); }); test('for @scoped modules', async () => { const { err, stdout, stderr } = await exec( `${CMD_TS_NODE_WITHOUT_PROJECT_FLAG} maxnodemodulesjsdepth-scoped` ); expect(err).not.toBe(null); expect(stderr.replace(/\r\n/g, '\n')).toMatch( 'TSError: ⨯ Unable to compile TypeScript:\n' + "maxnodemodulesjsdepth-scoped/other.ts(7,7): error TS2322: Type 'string' is not assignable to type 'boolean'.\n" + '\n' ); }); }); if (tsSupportsShowConfig) { test('--showConfig should log resolved configuration', async (t) => { function native(path: string) { return path.replace(/\/|\\/g, pathSep); } function posix(path: string) { return path.replace(/\/|\\/g, '/'); } const { err, stdout } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} --showConfig` ); expect(err).toBe(null); t.is( stdout, JSON.stringify( { 'ts-node': { cwd: native(`${ROOT_DIR}/tests`), projectSearchDir: native(`${ROOT_DIR}/tests`), project: native(`${ROOT_DIR}/tests/tsconfig.json`), }, compilerOptions: { target: 'es6', jsx: 'react', noEmit: false, strict: true, typeRoots: [ posix(`${ROOT_DIR}/tests/typings`), posix(`${ROOT_DIR}/node_modules/@types`), ], sourceMap: true, inlineSourceMap: false, inlineSources: true, declaration: false, outDir: './.ts-node', module: 'commonjs', }, }, null, 2 ) + '\n' ); }); } else { test('--show-config should log error message when used with old typescript versions', async (t) => { const { err, stderr } = await exec( `${CMD_TS_NODE_WITH_PROJECT_FLAG} --showConfig` ); expect(err).not.toBe(null); expect(stderr).toMatch('Error: --showConfig requires'); }); } test('should support compiler scope specified via tsconfig.json', async (t) => { const { err, stderr, stdout } = await exec( `${CMD_TS_NODE_WITHOUT_PROJECT_FLAG} --project 
./scope/c/config/tsconfig.json ./scope/c/index.js` ); expect(err).toBe(null); expect(stdout).toBe(`value\nFailures: 0\n`); }); }); test.suite('create', (_test) => { const test = _test.context(async (t) => { return { service: t.context.tsNodeUnderTest.create({ compilerOptions: { target: 'es5' }, skipProject: true, }), }; }); test('should create generic compiler instances', ({ context: { service }, }) => { const output = service.compile('const x = 10', 'test.ts'); expect(output).toMatch('var x = 10;'); }); test.suite('should get type information', (test) => { test('given position of identifier', ({ context: { service } }) => { expect( service.getTypeInfo('/**jsdoc here*/const x = 10', 'test.ts', 21) ).toEqual({ comment: 'jsdoc here', name: 'const x: 10', }); }); test('given position that does not point to an identifier', ({ context: { service }, }) => { expect( service.getTypeInfo('/**jsdoc here*/const x = 10', 'test.ts', 0) ).toEqual({ comment: '', name: '', }); }); }); }); test.suite('issue #1098', (test) => { function testIgnored( ignored: tsNodeTypes.Service['ignored'], allowed: string[], disallowed: string[] ) { for (const ext of allowed) { // should accept ${ext} files expect(ignored(join(DIST_DIR, `index${ext}`))).toBe(false); } for (const ext of disallowed) { // should ignore ${ext} files expect(ignored(join(DIST_DIR, `index${ext}`))).toBe(true); } } test('correctly filters file extensions from the compiler when allowJs=false and jsx=false', (t) => { const { ignored } = t.context.tsNodeUnderTest.create({ compilerOptions: {}, skipProject: true, }); testIgnored( ignored, ['.ts', '.d.ts'], ['.js', '.tsx', '.jsx', '.mjs', '.cjs', '.xyz', ''] ); }); test('correctly filters file extensions from the compiler when allowJs=true and jsx=false', (t) => { const { ignored } = t.context.tsNodeUnderTest.create({ compilerOptions: { allowJs: true }, skipProject: true, }); testIgnored( ignored, ['.ts', '.js', '.d.ts'], ['.tsx', '.jsx', '.mjs', '.cjs', '.xyz', ''] ); }); 
test('correctly filters file extensions from the compiler when allowJs=false and jsx=true', (t) => { const { ignored } = t.context.tsNodeUnderTest.create({ compilerOptions: { allowJs: false, jsx: 'preserve' }, skipProject: true, }); testIgnored( ignored, ['.ts', '.tsx', '.d.ts'], ['.js', '.jsx', '.mjs', '.cjs', '.xyz', ''] ); }); test('correctly filters file extensions from the compiler when allowJs=true and jsx=true', (t) => { const { ignored } = t.context.tsNodeUnderTest.create({ compilerOptions: { allowJs: true, jsx: 'preserve' }, skipProject: true, }); testIgnored( ignored, ['.ts', '.tsx', '.js', '.jsx', '.d.ts'], ['.mjs', '.cjs', '.xyz', ''] ); }); }); }); test('Falls back to transpileOnly when ts compiler returns emitSkipped', async () => { const { err, stdout } = await exec( `${CMD_TS_NODE_WITHOUT_PROJECT_FLAG} --project tsconfig.json ./outside-rootDir/foo.js`, { cwd: join(TEST_DIR, 'emit-skipped-fallback'), } ); expect(err).toBe(null); expect(stdout).toBe('foo\n'); }); test.suite('node environment', (test) => { test.suite('Sets argv and execArgv correctly in forked processes', (test) => { forkTest(`node --no-warnings ${BIN_PATH_JS}`, BIN_PATH_JS, '--no-warnings'); forkTest( `${BIN_PATH}`, process.platform === 'win32' ? 
BIN_PATH_JS : BIN_PATH ); function forkTest( command: string, expectParentArgv0: string, nodeFlag?: string ) { test(command, async (t) => { const { err, stderr, stdout } = await exec( `${command} --skipIgnore ./recursive-fork/index.ts argv2` ); expect(err).toBeNull(); expect(stderr).toBe(''); const generations = stdout.split('\n'); const expectation = { execArgv: [nodeFlag, BIN_PATH_JS, '--skipIgnore'].filter((v) => v), argv: [ // Note: argv[0] is *always* BIN_PATH_JS in child & grandchild expectParentArgv0, resolve(TEST_DIR, 'recursive-fork/index.ts'), 'argv2', ], }; expect(JSON.parse(generations[0])).toMatchObject(expectation); expectation.argv[0] = BIN_PATH_JS; expect(JSON.parse(generations[1])).toMatchObject(expectation); expect(JSON.parse(generations[2])).toMatchObject(expectation); }); } }); }); test('Detect when typescript adds new ModuleKind values; flag as a failure so we can update our code flagged [MUST_UPDATE_FOR_NEW_MODULEKIND]', async () => { // We have marked a few places in our code with MUST_UPDATE_FOR_NEW_MODULEKIND to make it easier to update them when TS adds new ModuleKinds const foundKeys: string[] = []; function check(value: number, name: string, required: boolean) { if (required) expect(ts.ModuleKind[name]).toBe(value); if (ts.ModuleKind[value] === undefined) { expect(ts.ModuleKind[name]).toBeUndefined(); } else { expect(ts.ModuleKind[value]).toBe(name); foundKeys.push(name, `${value}`); } } check(0, 'None', true); check(1, 'CommonJS', true); check(2, 'AMD', true); check(3, 'UMD', true); check(4, 'System', true); check(5, 'ES2015', true); try { check(6, 'ES2020', false); check(99, 'ESNext', true); } catch { // the value changed: is `99` now, but was `6` in TS 2.7 check(6, 'ESNext', true); expect(ts.ModuleKind[99]).toBeUndefined(); } check(7, 'ES2022', false); check(100, 'Node12', false); check(199, 'NodeNext', false); const actualKeys = Object.keys(ts.ModuleKind); actualKeys.sort(); foundKeys.sort(); expect(actualKeys).toEqual(foundKeys); 
});<|fim▁end|>
const { options, config } = JSON.parse(stdout); expect(config.options.typeRoots).toEqual([ join(TEST_DIR, './tsconfig-options/tsconfig-typeroots').replace( /\\/g,
<|file_name|>p3starscreen.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os import sys import argparse import pat3dem.star as p3s def main(): progname = os.path.basename(sys.argv[0]) usage = progname + """ [options] <a star file> Write two star files after screening by an item and a cutoff in the star file. Write one star file after screening by a file containing blacklist/whitelist (either keyword or item). """ args_def = {'screen':'0', 'cutoff':'00', 'sfile':'0', 'white':0} parser = argparse.ArgumentParser() parser.add_argument("star", nargs='*', help="specify a star file to be screened") parser.add_argument("-s", "--screen", type=str, help="specify the item, by which the star file will be screened, by default {} (no screening). e.g., 'OriginX'".format(args_def['screen'])) parser.add_argument("-c", "--cutoff", type=str, help="specify the cutoff, by default '{}' (-s and -sf will be combined)".format(args_def['cutoff'])) parser.add_argument("-sf", "--sfile", type=str, help="specify a file containing a keyword each line, by default '{}' (no screening). e.g., 'f.txt'".format(args_def['sfile'])) parser.add_argument("-w", "--white", type=int, help="specify as 1 if you provide a whitelist in -sf".format(args_def['white'])) args = parser.parse_args() if len(sys.argv) == 1: print "usage: " + usage print "Please run '" + progname + " -h' for detailed options." 
sys.exit(1) # get default values for i in args_def: if args.__dict__[i] == None: args.__dict__[i] = args_def[i] # preprocess -sf if args.sfile != '0': lines_sf = open(args.sfile).readlines() lines_sfile = [] for line in lines_sf: line = line.strip() if line != '': lines_sfile += [line] # get the star file star = args.star[0] basename = os.path.basename(os.path.splitext(star)[0]) star_dict = p3s.star_parse(star, 'data_') header = star_dict['data_'] + star_dict['loop_'] header_len = len(header) with open(star) as read_star: lines = read_star.readlines()[header_len:-1] if args.screen != '0': # get the sc number scn = star_dict['_rln'+args.screen] if args.cutoff != '00': # Name the output files screened1 = '{}_screened_{}-gt-{}.star'.format(basename, args.screen, args.cutoff) screened2 = '{}_screened_{}-le-{}.star'.format(basename, args.screen, args.cutoff) write_screen1 = open(screened1, 'w') write_screen1.write(''.join(header)) write_screen2 = open(screened2, 'w') write_screen2.write(''.join(header)) for line in lines: if float(line.split()[scn]) > float(args.cutoff): write_screen1.write(line) else: write_screen2.write(line) write_screen1.write(' \n') write_screen1.close() write_screen2.write(' \n') write_screen2.close() print 'The screened star files have been written in {} and {}!'.format(screened1, screened2) elif args.sfile != '0': with open('{}_screened.star'.format(basename), 'w') as write_screen: write_screen.write(''.join(header)) if args.white == 0: for line in lines: key = line.split()[scn] if key not in lines_sfile: print 'Include {}.'.format(key) write_screen.write(line) else: for line in lines: key = line.split()[scn] if key in lines_sfile: print 'Include {}.'.format(key) write_screen.write(line) write_screen.write(' \n') elif args.sfile != '0': with open('{}_screened.star'.format(basename), 'w') as write_screen: write_screen.write(''.join(header)) if args.white == 0: for line in lines: skip = 0 for key in lines_sfile: if key in line: skip = 1 print 
'Skip {}.'.format(key) break if skip == 0: write_screen.write(line) else: for line in lines: for key in lines_sfile: if key in line: print 'Include {}.'.format(key) write_screen.write(line) break write_screen.write(' \n') if __name__ == '__main__':<|fim▁hole|> main()<|fim▁end|>
goog.provide('ol.test.source.WMTS');

describe('ol.source.WMTS', function() {

  describe('when creating options from capabilities', function() {
    var parser = new ol.format.WMTSCapabilities();
    var capabilities;

    // Load and parse the OGC sample capabilities document once per suite.
    before(function(done) {
      afterLoadText('spec/ol/format/wmts/ogcsample.xml', function(xml) {
        try {
          capabilities = parser.read(xml);
        } catch (e) {
          done(e);
        }
        done();
      });
    });

    it('can create KVP options from spec/ol/format/wmts/ogcsample.xml',
        function() {
          var config = {
            layer: 'BlueMarbleNextGeneration',
            matrixSet: 'google3857'
          };
          var opts = ol.source.WMTS.optionsFromCapabilities(
              capabilities, config);
          expect(opts.urls).to.be.an('array');
          expect(opts.urls).to.have.length(1);
          expect(opts.urls[0]).to.be.eql(
              'http://www.maps.bob/cgi-bin/MiraMon5_0.cgi?');
          expect(opts.layer).to.be.eql('BlueMarbleNextGeneration');
          expect(opts.matrixSet).to.be.eql('google3857');
          expect(opts.format).to.be.eql('image/jpeg');
          expect(opts.projection).to.be.a(ol.proj.Projection);
          expect(opts.projection).to.be.eql(ol.proj.get('EPSG:3857'));
          expect(opts.requestEncoding).to.be.eql('KVP');
          expect(opts.tileGrid).to.be.a(ol.tilegrid.WMTS);
          expect(opts.style).to.be.eql('DarkBlue');
          expect(opts.dimensions).to.eql({Time: '20110805'});
        });

    it('can create REST options from spec/ol/format/wmts/ogcsample.xml',
        function() {
          var config = {
            layer: 'BlueMarbleNextGeneration',
            matrixSet: 'google3857',
            requestEncoding: 'REST'
          };
          var opts = ol.source.WMTS.optionsFromCapabilities(
              capabilities, config);
          expect(opts.urls).to.be.an('array');
          expect(opts.urls).to.have.length(1);
          expect(opts.urls[0]).to.be.eql(
              'http://www.example.com/wmts/coastlines/{TileMatrix}/{TileRow}/{TileCol}.png');
          expect(opts.layer).to.be.eql('BlueMarbleNextGeneration');
          expect(opts.matrixSet).to.be.eql('google3857');
          expect(opts.format).to.be.eql('image/png');
          expect(opts.projection).to.be.a(ol.proj.Projection);
          expect(opts.projection).to.be.eql(ol.proj.get('EPSG:3857'));
          expect(opts.requestEncoding).to.be.eql('REST');
          expect(opts.tileGrid).to.be.a(ol.tilegrid.WMTS);
          expect(opts.style).to.be.eql('DarkBlue');
          expect(opts.dimensions).to.eql({Time: '20110805'});
        });

    it('can find a MatrixSet by SRS identifier', function() {
      var opts = ol.source.WMTS.optionsFromCapabilities(capabilities, {
        layer: 'BlueMarbleNextGeneration',
        projection: 'EPSG:3857',
        requestEncoding: 'REST'
      });
      expect(opts.matrixSet).to.be.eql('google3857');
    });
  });

  describe('when creating tileUrlFunction', function() {

    // Shared tile grid for both REST template tests below.
    function makeGrid() {
      return new ol.tilegrid.WMTS({
        origin: [-20037508.342789244, 20037508.342789244],
        resolutions: [559082264.029 * 0.28E-3,
          279541132.015 * 0.28E-3, 139770566.007 * 0.28E-3],
        matrixIds: [0, 1, 2]
      });
    }

    it('can replace lowercase REST parameters', function() {
      var wmtsSource = new ol.source.WMTS({
        layer: 'layer',
        style: 'default',
        urls: ['http://www.example.com/wmts/coastlines/{layer}/{style}/' +
            '{tilematrixset}/{TileMatrix}/{TileCol}/{TileRow}.jpg'],
        matrixSet: 'EPSG:3857',
        requestEncoding: 'REST',
        tileGrid: makeGrid()
      });
      var proj = ol.proj.get('EPSG:3857');
      var coord = wmtsSource.getTileCoordForTileUrlFunction([1, 1, -2]);
      var tileUrl = wmtsSource.tileUrlFunction(coord, 1, proj);
      expect(tileUrl).to.be.eql('http://www.example.com/wmts/coastlines/' +
          'layer/default/EPSG:3857/1/1/1.jpg');
    });

    it('can replace camelcase REST parameters', function() {
      var wmtsSource = new ol.source.WMTS({
        layer: 'layer',
        style: 'default',
        urls: ['http://www.example.com/wmts/coastlines/{Layer}/{Style}/' +
            '{tilematrixset}/{TileMatrix}/{TileCol}/{TileRow}.jpg'],
        matrixSet: 'EPSG:3857',
        requestEncoding: 'REST',
        tileGrid: makeGrid()
      });
      var proj = ol.proj.get('EPSG:3857');
      var coord = wmtsSource.getTileCoordForTileUrlFunction([1, 1, -2]);
      var tileUrl = wmtsSource.tileUrlFunction(coord, 1, proj);
      expect(tileUrl).to.be.eql('http://www.example.com/wmts/coastlines/' +
          'layer/default/EPSG:3857/1/1/1.jpg');
    });
  });

  describe('when creating options from Esri capabilities', function() {
    var parser = new ol.format.WMTSCapabilities();
    var capabilities;

    before(function(done) {
      afterLoadText('spec/ol/format/wmts/arcgis.xml', function(xml) {
        try {
          capabilities = parser.read(xml);
        } catch (e) {
          done(e);
        }
        done();
      });
    });

    it('can create KVP options from spec/ol/format/wmts/arcgis.xml',
        function() {
          var opts = ol.source.WMTS.optionsFromCapabilities(capabilities, {
            layer: 'Demographics_USA_Population_Density',
            matrixSet: 'default028mm'
          });
          expect(opts.urls).to.be.an('array');
          expect(opts.urls).to.have.length(1);
          expect(opts.urls[0]).to.be.eql(
              'http://services.arcgisonline.com/arcgis/rest/services/' +
              'Demographics/USA_Population_Density/MapServer/WMTS?');
        });
  });

  describe('#getUrls', function() {
    var baseOptions;
    var wmtsSource;

    beforeEach(function() {
      baseOptions = {
        layer: 'layer',
        style: 'default',
        matrixSet: 'foo',
        requestEncoding: 'REST',
        tileGrid: new ol.tilegrid.WMTS({
          origin: [0, 0],
          resolutions: [],
          matrixIds: []
        })
      };
    });

    describe('using a "url" option', function() {
      beforeEach(function() {
        baseOptions.url = 'some_wmts_url';
        wmtsSource = new ol.source.WMTS(baseOptions);
      });

      it('returns the WMTS URLs', function() {
        expect(wmtsSource.getUrls()).to.be.eql(['some_wmts_url']);
      });
    });

    describe('using a "urls" option', function() {
      beforeEach(function() {
        baseOptions.urls = ['some_wmts_url1', 'some_wmts_url2'];
        wmtsSource = new ol.source.WMTS(baseOptions);
      });

      it('returns the WMTS URLs', function() {
        expect(wmtsSource.getUrls()).to.be.eql(
            ['some_wmts_url1', 'some_wmts_url2']);
      });
    });
  });

  describe('#getRequestEncoding', function() {
    var wmtsSource;

    beforeEach(function() {
      wmtsSource = new ol.source.WMTS({
        layer: 'layer',
        style: 'default',
        matrixSet: 'foo',
        requestEncoding: 'REST',
        tileGrid: new ol.tilegrid.WMTS({
          origin: [0, 0],
          resolutions: [],
          matrixIds: []
        })
      });
    });

    it('returns the request encoding', function() {
      expect(wmtsSource.getRequestEncoding()).to.be.eql('REST');
    });
  });
});

goog.require('ol.format.WMTSCapabilities');
goog.require('ol.proj');
goog.require('ol.proj.Projection');
goog.require('ol.tilegrid.WMTS');
goog.require('ol.source.WMTS');
<|file_name|>exposure.rs<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ #include "ip.rsh" #pragma rs_fp_relaxed static float bright = 0.f; void setBright(float v) { bright = 255.f / (255.f - v);<|fim▁hole|>{ uchar4 out = {0, 0, 0, 255}; float3 t = convert_float3(in.rgb); out.rgb = convert_uchar3(clamp(convert_int3(t * bright), 0, 255)); return out; }<|fim▁end|>
} uchar4 RS_KERNEL exposure(uchar4 in)
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>// Type definitions for Node.js v6.x // Project: http://nodejs.org/ // Definitions by: Microsoft TypeScript <http://typescriptlang.org>, DefinitelyTyped <https://github.com/DefinitelyTyped/DefinitelyTyped>, Wilco Bakker <https://github.com/WilcoBakker>, Thomas Bouldin <https://github.com/inlined> // Definitions: https://github.com/DefinitelyTyped/DefinitelyTyped /************************************************ * * * Node.js v6.x API * * * ************************************************/ // This needs to be global to avoid TS2403 in case lib.dom.d.ts is present in the same build interface Console { Console: typeof NodeJS.Console; assert(value: any, message?: string, ...optionalParams: any[]): void; dir(obj: any, options?: NodeJS.InspectOptions): void; error(message?: any, ...optionalParams: any[]): void; info(message?: any, ...optionalParams: any[]): void; log(message?: any, ...optionalParams: any[]): void; time(label: string): void; timeEnd(label: string): void; trace(message?: any, ...optionalParams: any[]): void; warn(message?: any, ...optionalParams: any[]): void; } interface Error { stack?: string; } interface ErrorConstructor { captureStackTrace(targetObject: Object, constructorOpt?: Function): void; stackTraceLimit: number; } // compat for TypeScript 1.8 // if you use with --target es3 or --target es5 and use below definitions, // use the lib.es6.d.ts that is bundled with TypeScript 1.8. 
interface MapConstructor { } interface WeakMapConstructor { } interface SetConstructor { } interface WeakSetConstructor { } /************************************************ * * * GLOBAL * * * ************************************************/ declare var process: NodeJS.Process; declare var global: NodeJS.Global; declare var console: Console; declare var __filename: string; declare var __dirname: string; declare function setTimeout(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer; declare function clearTimeout(timeoutId: NodeJS.Timer): void; declare function setInterval(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer; declare function clearInterval(intervalId: NodeJS.Timer): void; declare function setImmediate(callback: (...args: any[]) => void, ...args: any[]): any; declare function clearImmediate(immediateId: any): void; interface NodeRequireFunction { (id: string): any; } interface NodeRequire extends NodeRequireFunction { resolve(id: string): string; cache: any; extensions: any; main: NodeModule | undefined; } declare var require: NodeRequire; interface NodeModule { exports: any; require: NodeRequireFunction; id: string; filename: string; loaded: boolean; parent: NodeModule | null; children: NodeModule[]; } declare var module: NodeModule; // Same as module.exports declare var exports: any; declare var SlowBuffer: { new (str: string, encoding?: string): Buffer; new (size: number): Buffer; new (size: Uint8Array): Buffer; new (array: any[]): Buffer; prototype: Buffer; isBuffer(obj: any): boolean; byteLength(string: string, encoding?: string): number; concat(list: Buffer[], totalLength?: number): Buffer; }; // Buffer class type BufferEncoding = "ascii" | "utf8" | "utf16le" | "ucs2" | "base64" | "latin1" | "binary" | "hex"; interface Buffer extends NodeBuffer { } /** * Raw data is stored in instances of the Buffer class. 
* A Buffer is similar to an array of integers but corresponds to a raw memory allocation outside the V8 heap. A Buffer cannot be resized. * Valid string encodings: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' */ declare var Buffer: { /** * Allocates a new buffer containing the given {str}. * * @param str String to store in buffer. * @param encoding encoding to use, optional. Default is 'utf8' */ new (str: string, encoding?: string): Buffer; /** * Allocates a new buffer of {size} octets. * * @param size count of octets to allocate. */ new (size: number): Buffer; /** * Allocates a new buffer containing the given {array} of octets. * * @param array The octets to store. */ new (array: Uint8Array): Buffer; /** * Produces a Buffer backed by the same allocated memory as * the given {ArrayBuffer}. * * * @param arrayBuffer The ArrayBuffer with which to share memory. */ new (arrayBuffer: ArrayBuffer): Buffer; /** * Allocates a new buffer containing the given {array} of octets. * * @param array The octets to store. */ new (array: any[]): Buffer; /** * Copies the passed {buffer} data onto a new {Buffer} instance. * * @param buffer The buffer to copy. */ new (buffer: Buffer): Buffer; prototype: Buffer; /** * Allocates a new Buffer using an {array} of octets. * * @param array */ from(array: any[]): Buffer; /** * When passed a reference to the .buffer property of a TypedArray instance, * the newly created Buffer will share the same allocated memory as the TypedArray. * The optional {byteOffset} and {length} arguments specify a memory range * within the {arrayBuffer} that will be shared by the Buffer. * * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() * @param byteOffset * @param length */ from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; /** * Copies the passed {buffer} data onto a new Buffer instance. 
* * @param buffer */ from(buffer: Buffer): Buffer; /** * Creates a new Buffer containing the given JavaScript string {str}. * If provided, the {encoding} parameter identifies the character encoding. * If not provided, {encoding} defaults to 'utf8'. * * @param str */ from(str: string, encoding?: string): Buffer; /** * Returns true if {obj} is a Buffer * * @param obj object to test. */ isBuffer(obj: any): obj is Buffer; /** * Returns true if {encoding} is a valid encoding argument. * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' * * @param encoding string to test. */ isEncoding(encoding: string): boolean; /** * Gives the actual byte length of a string. encoding defaults to 'utf8'. * This is not the same as String.prototype.length since that returns the number of characters in a string. * * @param string string to test. * @param encoding encoding used to evaluate (defaults to 'utf8') */ byteLength(string: string, encoding?: string): number; /** * Returns a buffer which is the result of concatenating all the buffers in the list together. * * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. * If the list has exactly one item, then the first item of the list is returned. * If the list has more than one item, then a new Buffer is created. * * @param list An array of Buffer objects to concatenate * @param totalLength Total length of the buffers when concatenated. * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. */ concat(list: Buffer[], totalLength?: number): Buffer; /** * The same as buf1.compare(buf2). */ compare(buf1: Buffer, buf2: Buffer): number; /** * Allocates a new buffer of {size} octets. * * @param size count of octets to allocate. * @param fill if specified, buffer will be initialized by calling buf.fill(fill). 
* If parameter is omitted, buffer will be filled with zeros. * @param encoding encoding used for call to buf.fill while initalizing */ alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; /** * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents * of the newly created Buffer are unknown and may contain sensitive data. * * @param size count of octets to allocate */ allocUnsafe(size: number): Buffer; /** * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents * of the newly created Buffer are unknown and may contain sensitive data. * * @param size count of octets to allocate */ allocUnsafeSlow(size: number): Buffer; }; /************************************************ * * * GLOBAL INTERFACES * * * ************************************************/ declare namespace NodeJS { export interface InspectOptions { showHidden?: boolean; depth?: number | null; colors?: boolean; customInspect?: boolean; showProxy?: boolean; maxArrayLength?: number | null; breakLength?: number; } export var Console: { prototype: Console; new(stdout: WritableStream, stderr?: WritableStream): Console; } export interface ErrnoException extends Error { errno?: number; code?: string; path?: string; syscall?: string; stack?: string; } export class EventEmitter { addListener(event: string | symbol, listener: Function): this; on(event: string | symbol, listener: Function): this; once(event: string | symbol, listener: Function): this; removeListener(event: string | symbol, listener: Function): this; removeAllListeners(event?: string | symbol): this; setMaxListeners(n: number): this; getMaxListeners(): number; listeners(event: string | symbol): Function[]; emit(event: string | symbol, ...args: any[]): boolean; listenerCount(type: string | symbol): number; // Added in Node 6... 
prependListener(event: string | symbol, listener: Function): this; prependOnceListener(event: string | symbol, listener: Function): this; eventNames(): (string | symbol)[]; } export interface ReadableStream extends EventEmitter { readable: boolean; read(size?: number): string | Buffer; setEncoding(encoding: string | null): void; pause(): this; resume(): this; isPaused(): boolean; pipe<T extends WritableStream>(destination: T, options?: { end?: boolean; }): T; unpipe<T extends WritableStream>(destination?: T): void; unshift(chunk: string): void; unshift(chunk: Buffer): void; wrap(oldStream: ReadableStream): ReadableStream; } export interface WritableStream extends EventEmitter { writable: boolean; write(buffer: Buffer | string, cb?: Function): boolean; write(str: string, encoding?: string, cb?: Function): boolean; end(): void; end(buffer: Buffer, cb?: Function): void; end(str: string, cb?: Function): void; end(str: string, encoding?: string, cb?: Function): void; } export interface ReadWriteStream extends ReadableStream, WritableStream {} export interface Events extends EventEmitter { } export interface Domain extends Events { run(fn: Function): void; add(emitter: Events): void; remove(emitter: Events): void; bind(cb: (err: Error, data: any) => any): any; intercept(cb: (data: any) => any): any; dispose(): void; addListener(event: string, listener: Function): this; on(event: string, listener: Function): this; once(event: string, listener: Function): this; removeListener(event: string, listener: Function): this; removeAllListeners(event?: string): this; } export interface MemoryUsage { rss: number; heapTotal: number; heapUsed: number; } export interface CpuUsage { user: number; system: number; } export interface ProcessVersions { http_parser: string; node: string; v8: string; ares: string; uv: string; zlib: string; modules: string; openssl: string; } type Platform = 'aix' | 'android' | 'darwin' | 'freebsd' | 'linux' | 'openbsd' | 'sunos' | 'win32'; export interface 
Socket extends ReadWriteStream { isTTY?: true; } export interface WriteStream extends Socket { columns?: number; rows?: number; } export interface ReadStream extends Socket { isRaw?: boolean; setRawMode?(mode: boolean): void; } export interface Process extends EventEmitter { stdout: WriteStream; stderr: WriteStream; stdin: ReadStream; argv: string[]; argv0: string; execArgv: string[]; execPath: string; abort(): void; chdir(directory: string): void; cwd(): string; emitWarning(warning: string | Error, name?: string, ctor?: Function): void; env: any; exit(code?: number): void; exitCode: number; getgid(): number; setgid(id: number): void; setgid(id: string): void; getuid(): number; setuid(id: number): void; setuid(id: string): void; version: string; versions: ProcessVersions; config: { target_defaults: { cflags: any[]; default_configuration: string; defines: string[]; include_dirs: string[]; libraries: string[]; }; variables: { clang: number; host_arch: string; node_install_npm: boolean; node_install_waf: boolean; node_prefix: string; node_shared_openssl: boolean; node_shared_v8: boolean; node_shared_zlib: boolean; node_use_dtrace: boolean; node_use_etw: boolean; node_use_openssl: boolean; target_arch: string; v8_no_strict_aliasing: number; v8_use_snapshot: boolean; visibility: string; }; }; kill(pid: number, signal?: string | number): void; pid: number; title: string; arch: string; platform: Platform; mainModule?: NodeModule; memoryUsage(): MemoryUsage; cpuUsage(previousValue?: CpuUsage): CpuUsage; nextTick(callback: Function, ...args: any[]): void; umask(mask?: number): number; uptime(): number; hrtime(time?: [number, number]): [number, number]; domain: Domain; // Worker send?(message: any, sendHandle?: any): void; disconnect(): void; connected: boolean; } export interface Global { Array: typeof Array; ArrayBuffer: typeof ArrayBuffer; Boolean: typeof Boolean; Buffer: typeof Buffer; DataView: typeof DataView; Date: typeof Date; Error: typeof Error; EvalError: typeof 
EvalError; Float32Array: typeof Float32Array; Float64Array: typeof Float64Array; Function: typeof Function; GLOBAL: Global; Infinity: typeof Infinity; Int16Array: typeof Int16Array; Int32Array: typeof Int32Array; Int8Array: typeof Int8Array; Intl: typeof Intl; JSON: typeof JSON; Map: MapConstructor; Math: typeof Math; NaN: typeof NaN; Number: typeof Number; Object: typeof Object; Promise: Function; RangeError: typeof RangeError; ReferenceError: typeof ReferenceError; RegExp: typeof RegExp; Set: SetConstructor; String: typeof String; Symbol: Function; SyntaxError: typeof SyntaxError; TypeError: typeof TypeError; URIError: typeof URIError; Uint16Array: typeof Uint16Array; Uint32Array: typeof Uint32Array; Uint8Array: typeof Uint8Array; Uint8ClampedArray: Function; WeakMap: WeakMapConstructor; WeakSet: WeakSetConstructor; clearImmediate: (immediateId: any) => void; clearInterval: (intervalId: NodeJS.Timer) => void; clearTimeout: (timeoutId: NodeJS.Timer) => void; console: typeof console; decodeURI: typeof decodeURI; decodeURIComponent: typeof decodeURIComponent; encodeURI: typeof encodeURI; encodeURIComponent: typeof encodeURIComponent; escape: (str: string) => string; eval: typeof eval; global: Global; isFinite: typeof isFinite; isNaN: typeof isNaN; parseFloat: typeof parseFloat; parseInt: typeof parseInt; process: Process; root: Global; setImmediate: (callback: (...args: any[]) => void, ...args: any[]) => any; setInterval: (callback: (...args: any[]) => void, ms: number, ...args: any[]) => NodeJS.Timer; setTimeout: (callback: (...args: any[]) => void, ms: number, ...args: any[]) => NodeJS.Timer; undefined: typeof undefined; unescape: (str: string) => string; gc: () => void; v8debug?: any; } export interface Timer { ref(): void; unref(): void; } } interface IterableIterator<T> { } /** * @deprecated */ interface NodeBuffer extends Uint8Array { write(string: string, offset?: number, length?: number, encoding?: string): number; toString(encoding?: string, start?: number, 
// NOTE(review): continuation of the global Buffer interface (its declaration begins earlier in this file).
// Below: copy/slice helpers plus the full family of fixed-width integer/float read and write accessors.
end?: number): string; toJSON(): { type: 'Buffer', data: any[] }; equals(otherBuffer: Buffer): boolean; compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; slice(start?: number, end?: number): Buffer; writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; readUInt8(offset: number, noAssert?: boolean): number; readUInt16LE(offset: number, noAssert?: boolean): number; readUInt16BE(offset: number, noAssert?: boolean): number; readUInt32LE(offset: number, noAssert?: boolean): number; readUInt32BE(offset: number, noAssert?: boolean): number; readInt8(offset: number, noAssert?: boolean): number; readInt16LE(offset: number, noAssert?: boolean): number; readInt16BE(offset: number, noAssert?: boolean): number; readInt32LE(offset: number, noAssert?: boolean): number; readInt32BE(offset: number, noAssert?: boolean): number; readFloatLE(offset: number, noAssert?: boolean): number; readFloatBE(offset: number, noAssert?: boolean): number; readDoubleLE(offset: number, noAssert?: boolean): number; readDoubleBE(offset: number, noAssert?: boolean): number; swap16(): Buffer; swap32(): Buffer; swap64(): Buffer; writeUInt8(value: number, offset: number, noAssert?: boolean): number; writeUInt16LE(value: number, offset:
// (cont.) remaining write accessors, fill/indexOf/includes helpers, and ES2015 iterator methods; the
// interface closes here and the MODULES section begins with the "buffer" and "querystring" modules.
number, noAssert?: boolean): number; writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; writeInt8(value: number, offset: number, noAssert?: boolean): number; writeInt16LE(value: number, offset: number, noAssert?: boolean): number; writeInt16BE(value: number, offset: number, noAssert?: boolean): number; writeInt32LE(value: number, offset: number, noAssert?: boolean): number; writeInt32BE(value: number, offset: number, noAssert?: boolean): number; writeFloatLE(value: number, offset: number, noAssert?: boolean): number; writeFloatBE(value: number, offset: number, noAssert?: boolean): number; writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; fill(value: any, offset?: number, end?: number): this; indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; entries(): IterableIterator<[number, number]>; includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; keys(): IterableIterator<number>; values(): IterableIterator<number>; } /************************************************ * * * MODULES * * * ************************************************/ declare module "buffer" { export var INSPECT_MAX_BYTES: number; var BuffType: typeof Buffer; var SlowBuffType: typeof SlowBuffer; export { BuffType as Buffer, SlowBuffType as SlowBuffer }; } declare module "querystring" { export interface StringifyOptions { encodeURIComponent?: Function; } export interface ParseOptions { maxKeys?: number; decodeURIComponent?: Function; } export function stringify<T>(obj: T, sep?: string, eq?: string, options?: StringifyOptions): string; export function parse(str: string,
// (cont.) tail of the querystring.parse overloads, the "events" module (EventEmitter via export =),
// and the start of the "http" module: RequestOptions, Server, deprecated ServerRequest, ServerResponse.
sep?: string, eq?: string, options?: ParseOptions): any; export function parse<T extends {}>(str: string, sep?: string, eq?: string, options?: ParseOptions): T; export function escape(str: string): string; export function unescape(str: string): string; } declare module "events" { class internal extends NodeJS.EventEmitter { } namespace internal { export class EventEmitter extends internal { static listenerCount(emitter: EventEmitter, event: string | symbol): number; // deprecated static defaultMaxListeners: number; addListener(event: string | symbol, listener: Function): this; on(event: string | symbol, listener: Function): this; once(event: string | symbol, listener: Function): this; prependListener(event: string | symbol, listener: Function): this; prependOnceListener(event: string | symbol, listener: Function): this; removeListener(event: string | symbol, listener: Function): this; removeAllListeners(event?: string | symbol): this; setMaxListeners(n: number): this; getMaxListeners(): number; listeners(event: string | symbol): Function[]; emit(event: string | symbol, ...args: any[]): boolean; eventNames(): (string | symbol)[]; listenerCount(type: string | symbol): number; } } export = internal; } declare module "http" { import * as events from "events"; import * as net from "net"; import * as stream from "stream"; export interface RequestOptions { protocol?: string; host?: string; hostname?: string; family?: number; port?: number; localAddress?: string; socketPath?: string; method?: string; path?: string; headers?: { [key: string]: any }; auth?: string; agent?: Agent | boolean; } export interface Server extends net.Server { setTimeout(msecs: number, callback: Function): void; maxHeadersCount: number; timeout: number; listening: boolean; } /** * @deprecated Use IncomingMessage */ export interface ServerRequest extends IncomingMessage { connection: net.Socket; } export interface ServerResponse extends stream.Writable { // Extended base methods write(buffer:
// (cont.) http.ServerResponse write/end overload surface, ClientRequest, and the start of IncomingMessage.
Buffer): boolean; write(buffer: Buffer, cb?: Function): boolean; write(str: string, cb?: Function): boolean; write(str: string, encoding?: string, cb?: Function): boolean; write(str: string, encoding?: string, fd?: string): boolean; writeContinue(): void; writeHead(statusCode: number, reasonPhrase?: string, headers?: any): void; writeHead(statusCode: number, headers?: any): void; statusCode: number; statusMessage: string; headersSent: boolean; setHeader(name: string, value: string | string[]): void; setTimeout(msecs: number, callback: Function): ServerResponse; sendDate: boolean; getHeader(name: string): string; removeHeader(name: string): void; write(chunk: any, encoding?: string): any; addTrailers(headers: any): void; finished: boolean; // Extended base methods end(): void; end(buffer: Buffer, cb?: Function): void; end(str: string, cb?: Function): void; end(str: string, encoding?: string, cb?: Function): void; end(data?: any, encoding?: string): void; } export interface ClientRequest extends stream.Writable { // Extended base methods write(buffer: Buffer): boolean; write(buffer: Buffer, cb?: Function): boolean; write(str: string, cb?: Function): boolean; write(str: string, encoding?: string, cb?: Function): boolean; write(str: string, encoding?: string, fd?: string): boolean; write(chunk: any, encoding?: string): void; abort(): void; setTimeout(timeout: number, callback?: Function): void; setNoDelay(noDelay?: boolean): void; setSocketKeepAlive(enable?: boolean, initialDelay?: number): void; setHeader(name: string, value: string | string[]): void; getHeader(name: string): string; removeHeader(name: string): void; addTrailers(headers: any): void; // Extended base methods end(): void; end(buffer: Buffer, cb?: Function): void; end(str: string, cb?: Function): void; end(str: string, encoding?: string, cb?: Function): void; end(data?: any, encoding?: string): void; } export interface IncomingMessage extends stream.Readable { httpVersion: string; httpVersionMajor:
// (cont.) IncomingMessage metadata fields, deprecated ClientResponse alias, AgentOptions pool tuning, and the http.Agent class.
number; httpVersionMinor: number; connection: net.Socket; headers: any; rawHeaders: string[]; trailers: any; rawTrailers: any; setTimeout(msecs: number, callback: Function): NodeJS.Timer; /** * Only valid for request obtained from http.Server. */ method?: string; /** * Only valid for request obtained from http.Server. */ url?: string; /** * Only valid for response obtained from http.ClientRequest. */ statusCode?: number; /** * Only valid for response obtained from http.ClientRequest. */ statusMessage?: string; socket: net.Socket; destroy(error?: Error): void; } /** * @deprecated Use IncomingMessage */ export interface ClientResponse extends IncomingMessage { } export interface AgentOptions { /** * Keep sockets around in a pool to be used by other requests in the future. Default = false */ keepAlive?: boolean; /** * When using HTTP KeepAlive, how often to send TCP KeepAlive packets over sockets being kept alive. Default = 1000. * Only relevant if keepAlive is set to true. */ keepAliveMsecs?: number; /** * Maximum number of sockets to allow per host. Default for Node 0.10 is 5, default for Node 0.12 is Infinity */ maxSockets?: number; /** * Maximum number of sockets to leave open in a free state. Only relevant if keepAlive is set to true. Default = 256. */ maxFreeSockets?: number; } export class Agent { maxSockets: number; sockets: any; requests: any; constructor(opts?: AgentOptions); /** * Destroy any sockets that are currently in use by the agent. * It is usually not necessary to do this. However, if you are using an agent with KeepAlive enabled, * then it is best to explicitly shut down the agent when you know that it will no longer be used. Otherwise, * sockets may hang open for quite a long time before the server terminates them.
*/ destroy(): void; } export var METHODS: string[]; export var STATUS_CODES: { [errorCode: number]: string; [errorCode: string]: string; }; export function createServer(requestListener?: (request: IncomingMessage, response: ServerResponse) => void): Server; export function createClient(port?: number, host?: string): any; export function request(options: RequestOptions | string, callback?: (res: IncomingMessage) => void): ClientRequest; export function get(options: any, callback?: (res: IncomingMessage) => void): ClientRequest; export var globalAgent: Agent; } declare module "cluster" { import * as child from "child_process"; import * as events from "events"; import * as net from "net"; // interfaces export interface ClusterSettings { execArgv?: string[]; // default: process.execArgv exec?: string; args?: string[]; silent?: boolean; stdio?: any[]; uid?: number; gid?: number; } export interface ClusterSetupMasterSettings { exec?: string; // default: process.argv[1] args?: string[]; // default: process.argv.slice(2) silent?: boolean; // default: false stdio?: any[]; } export interface Address { address: string; port: number; addressType: number | "udp4" | "udp6"; // 4, 6, -1, "udp4", "udp6" } export class Worker extends events.EventEmitter { id: string; process: child.ChildProcess; suicide: boolean; send(message: any, sendHandle?: any, callback?: (error: Error) => void): boolean; kill(signal?: string): void; destroy(signal?: string): void; disconnect(): void; isConnected(): boolean; isDead(): boolean; exitedAfterDisconnect: boolean; /** * events.EventEmitter * 1. disconnect * 2. error * 3. exit * 4. listening * 5. message * 6.
online */ addListener(event: string, listener: Function): this; addListener(event: "disconnect", listener: () => void): this; addListener(event: "error", listener: (error: Error) => void): this; addListener(event: "exit", listener: (code: number, signal: string) => void): this; addListener(event: "listening", listener: (address: Address) => void): this; addListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. addListener(event: "online", listener: () => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "disconnect", listener: () => void): boolean emit(event: "error", listener: (error: Error) => void): boolean emit(event: "exit", listener: (code: number, signal: string) => void): boolean emit(event: "listening", listener: (address: Address) => void): boolean emit(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): boolean emit(event: "online", listener: () => void): boolean on(event: string, listener: Function): this; on(event: "disconnect", listener: () => void): this; on(event: "error", listener: (error: Error) => void): this; on(event: "exit", listener: (code: number, signal: string) => void): this; on(event: "listening", listener: (address: Address) => void): this; on(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
// (cont.) Worker.once / prependListener / prependOnceListener — one typed overload per lifecycle event.
on(event: "online", listener: () => void): this; once(event: string, listener: Function): this; once(event: "disconnect", listener: () => void): this; once(event: "error", listener: (error: Error) => void): this; once(event: "exit", listener: (code: number, signal: string) => void): this; once(event: "listening", listener: (address: Address) => void): this; once(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. once(event: "online", listener: () => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "disconnect", listener: () => void): this; prependListener(event: "error", listener: (error: Error) => void): this; prependListener(event: "exit", listener: (code: number, signal: string) => void): this; prependListener(event: "listening", listener: (address: Address) => void): this; prependListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. prependListener(event: "online", listener: () => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "disconnect", listener: () => void): this; prependOnceListener(event: "error", listener: (error: Error) => void): this; prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this; prependOnceListener(event: "listening", listener: (address: Address) => void): this; prependOnceListener(event: "message", listener: (message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
// (cont.) Worker class closes; the Cluster interface repeats the same per-event overload pattern with
// Worker-first listener signatures ("setup" and seven lifecycle events).
prependOnceListener(event: "online", listener: () => void): this; } export interface Cluster extends events.EventEmitter { Worker: Worker; disconnect(callback?: Function): void; fork(env?: any): Worker; isMaster: boolean; isWorker: boolean; // TODO: cluster.schedulingPolicy settings: ClusterSettings; setupMaster(settings?: ClusterSetupMasterSettings): void; worker: Worker; workers: { [index: string]: Worker }; /** * events.EventEmitter * 1. disconnect * 2. exit * 3. fork * 4. listening * 5. message * 6. online * 7. setup */ addListener(event: string, listener: Function): this; addListener(event: "disconnect", listener: (worker: Worker) => void): this; addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; addListener(event: "fork", listener: (worker: Worker) => void): this; addListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; addListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
// (cont.) Cluster addListener tail plus emit/on overloads.
addListener(event: "online", listener: (worker: Worker) => void): this; addListener(event: "setup", listener: (settings: any) => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "disconnect", listener: (worker: Worker) => void): boolean; emit(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): boolean; emit(event: "fork", listener: (worker: Worker) => void): boolean; emit(event: "listening", listener: (worker: Worker, address: Address) => void): boolean; emit(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): boolean; emit(event: "online", listener: (worker: Worker) => void): boolean; emit(event: "setup", listener: (settings: any) => void): boolean; on(event: string, listener: Function): this; on(event: "disconnect", listener: (worker: Worker) => void): this; on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; on(event: "fork", listener: (worker: Worker) => void): this; on(event: "listening", listener: (worker: Worker, address: Address) => void): this; on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. on(event: "online", listener: (worker: Worker) => void): this; on(event: "setup", listener: (settings: any) => void): this; once(event: string, listener: Function): this; once(event: "disconnect", listener: (worker: Worker) => void): this; once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; once(event: "fork", listener: (worker: Worker) => void): this; once(event: "listening", listener: (worker: Worker, address: Address) => void): this; once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
// (cont.) Cluster once tail plus prependListener/prependOnceListener overloads.
once(event: "online", listener: (worker: Worker) => void): this; once(event: "setup", listener: (settings: any) => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "disconnect", listener: (worker: Worker) => void): this; prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; prependListener(event: "fork", listener: (worker: Worker) => void): this; prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; prependListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined. prependListener(event: "online", listener: (worker: Worker) => void): this; prependListener(event: "setup", listener: (settings: any) => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): this; prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): this; prependOnceListener(event: "fork", listener: (worker: Worker) => void): this; prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): this; prependOnceListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): this; // the handle is a net.Socket or net.Server object, or undefined.
// NOTE(review): Cluster interface closes below; the cluster module then re-exports the same surface as
// top-level functions returning Cluster (a mirror of the interface, one overload per documented event).
prependOnceListener(event: "online", listener: (worker: Worker) => void): this; prependOnceListener(event: "setup", listener: (settings: any) => void): this; } export function disconnect(callback?: Function): void; export function fork(env?: any): Worker; export var isMaster: boolean; export var isWorker: boolean; // TODO: cluster.schedulingPolicy export var settings: ClusterSettings; export function setupMaster(settings?: ClusterSetupMasterSettings): void; export var worker: Worker; export var workers: { [index: string]: Worker }; /** * events.EventEmitter * 1. disconnect * 2. exit * 3. fork * 4. listening * 5. message * 6. online * 7. setup */ export function addListener(event: string, listener: Function): Cluster; export function addListener(event: "disconnect", listener: (worker: Worker) => void): Cluster; export function addListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster; export function addListener(event: "fork", listener: (worker: Worker) => void): Cluster; export function addListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster; export function addListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined.
// (cont.) module-level addListener tail and the emit/on function overloads.
export function addListener(event: "online", listener: (worker: Worker) => void): Cluster; export function addListener(event: "setup", listener: (settings: any) => void): Cluster; export function emit(event: string | symbol, ...args: any[]): boolean; export function emit(event: "disconnect", listener: (worker: Worker) => void): boolean; export function emit(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): boolean; export function emit(event: "fork", listener: (worker: Worker) => void): boolean; export function emit(event: "listening", listener: (worker: Worker, address: Address) => void): boolean; export function emit(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): boolean; export function emit(event: "online", listener: (worker: Worker) => void): boolean; export function emit(event: "setup", listener: (settings: any) => void): boolean; export function on(event: string, listener: Function): Cluster; export function on(event: "disconnect", listener: (worker: Worker) => void): Cluster; export function on(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster; export function on(event: "fork", listener: (worker: Worker) => void): Cluster; export function on(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster; export function on(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined.
export function on(event: "online", listener: (worker: Worker) => void): Cluster; export function on(event: "setup", listener: (settings: any) => void): Cluster; export function once(event: string, listener: Function): Cluster; export function once(event: "disconnect", listener: (worker: Worker) => void): Cluster; export function once(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster; export function once(event: "fork", listener: (worker: Worker) => void): Cluster; export function once(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster; export function once(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined. export function once(event: "online", listener: (worker: Worker) => void): Cluster; export function once(event: "setup", listener: (settings: any) => void): Cluster; export function removeListener(event: string, listener: Function): Cluster; export function removeAllListeners(event?: string): Cluster; export function setMaxListeners(n: number): Cluster; export function getMaxListeners(): number; export function listeners(event: string): Function[]; export function listenerCount(type: string): number; export function prependListener(event: string, listener: Function): Cluster; export function prependListener(event: "disconnect", listener: (worker: Worker) => void): Cluster; export function prependListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster; export function prependListener(event: "fork", listener: (worker: Worker) => void): Cluster; export function prependListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster; export function prependListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket
or net.Server object, or undefined. export function prependListener(event: "online", listener: (worker: Worker) => void): Cluster; export function prependListener(event: "setup", listener: (settings: any) => void): Cluster; export function prependOnceListener(event: string, listener: Function): Cluster; export function prependOnceListener(event: "disconnect", listener: (worker: Worker) => void): Cluster; export function prependOnceListener(event: "exit", listener: (worker: Worker, code: number, signal: string) => void): Cluster; export function prependOnceListener(event: "fork", listener: (worker: Worker) => void): Cluster; export function prependOnceListener(event: "listening", listener: (worker: Worker, address: Address) => void): Cluster; export function prependOnceListener(event: "message", listener: (worker: Worker, message: any, handle: net.Socket | net.Server) => void): Cluster; // the handle is a net.Socket or net.Server object, or undefined. export function prependOnceListener(event: "online", listener: (worker: Worker) => void): Cluster; export function prependOnceListener(event: "setup", listener: (settings: any) => void): Cluster; export function eventNames(): string[]; } declare module "zlib" { import * as stream from "stream"; export interface ZlibOptions { chunkSize?: number; windowBits?: number; level?: number; memLevel?: number; strategy?: number; dictionary?: any; finishFlush?: number } export interface Gzip extends stream.Transform { } export interface Gunzip extends stream.Transform { } export interface Deflate extends stream.Transform { } export interface Inflate extends stream.Transform { } export interface DeflateRaw extends stream.Transform { } export interface InflateRaw extends stream.Transform { } export interface Unzip extends stream.Transform { } export function createGzip(options?: ZlibOptions): Gzip; export function createGunzip(options?: ZlibOptions): Gunzip; export function createDeflate(options?: ZlibOptions): Deflate; export
// (cont.) zlib one-shot convenience functions: each codec gets an async callback form, an options
// overload, and a *Sync variant; followed by the Z_* flush/return-code/strategy constants.
function createInflate(options?: ZlibOptions): Inflate; export function createDeflateRaw(options?: ZlibOptions): DeflateRaw; export function createInflateRaw(options?: ZlibOptions): InflateRaw; export function createUnzip(options?: ZlibOptions): Unzip; export function deflate(buf: Buffer | string, callback: (error: Error, result: Buffer) => void): void; export function deflate(buf: Buffer | string, options: ZlibOptions, callback: (error: Error, result: Buffer) => void): void; export function deflateSync(buf: Buffer | string, options?: ZlibOptions): Buffer; export function deflateRaw(buf: Buffer | string, callback: (error: Error, result: Buffer) => void): void; export function deflateRaw(buf: Buffer | string, options: ZlibOptions, callback: (error: Error, result: Buffer) => void): void; export function deflateRawSync(buf: Buffer | string, options?: ZlibOptions): Buffer; export function gzip(buf: Buffer | string, callback: (error: Error, result: Buffer) => void): void; export function gzip(buf: Buffer | string, options: ZlibOptions, callback: (error: Error, result: Buffer) => void): void; export function gzipSync(buf: Buffer | string, options?: ZlibOptions): Buffer; export function gunzip(buf: Buffer | string, callback: (error: Error, result: Buffer) => void): void; export function gunzip(buf: Buffer | string, options: ZlibOptions, callback: (error: Error, result: Buffer) => void): void; export function gunzipSync(buf: Buffer | string, options?: ZlibOptions): Buffer; export function inflate(buf: Buffer | string, callback: (error: Error, result: Buffer) => void): void; export function inflate(buf: Buffer | string, options: ZlibOptions, callback: (error: Error, result: Buffer) => void): void; export function inflateSync(buf: Buffer | string, options?: ZlibOptions): Buffer; export function inflateRaw(buf: Buffer | string, callback: (error: Error, result: Buffer) => void): void; export function inflateRaw(buf: Buffer | string, options: ZlibOptions, callback: (error:
// (cont.) remaining zlib helpers and Z_* constants; the "zlib" module closes and "os" begins.
Error, result: Buffer) => void): void; export function inflateRawSync(buf: Buffer | string, options?: ZlibOptions): Buffer; export function unzip(buf: Buffer | string, callback: (error: Error, result: Buffer) => void): void; export function unzip(buf: Buffer | string, options: ZlibOptions, callback: (error: Error, result: Buffer) => void): void; export function unzipSync(buf: Buffer | string, options?: ZlibOptions): Buffer; // Constants export var Z_NO_FLUSH: number; export var Z_PARTIAL_FLUSH: number; export var Z_SYNC_FLUSH: number; export var Z_FULL_FLUSH: number; export var Z_FINISH: number; export var Z_BLOCK: number; export var Z_TREES: number; export var Z_OK: number; export var Z_STREAM_END: number; export var Z_NEED_DICT: number; export var Z_ERRNO: number; export var Z_STREAM_ERROR: number; export var Z_DATA_ERROR: number; export var Z_MEM_ERROR: number; export var Z_BUF_ERROR: number; export var Z_VERSION_ERROR: number; export var Z_NO_COMPRESSION: number; export var Z_BEST_SPEED: number; export var Z_BEST_COMPRESSION: number; export var Z_DEFAULT_COMPRESSION: number; export var Z_FILTERED: number; export var Z_HUFFMAN_ONLY: number; export var Z_RLE: number; export var Z_FIXED: number; export var Z_DEFAULT_STRATEGY: number; export var Z_BINARY: number; export var Z_TEXT: number; export var Z_ASCII: number; export var Z_UNKNOWN: number; export var Z_DEFLATED: number; export var Z_NULL: number; } declare module "os" { export interface CpuInfo { model: string; speed: number; times: { user: number; nice: number; sys: number; idle: number; irq: number; }; } export interface NetworkInterfaceInfo { address: string; netmask: string; family: string; mac: string; internal: boolean; } export function hostname(): string; export function loadavg(): number[]; export function uptime(): number; export function freemem(): number; export function totalmem(): number; export function cpus(): CpuInfo[]; export function type(): string; export function release(): string;
// (cont.) os network/user helpers plus the constants object: UV_UDP_REUSEADDR, POSIX signal numbers,
// and the start of the errno table (continues into the next chunk of this file).
export function networkInterfaces(): { [index: string]: NetworkInterfaceInfo[] }; export function homedir(): string; export function userInfo(options?: { encoding: string }): { username: string, uid: number, gid: number, shell: any, homedir: string } export var constants: { UV_UDP_REUSEADDR: number, signals: { SIGHUP: number; SIGINT: number; SIGQUIT: number; SIGILL: number; SIGTRAP: number; SIGABRT: number; SIGIOT: number; SIGBUS: number; SIGFPE: number; SIGKILL: number; SIGUSR1: number; SIGSEGV: number; SIGUSR2: number; SIGPIPE: number; SIGALRM: number; SIGTERM: number; SIGCHLD: number; SIGSTKFLT: number; SIGCONT: number; SIGSTOP: number; SIGTSTP: number; SIGTTIN: number; SIGTTOU: number; SIGURG: number; SIGXCPU: number; SIGXFSZ: number; SIGVTALRM: number; SIGPROF: number; SIGWINCH: number; SIGIO: number; SIGPOLL: number; SIGPWR: number; SIGSYS: number; SIGUNUSED: number; }, errno: { E2BIG: number; EACCES: number; EADDRINUSE: number; EADDRNOTAVAIL: number; EAFNOSUPPORT: number; EAGAIN: number; EALREADY: number; EBADF: number; EBADMSG: number; EBUSY: number; ECANCELED: number; ECHILD: number; ECONNABORTED: number; ECONNREFUSED: number; ECONNRESET: number; EDEADLK: number; EDESTADDRREQ: number; EDOM: number; EDQUOT: number; EEXIST: number; EFAULT: number; EFBIG: number; EHOSTUNREACH: number; EIDRM: number; EILSEQ: number; EINPROGRESS: number; EINTR: number; EINVAL: number; EIO: number; EISCONN: number; EISDIR: number; ELOOP: number; EMFILE: number; EMLINK: number; EMSGSIZE: number; EMULTIHOP: number; ENAMETOOLONG: number; ENETDOWN: number; ENETRESET: number; ENETUNREACH: number; ENFILE: number; ENOBUFS: number; ENODATA: number; ENODEV: number; ENOENT: number; ENOEXEC: number; ENOLCK: number; ENOLINK: number; ENOMEM: number; ENOMSG: number; ENOPROTOOPT: number; ENOSPC: number; ENOSR: number; ENOSTR: number; ENOSYS: number; ENOTCONN: number; ENOTDIR: number; ENOTEMPTY: number; ENOTSOCK: number; ENOTSUP: number; ENOTTY: number; ENXIO: number; EOPNOTSUPP: number;
// (cont.) tail of the os errno table, remaining os helpers (arch/platform/tmpdir/EOL/endianness),
// then the "https" module (TLS-flavoured mirrors of the http types) and the "punycode" module.
EOVERFLOW: number; EPERM: number; EPIPE: number; EPROTO: number; EPROTONOSUPPORT: number; EPROTOTYPE: number; ERANGE: number; EROFS: number; ESPIPE: number; ESRCH: number; ESTALE: number; ETIME: number; ETIMEDOUT: number; ETXTBSY: number; EWOULDBLOCK: number; EXDEV: number; }, }; export function arch(): string; export function platform(): NodeJS.Platform; export function tmpdir(): string; export var EOL: string; export function endianness(): "BE" | "LE"; } declare module "https" { import * as tls from "tls"; import * as events from "events"; import * as http from "http"; export interface ServerOptions { pfx?: any; key?: any; passphrase?: string; cert?: any; ca?: any; crl?: any; ciphers?: string; honorCipherOrder?: boolean; requestCert?: boolean; rejectUnauthorized?: boolean; NPNProtocols?: any; SNICallback?: (servername: string, cb: (err: Error, ctx: tls.SecureContext) => any) => any; } export interface RequestOptions extends http.RequestOptions { pfx?: any; key?: any; passphrase?: string; cert?: any; ca?: any; ciphers?: string; rejectUnauthorized?: boolean; secureProtocol?: string; } export interface Agent extends http.Agent { } export interface AgentOptions extends http.AgentOptions { pfx?: any; key?: any; passphrase?: string; cert?: any; ca?: any; ciphers?: string; rejectUnauthorized?: boolean; secureProtocol?: string; maxCachedSessions?: number; } export var Agent: { new (options?: AgentOptions): Agent; }; export interface Server extends tls.Server { } export function createServer(options: ServerOptions, requestListener?: Function): Server; export function request(options: RequestOptions | string, callback?: (res: http.IncomingMessage) => void): http.ClientRequest; export function get(options: RequestOptions | string, callback?: (res: http.IncomingMessage) => void): http.ClientRequest; export var globalAgent: Agent; } declare module "punycode" { export function decode(string: string): string; export function encode(string: string): string; export function
// (cont.) punycode tail (toUnicode/toASCII/ucs2), the "repl" module (REPLServer extends readline's
// ReadLine with defineCommand/displayPrompt and exit/reset events), and the start of "readline".
toUnicode(domain: string): string; export function toASCII(domain: string): string; export var ucs2: ucs2; interface ucs2 { decode(string: string): number[]; encode(codePoints: number[]): string; } export var version: any; } declare module "repl" { import * as stream from "stream"; import * as readline from "readline"; export interface ReplOptions { prompt?: string; input?: NodeJS.ReadableStream; output?: NodeJS.WritableStream; terminal?: boolean; eval?: Function; useColors?: boolean; useGlobal?: boolean; ignoreUndefined?: boolean; writer?: Function; completer?: Function; replMode?: any; breakEvalOnSigint?: any; } export interface REPLServer extends readline.ReadLine { defineCommand(keyword: string, cmd: Function | { help: string, action: Function }): void; displayPrompt(preserveCursor?: boolean): void; context: any; /** * events.EventEmitter * 1. exit * 2. reset **/ addListener(event: string, listener: Function): this; addListener(event: "exit", listener: () => void): this; addListener(event: "reset", listener: Function): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "exit"): boolean; emit(event: "reset", context: any): boolean; on(event: string, listener: Function): this; on(event: "exit", listener: () => void): this; on(event: "reset", listener: Function): this; once(event: string, listener: Function): this; once(event: "exit", listener: () => void): this; once(event: "reset", listener: Function): this; prependListener(event: string, listener: Function): this; prependListener(event: "exit", listener: () => void): this; prependListener(event: "reset", listener: Function): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "exit", listener: () => void): this; prependOnceListener(event: "reset", listener: Function): this; } export function start(options?: string | ReplOptions): REPLServer; } declare module "readline" { import * as events from "events"; import * as stream from "stream"; export
// NOTE(review): the readline declarations continue past this chunk — the ReadLine interface below is
// truncated mid-overload and completes in the next portion of the file.
interface Key { sequence?: string; name?: string; ctrl?: boolean; meta?: boolean; shift?: boolean; } export interface ReadLine extends events.EventEmitter { setPrompt(prompt: string): void; prompt(preserveCursor?: boolean): void; question(query: string, callback: (answer: string) => void): void; pause(): this; resume(): this; close(): void; write(data: string | Buffer, key?: Key): void; /** * events.EventEmitter * 1. close * 2. line * 3. pause * 4. resume * 5. SIGCONT * 6. SIGINT * 7. SIGTSTP **/ addListener(event: string, listener: Function): this; addListener(event: "close", listener: () => void): this; addListener(event: "line", listener: (input: any) => void): this; addListener(event: "pause", listener: () => void): this; addListener(event: "resume", listener: () => void): this; addListener(event: "SIGCONT", listener: () => void): this; addListener(event: "SIGINT", listener: () => void): this; addListener(event: "SIGTSTP", listener: () => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "close"): boolean; emit(event: "line", input: any): boolean; emit(event: "pause"): boolean; emit(event: "resume"): boolean; emit(event: "SIGCONT"): boolean; emit(event: "SIGINT"): boolean; emit(event: "SIGTSTP"): boolean; on(event: string, listener: Function): this; on(event: "close", listener: () => void): this; on(event: "line", listener: (input: any) => void): this; on(event: "pause", listener: () => void): this; on(event: "resume", listener: () => void): this; on(event: "SIGCONT", listener: () => void): this; on(event: "SIGINT", listener: () => void): this; on(event: "SIGTSTP", listener: () => void): this; once(event: string, listener: Function): this; once(event: "close", listener: () => void): this; once(event: "line", listener: (input: any) => void): this; once(event: "pause", listener: () => void): this; once(event: "resume", listener: () => void): this; once(event: "SIGCONT", listener: () => void): this; once(event: "SIGINT", listener: ()
=> void): this; once(event: "SIGTSTP", listener: () => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "close", listener: () => void): this; prependListener(event: "line", listener: (input: any) => void): this; prependListener(event: "pause", listener: () => void): this; prependListener(event: "resume", listener: () => void): this; prependListener(event: "SIGCONT", listener: () => void): this; prependListener(event: "SIGINT", listener: () => void): this; prependListener(event: "SIGTSTP", listener: () => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "close", listener: () => void): this; prependOnceListener(event: "line", listener: (input: any) => void): this; prependOnceListener(event: "pause", listener: () => void): this; prependOnceListener(event: "resume", listener: () => void): this; prependOnceListener(event: "SIGCONT", listener: () => void): this; prependOnceListener(event: "SIGINT", listener: () => void): this; prependOnceListener(event: "SIGTSTP", listener: () => void): this; } export interface Completer { (line: string): CompleterResult; (line: string, callback: (err: any, result: CompleterResult) => void): any; } export type CompleterResult = [string[], string]; export interface ReadLineOptions { input: NodeJS.ReadableStream; output?: NodeJS.WritableStream; completer?: Completer; terminal?: boolean; historySize?: number; } export function createInterface(input: NodeJS.ReadableStream, output?: NodeJS.WritableStream, completer?: Completer, terminal?: boolean): ReadLine; export function createInterface(options: ReadLineOptions): ReadLine; export function cursorTo(stream: NodeJS.WritableStream, x: number, y?: number): void; export function moveCursor(stream: NodeJS.WritableStream, dx: number | string, dy: number | string): void; export function clearLine(stream: NodeJS.WritableStream, dir: number): void; export function clearScreenDown(stream: 
NodeJS.WritableStream): void; } declare module "vm" { export interface Context { } export interface ScriptOptions { filename?: string; lineOffset?: number; columnOffset?: number; displayErrors?: boolean; timeout?: number; cachedData?: Buffer; produceCachedData?: boolean; } export interface RunningScriptOptions { filename?: string; lineOffset?: number; columnOffset?: number; displayErrors?: boolean; timeout?: number; } export class Script { constructor(code: string, options?: ScriptOptions); runInContext(contextifiedSandbox: Context, options?: RunningScriptOptions): any; runInNewContext(sandbox?: Context, options?: RunningScriptOptions): any; runInThisContext(options?: RunningScriptOptions): any; } export function createContext(sandbox?: Context): Context; export function isContext(sandbox: Context): boolean; export function runInContext(code: string, contextifiedSandbox: Context, options?: RunningScriptOptions): any; export function runInDebugContext(code: string): any; export function runInNewContext(code: string, sandbox?: Context, options?: RunningScriptOptions): any; export function runInThisContext(code: string, options?: RunningScriptOptions): any; } declare module "child_process" { import * as events from "events"; import * as stream from "stream"; import * as net from "net"; export interface ChildProcess extends events.EventEmitter { stdin: stream.Writable; stdout: stream.Readable; stderr: stream.Readable; stdio: [stream.Writable, stream.Readable, stream.Readable]; killed: boolean; pid: number; kill(signal?: string): void; send(message: any, sendHandle?: any): boolean; connected: boolean; disconnect(): void; unref(): void; ref(): void; /** * events.EventEmitter * 1. close * 2. disconnect * 3. error * 4. exit * 5. 
message **/ addListener(event: string, listener: Function): this; addListener(event: "close", listener: (code: number, signal: string) => void): this; addListener(event: "disconnect", listener: () => void): this; addListener(event: "error", listener: (err: Error) => void): this; addListener(event: "exit", listener: (code: number, signal: string) => void): this; addListener(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "close", code: number, signal: string): boolean; emit(event: "disconnect"): boolean; emit(event: "error", err: Error): boolean; emit(event: "exit", code: number, signal: string): boolean; emit(event: "message", message: any, sendHandle: net.Socket | net.Server): boolean; on(event: string, listener: Function): this; on(event: "close", listener: (code: number, signal: string) => void): this; on(event: "disconnect", listener: () => void): this; on(event: "error", listener: (err: Error) => void): this; on(event: "exit", listener: (code: number, signal: string) => void): this; on(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this; once(event: string, listener: Function): this; once(event: "close", listener: (code: number, signal: string) => void): this; once(event: "disconnect", listener: () => void): this; once(event: "error", listener: (err: Error) => void): this; once(event: "exit", listener: (code: number, signal: string) => void): this; once(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "close", listener: (code: number, signal: string) => void): this; prependListener(event: "disconnect", listener: () => void): this; prependListener(event: "error", listener: (err: Error) => void): this; prependListener(event: "exit", listener: (code: number, signal: string) => 
void): this; prependListener(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "close", listener: (code: number, signal: string) => void): this; prependOnceListener(event: "disconnect", listener: () => void): this; prependOnceListener(event: "error", listener: (err: Error) => void): this; prependOnceListener(event: "exit", listener: (code: number, signal: string) => void): this; prependOnceListener(event: "message", listener: (message: any, sendHandle: net.Socket | net.Server) => void): this; } export interface SpawnOptions { cwd?: string; env?: any; stdio?: any; detached?: boolean; uid?: number; gid?: number; shell?: boolean | string; } export function spawn(command: string, args?: string[], options?: SpawnOptions): ChildProcess; export interface ExecOptions { cwd?: string; env?: any; shell?: string; timeout?: number; maxBuffer?: number; killSignal?: string; uid?: number; gid?: number; } export interface ExecOptionsWithStringEncoding extends ExecOptions { encoding: BufferEncoding; } export interface ExecOptionsWithBufferEncoding extends ExecOptions { encoding: string; // specify `null`. } export function exec(command: string, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess; export function exec(command: string, options: ExecOptionsWithStringEncoding, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess; // usage. 
child_process.exec("tsc", {encoding: null as string}, (err, stdout, stderr) => {}); export function exec(command: string, options: ExecOptionsWithBufferEncoding, callback?: (error: Error, stdout: Buffer, stderr: Buffer) => void): ChildProcess; export function exec(command: string, options: ExecOptions, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess; export interface ExecFileOptions { cwd?: string; env?: any; timeout?: number; maxBuffer?: number; killSignal?: string; uid?: number; gid?: number; } export interface ExecFileOptionsWithStringEncoding extends ExecFileOptions { encoding: BufferEncoding; } export interface ExecFileOptionsWithBufferEncoding extends ExecFileOptions { encoding: string; // specify `null`. } export function execFile(file: string, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess; export function execFile(file: string, options?: ExecFileOptionsWithStringEncoding, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess; // usage. child_process.execFile("file.sh", {encoding: null as string}, (err, stdout, stderr) => {}); export function execFile(file: string, options?: ExecFileOptionsWithBufferEncoding, callback?: (error: Error, stdout: Buffer, stderr: Buffer) => void): ChildProcess; export function execFile(file: string, options?: ExecFileOptions, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess; export function execFile(file: string, args?: string[], callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess; export function execFile(file: string, args?: string[], options?: ExecFileOptionsWithStringEncoding, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess; // usage. 
child_process.execFile("file.sh", ["foo"], {encoding: null as string}, (err, stdout, stderr) => {}); export function execFile(file: string, args?: string[], options?: ExecFileOptionsWithBufferEncoding, callback?: (error: Error, stdout: Buffer, stderr: Buffer) => void): ChildProcess; export function execFile(file: string, args?: string[], options?: ExecFileOptions, callback?: (error: Error, stdout: string, stderr: string) => void): ChildProcess; export interface ForkOptions { cwd?: string; env?: any; execPath?: string; execArgv?: string[]; silent?: boolean; uid?: number; gid?: number; } export function fork(modulePath: string, args?: string[], options?: ForkOptions): ChildProcess; export interface SpawnSyncOptions { cwd?: string; input?: string | Buffer; stdio?: any; env?: any; uid?: number; gid?: number; timeout?: number; killSignal?: string; maxBuffer?: number; encoding?: string; shell?: boolean | string; } export interface SpawnSyncOptionsWithStringEncoding extends SpawnSyncOptions { encoding: BufferEncoding; } export interface SpawnSyncOptionsWithBufferEncoding extends SpawnSyncOptions { encoding: string; // specify `null`. 
} export interface SpawnSyncReturns<T> { pid: number; output: string[]; stdout: T; stderr: T; status: number; signal: string; error: Error; } export function spawnSync(command: string): SpawnSyncReturns<Buffer>; export function spawnSync(command: string, options?: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns<string>; export function spawnSync(command: string, options?: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns<Buffer>; export function spawnSync(command: string, options?: SpawnSyncOptions): SpawnSyncReturns<Buffer>; export function spawnSync(command: string, args?: string[], options?: SpawnSyncOptionsWithStringEncoding): SpawnSyncReturns<string>; export function spawnSync(command: string, args?: string[], options?: SpawnSyncOptionsWithBufferEncoding): SpawnSyncReturns<Buffer>; export function spawnSync(command: string, args?: string[], options?: SpawnSyncOptions): SpawnSyncReturns<Buffer>; export interface ExecSyncOptions { cwd?: string; input?: string | Buffer; stdio?: any; env?: any; shell?: string; uid?: number; gid?: number; timeout?: number; killSignal?: string; maxBuffer?: number; encoding?: string; } export interface ExecSyncOptionsWithStringEncoding extends ExecSyncOptions { encoding: BufferEncoding; } export interface ExecSyncOptionsWithBufferEncoding extends ExecSyncOptions { encoding: string; // specify `null`. 
} export function execSync(command: string): Buffer; export function execSync(command: string, options?: ExecSyncOptionsWithStringEncoding): string; export function execSync(command: string, options?: ExecSyncOptionsWithBufferEncoding): Buffer; export function execSync(command: string, options?: ExecSyncOptions): Buffer; export interface ExecFileSyncOptions { cwd?: string; input?: string | Buffer; stdio?: any; env?: any; uid?: number; gid?: number; timeout?: number; killSignal?: string; maxBuffer?: number; encoding?: string; } export interface ExecFileSyncOptionsWithStringEncoding extends ExecFileSyncOptions { encoding: BufferEncoding; } export interface ExecFileSyncOptionsWithBufferEncoding extends ExecFileSyncOptions { encoding: string; // specify `null`. } export function execFileSync(command: string): Buffer; export function execFileSync(command: string, options?: ExecFileSyncOptionsWithStringEncoding): string; export function execFileSync(command: string, options?: ExecFileSyncOptionsWithBufferEncoding): Buffer; export function execFileSync(command: string, options?: ExecFileSyncOptions): Buffer; export function execFileSync(command: string, args?: string[], options?: ExecFileSyncOptionsWithStringEncoding): string; export function execFileSync(command: string, args?: string[], options?: ExecFileSyncOptionsWithBufferEncoding): Buffer; export function execFileSync(command: string, args?: string[], options?: ExecFileSyncOptions): Buffer; } declare module "url" { export interface Url { href?: string; protocol?: string; auth?: string; hostname?: string; port?: string; host?: string; pathname?: string; search?: string; query?: string | any; slashes?: boolean; hash?: string; path?: string; } export interface UrlObject { protocol?: string; slashes?: boolean; auth?: string; host?: string; hostname?: string; port?: string | number; pathname?: string; search?: string; query?: { [key: string]: any; }; hash?: string; } export function parse(urlStr: string, 
parseQueryString?: boolean, slashesDenoteHost?: boolean): Url; export function format(urlObject: UrlObject): string; export function resolve(from: string, to: string): string; } declare module "dns" { // Supported getaddrinfo flags. export const ADDRCONFIG: number; export const V4MAPPED: number; export interface LookupOptions { family?: number; hints?: number; all?: boolean; } export interface LookupOneOptions extends LookupOptions { all?: false; } export interface LookupAllOptions extends LookupOptions { all: true; } export interface LookupAddress { address: string; family: number; } export function lookup(hostname: string, family: number, callback: (err: NodeJS.ErrnoException, address: string, family: number) => void): void; export function lookup(hostname: string, options: LookupOneOptions, callback: (err: NodeJS.ErrnoException, address: string, family: number) => void): void; export function lookup(hostname: string, options: LookupAllOptions, callback: (err: NodeJS.ErrnoException, addresses: LookupAddress[]) => void): void; export function lookup(hostname: string, options: LookupOptions, callback: (err: NodeJS.ErrnoException, address: string | LookupAddress[], family: number) => void): void; export function lookup(hostname: string, callback: (err: NodeJS.ErrnoException, address: string, family: number) => void): void; export interface MxRecord { priority: number; exchange: string; } export interface NaptrRecord { flags: string; service: string; regexp: string; replacement: string; order: number; preference: number; } export interface SoaRecord { nsname: string; hostmaster: string; serial: number; refresh: number; retry: number; expire: number; minttl: number; } export interface SrvRecord { priority: number; weight: number; port: number; name: string; } export function resolve(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; export function resolve(hostname: string, rrtype: "A", callback: (err: NodeJS.ErrnoException, 
addresses: string[]) => void): void; export function resolve(hostname: string, rrtype: "AAAA", callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; export function resolve(hostname: string, rrtype: "CNAME", callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; export function resolve(hostname: string, rrtype: "MX", callback: (err: NodeJS.ErrnoException, addresses: MxRecord[]) => void): void; export function resolve(hostname: string, rrtype: "NAPTR", callback: (err: NodeJS.ErrnoException, addresses: NaptrRecord[]) => void): void; export function resolve(hostname: string, rrtype: "NS", callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; export function resolve(hostname: string, rrtype: "PTR", callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; export function resolve(hostname: string, rrtype: "SOA", callback: (err: NodeJS.ErrnoException, addresses: SoaRecord) => void): void; export function resolve(hostname: string, rrtype: "SRV", callback: (err: NodeJS.ErrnoException, addresses: SrvRecord[]) => void): void; export function resolve(hostname: string, rrtype: "TXT", callback: (err: NodeJS.ErrnoException, addresses: string[][]) => void): void; export function resolve(hostname: string, rrtype: string, callback: (err: NodeJS.ErrnoException, addresses: string[] | MxRecord[] | NaptrRecord[] | SoaRecord | SrvRecord[] | string[][]) => void): void; export function resolve4(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; export function resolve6(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; export function resolveCname(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; export function resolveMx(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: MxRecord[]) => void): void; export function resolveNaptr(hostname: string, callback: (err: 
NodeJS.ErrnoException, addresses: NaptrRecord[]) => void): void; export function resolveNs(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; export function resolvePtr(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[]) => void): void; export function resolveSoa(hostname: string, callback: (err: NodeJS.ErrnoException, address: SoaRecord) => void): void; export function resolveSrv(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: SrvRecord[]) => void): void; export function resolveTxt(hostname: string, callback: (err: NodeJS.ErrnoException, addresses: string[][]) => void): void; export function reverse(ip: string, callback: (err: NodeJS.ErrnoException, hostnames: string[]) => void): void; export function setServers(servers: string[]): void; //Error codes export var NODATA: string; export var FORMERR: string; export var SERVFAIL: string; export var NOTFOUND: string; export var NOTIMP: string; export var REFUSED: string; export var BADQUERY: string; export var BADNAME: string; export var BADFAMILY: string; export var BADRESP: string; export var CONNREFUSED: string; export var TIMEOUT: string; export var EOF: string; export var FILE: string; export var NOMEM: string; export var DESTRUCTION: string; export var BADSTR: string; export var BADFLAGS: string; export var NONAME: string; export var BADHINTS: string; export var NOTINITIALIZED: string; export var LOADIPHLPAPI: string; export var ADDRGETNETWORKPARAMS: string; export var CANCELLED: string; } declare module "net" { import * as stream from "stream"; import * as events from "events"; export interface Socket extends stream.Duplex { // Extended base methods write(buffer: Buffer): boolean; write(buffer: Buffer, cb?: Function): boolean; write(str: string, cb?: Function): boolean; write(str: string, encoding?: string, cb?: Function): boolean; write(str: string, encoding?: string, fd?: string): boolean; connect(port: number, host?: 
string, connectionListener?: Function): void; connect(path: string, connectionListener?: Function): void; bufferSize: number; setEncoding(encoding?: string): void; write(data: any, encoding?: string, callback?: Function): void; destroy(): void; setTimeout(timeout: number, callback?: Function): void; setNoDelay(noDelay?: boolean): void; setKeepAlive(enable?: boolean, initialDelay?: number): void; address(): { port: number; family: string; address: string; }; unref(): void; ref(): void; remoteAddress: string; remoteFamily: string; remotePort: number; localAddress: string; localPort: number; bytesRead: number; bytesWritten: number; connecting: boolean; destroyed: boolean; // Extended base methods end(): void; end(buffer: Buffer, cb?: Function): void; end(str: string, cb?: Function): void; end(str: string, encoding?: string, cb?: Function): void; end(data?: any, encoding?: string): void; /** * events.EventEmitter * 1. close * 2. connect * 3. data * 4. drain * 5. end * 6. error * 7. lookup * 8. 
timeout */ addListener(event: string, listener: Function): this; addListener(event: "close", listener: (had_error: boolean) => void): this; addListener(event: "connect", listener: () => void): this; addListener(event: "data", listener: (data: Buffer) => void): this; addListener(event: "drain", listener: () => void): this; addListener(event: "end", listener: () => void): this; addListener(event: "error", listener: (err: Error) => void): this; addListener(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this; addListener(event: "timeout", listener: () => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "close", had_error: boolean): boolean; emit(event: "connect"): boolean; emit(event: "data", data: Buffer): boolean; emit(event: "drain"): boolean; emit(event: "end"): boolean; emit(event: "error", err: Error): boolean; emit(event: "lookup", err: Error, address: string, family: string | number, host: string): boolean; emit(event: "timeout"): boolean; on(event: string, listener: Function): this; on(event: "close", listener: (had_error: boolean) => void): this; on(event: "connect", listener: () => void): this; on(event: "data", listener: (data: Buffer) => void): this; on(event: "drain", listener: () => void): this; on(event: "end", listener: () => void): this; on(event: "error", listener: (err: Error) => void): this; on(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this; on(event: "timeout", listener: () => void): this; once(event: string, listener: Function): this; once(event: "close", listener: (had_error: boolean) => void): this; once(event: "connect", listener: () => void): this; once(event: "data", listener: (data: Buffer) => void): this; once(event: "drain", listener: () => void): this; once(event: "end", listener: () => void): this; once(event: "error", listener: (err: Error) => void): this; once(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this; once(event: "timeout", listener: () => void): this; 
prependListener(event: string, listener: Function): this; prependListener(event: "close", listener: (had_error: boolean) => void): this; prependListener(event: "connect", listener: () => void): this; prependListener(event: "data", listener: (data: Buffer) => void): this; prependListener(event: "drain", listener: () => void): this; prependListener(event: "end", listener: () => void): this; prependListener(event: "error", listener: (err: Error) => void): this; prependListener(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this; prependListener(event: "timeout", listener: () => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "close", listener: (had_error: boolean) => void): this; prependOnceListener(event: "connect", listener: () => void): this; prependOnceListener(event: "data", listener: (data: Buffer) => void): this; prependOnceListener(event: "drain", listener: () => void): this; prependOnceListener(event: "end", listener: () => void): this; prependOnceListener(event: "error", listener: (err: Error) => void): this; prependOnceListener(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this; prependOnceListener(event: "timeout", listener: () => void): this; } export var Socket: { new (options?: { fd?: number; allowHalfOpen?: boolean; readable?: boolean; writable?: boolean; }): Socket; }; export interface ListenOptions { port?: number; host?: string; backlog?: number; path?: string; exclusive?: boolean; } export interface Server extends events.EventEmitter { listen(port: number, hostname?: string, backlog?: number, listeningListener?: Function): Server; listen(port: number, hostname?: string, listeningListener?: Function): Server; listen(port: number, backlog?: number, listeningListener?: Function): Server; listen(port: number, listeningListener?: Function): Server; listen(path: string, backlog?: 
number, listeningListener?: Function): Server; listen(path: string, listeningListener?: Function): Server; listen(options: ListenOptions, listeningListener?: Function): Server; listen(handle: any, backlog?: number, listeningListener?: Function): Server; listen(handle: any, listeningListener?: Function): Server; close(callback?: Function): Server; address(): { port: number; family: string; address: string; }; getConnections(cb: (error: Error, count: number) => void): void; ref(): Server; unref(): Server; maxConnections: number; connections: number; /** * events.EventEmitter * 1. close * 2. connection * 3. error * 4. listening */ addListener(event: string, listener: Function): this; addListener(event: "close", listener: () => void): this; addListener(event: "connection", listener: (socket: Socket) => void): this; addListener(event: "error", listener: (err: Error) => void): this; addListener(event: "listening", listener: () => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "close"): boolean; emit(event: "connection", socket: Socket): boolean; emit(event: "error", err: Error): boolean; emit(event: "listening"): boolean; on(event: string, listener: Function): this; on(event: "close", listener: () => void): this; on(event: "connection", listener: (socket: Socket) => void): this; on(event: "error", listener: (err: Error) => void): this; on(event: "listening", listener: () => void): this; once(event: string, listener: Function): this; once(event: "close", listener: () => void): this; once(event: "connection", listener: (socket: Socket) => void): this; once(event: "error", listener: (err: Error) => void): this; once(event: "listening", listener: () => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "close", listener: () => void): this; prependListener(event: "connection", listener: (socket: Socket) => void): this; prependListener(event: "error", listener: (err: Error) => void): this; 
prependListener(event: "listening", listener: () => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "close", listener: () => void): this; prependOnceListener(event: "connection", listener: (socket: Socket) => void): this; prependOnceListener(event: "error", listener: (err: Error) => void): this; prependOnceListener(event: "listening", listener: () => void): this; } export function createServer(connectionListener?: (socket: Socket) => void): Server; export function createServer(options?: { allowHalfOpen?: boolean; }, connectionListener?: (socket: Socket) => void): Server; export function connect(options: { port: number, host?: string, localAddress?: string, localPort?: string, family?: number, allowHalfOpen?: boolean; }, connectionListener?: Function): Socket; export function connect(port: number, host?: string, connectionListener?: Function): Socket; export function connect(path: string, connectionListener?: Function): Socket; export function createConnection(options: { port: number, host?: string, localAddress?: string, localPort?: string, family?: number, allowHalfOpen?: boolean; }, connectionListener?: Function): Socket; export function createConnection(port: number, host?: string, connectionListener?: Function): Socket; export function createConnection(path: string, connectionListener?: Function): Socket; export function isIP(input: string): number; export function isIPv4(input: string): boolean; export function isIPv6(input: string): boolean; } declare module "dgram" { import * as events from "events"; interface RemoteInfo { address: string; family: string; port: number; } interface AddressInfo { address: string; family: string; port: number; } interface BindOptions { port: number; address?: string; exclusive?: boolean; } interface SocketOptions { type: "udp4" | "udp6"; reuseAddr?: boolean; } export function createSocket(type: string, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; export 
function createSocket(options: SocketOptions, callback?: (msg: Buffer, rinfo: RemoteInfo) => void): Socket; export interface Socket extends events.EventEmitter { send(msg: Buffer | String | any[], port: number, address: string, callback?: (error: Error, bytes: number) => void): void; send(msg: Buffer | String | any[], offset: number, length: number, port: number, address: string, callback?: (error: Error, bytes: number) => void): void; bind(port?: number, address?: string, callback?: () => void): void; bind(options: BindOptions, callback?: Function): void; close(callback?: any): void; address(): AddressInfo; setBroadcast(flag: boolean): void; setTTL(ttl: number): void; setMulticastTTL(ttl: number): void; setMulticastLoopback(flag: boolean): void; addMembership(multicastAddress: string, multicastInterface?: string): void; dropMembership(multicastAddress: string, multicastInterface?: string): void; ref(): this; unref(): this; /** * events.EventEmitter * 1. close * 2. error * 3. listening * 4. 
message **/ addListener(event: string, listener: Function): this; addListener(event: "close", listener: () => void): this; addListener(event: "error", listener: (err: Error) => void): this; addListener(event: "listening", listener: () => void): this; addListener(event: "message", listener: (msg: Buffer, rinfo: AddressInfo) => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "close"): boolean; emit(event: "error", err: Error): boolean; emit(event: "listening"): boolean; emit(event: "message", msg: Buffer, rinfo: AddressInfo): boolean; on(event: string, listener: Function): this; on(event: "close", listener: () => void): this; on(event: "error", listener: (err: Error) => void): this; on(event: "listening", listener: () => void): this; on(event: "message", listener: (msg: Buffer, rinfo: AddressInfo) => void): this; once(event: string, listener: Function): this; once(event: "close", listener: () => void): this; once(event: "error", listener: (err: Error) => void): this; once(event: "listening", listener: () => void): this; once(event: "message", listener: (msg: Buffer, rinfo: AddressInfo) => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "close", listener: () => void): this; prependListener(event: "error", listener: (err: Error) => void): this; prependListener(event: "listening", listener: () => void): this; prependListener(event: "message", listener: (msg: Buffer, rinfo: AddressInfo) => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "close", listener: () => void): this; prependOnceListener(event: "error", listener: (err: Error) => void): this; prependOnceListener(event: "listening", listener: () => void): this; prependOnceListener(event: "message", listener: (msg: Buffer, rinfo: AddressInfo) => void): this; } } declare module "fs" { import * as stream from "stream"; import * as events from "events"; interface Stats { isFile(): boolean; 
isDirectory(): boolean; isBlockDevice(): boolean; isCharacterDevice(): boolean; isSymbolicLink(): boolean; isFIFO(): boolean; isSocket(): boolean; dev: number; ino: number; mode: number; nlink: number; uid: number; gid: number; rdev: number; size: number; blksize: number; blocks: number; atime: Date; mtime: Date; ctime: Date; birthtime: Date; } interface FSWatcher extends events.EventEmitter { close(): void; /** * events.EventEmitter * 1. change * 2. error */ addListener(event: string, listener: Function): this; addListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; addListener(event: "error", listener: (error: Error) => void): this; on(event: string, listener: Function): this; on(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; on(event: "error", listener: (error: Error) => void): this; once(event: string, listener: Function): this; once(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; once(event: "error", listener: (error: Error) => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; prependListener(event: "error", listener: (error: Error) => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "change", listener: (eventType: string, filename: string | Buffer) => void): this; prependOnceListener(event: "error", listener: (error: Error) => void): this; } export interface ReadStream extends stream.Readable { close(): void; destroy(): void; bytesRead: number; path: string | Buffer; /** * events.EventEmitter * 1. open * 2. 
close */ addListener(event: string, listener: Function): this; addListener(event: "open", listener: (fd: number) => void): this; addListener(event: "close", listener: () => void): this; on(event: string, listener: Function): this; on(event: "open", listener: (fd: number) => void): this; on(event: "close", listener: () => void): this; once(event: string, listener: Function): this; once(event: "open", listener: (fd: number) => void): this; once(event: "close", listener: () => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "open", listener: (fd: number) => void): this; prependListener(event: "close", listener: () => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "open", listener: (fd: number) => void): this; prependOnceListener(event: "close", listener: () => void): this; } export interface WriteStream extends stream.Writable { close(): void; bytesWritten: number; path: string | Buffer; /** * events.EventEmitter * 1. open * 2. close */ addListener(event: string, listener: Function): this; addListener(event: "open", listener: (fd: number) => void): this; addListener(event: "close", listener: () => void): this; on(event: string, listener: Function): this; on(event: "open", listener: (fd: number) => void): this; on(event: "close", listener: () => void): this; once(event: string, listener: Function): this; once(event: "open", listener: (fd: number) => void): this; once(event: "close", listener: () => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "open", listener: (fd: number) => void): this; prependListener(event: "close", listener: () => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "open", listener: (fd: number) => void): this; prependOnceListener(event: "close", listener: () => void): this; } /** * Asynchronous rename. 
* @param oldPath * @param newPath * @param callback No arguments other than a possible exception are given to the completion callback. */ export function rename(oldPath: string, newPath: string, callback?: (err?: NodeJS.ErrnoException) => void): void; /** * Synchronous rename * @param oldPath * @param newPath */ export function renameSync(oldPath: string, newPath: string): void; export function truncate(path: string | Buffer, callback?: (err?: NodeJS.ErrnoException) => void): void; export function truncate(path: string | Buffer, len: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function truncateSync(path: string | Buffer, len?: number): void; export function ftruncate(fd: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function ftruncate(fd: number, len: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function ftruncateSync(fd: number, len?: number): void; export function chown(path: string | Buffer, uid: number, gid: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function chownSync(path: string | Buffer, uid: number, gid: number): void; export function fchown(fd: number, uid: number, gid: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function fchownSync(fd: number, uid: number, gid: number): void; export function lchown(path: string | Buffer, uid: number, gid: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function lchownSync(path: string | Buffer, uid: number, gid: number): void; export function chmod(path: string | Buffer, mode: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function chmod(path: string | Buffer, mode: string, callback?: (err?: NodeJS.ErrnoException) => void): void; export function chmodSync(path: string | Buffer, mode: number): void; export function chmodSync(path: string | Buffer, mode: string): void; export function fchmod(fd: number, mode: number, callback?: (err?: 
NodeJS.ErrnoException) => void): void; export function fchmod(fd: number, mode: string, callback?: (err?: NodeJS.ErrnoException) => void): void; export function fchmodSync(fd: number, mode: number): void; export function fchmodSync(fd: number, mode: string): void; export function lchmod(path: string | Buffer, mode: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function lchmod(path: string | Buffer, mode: string, callback?: (err?: NodeJS.ErrnoException) => void): void; export function lchmodSync(path: string | Buffer, mode: number): void; export function lchmodSync(path: string | Buffer, mode: string): void; export function stat(path: string | Buffer, callback?: (err: NodeJS.ErrnoException, stats: Stats) => any): void; export function lstat(path: string | Buffer, callback?: (err: NodeJS.ErrnoException, stats: Stats) => any): void; export function fstat(fd: number, callback?: (err: NodeJS.ErrnoException, stats: Stats) => any): void; export function statSync(path: string | Buffer): Stats; export function lstatSync(path: string | Buffer): Stats; export function fstatSync(fd: number): Stats; export function link(srcpath: string | Buffer, dstpath: string | Buffer, callback?: (err?: NodeJS.ErrnoException) => void): void; export function linkSync(srcpath: string | Buffer, dstpath: string | Buffer): void; export function symlink(srcpath: string | Buffer, dstpath: string | Buffer, type?: string, callback?: (err?: NodeJS.ErrnoException) => void): void; export function symlinkSync(srcpath: string | Buffer, dstpath: string | Buffer, type?: string): void; export function readlink(path: string | Buffer, callback?: (err: NodeJS.ErrnoException, linkString: string) => any): void; export function readlinkSync(path: string | Buffer): string; export function realpath(path: string | Buffer, callback?: (err: NodeJS.ErrnoException, resolvedPath: string) => any): void; export function realpath(path: string | Buffer, cache: { [path: string]: string }, callback: 
(err: NodeJS.ErrnoException, resolvedPath: string) => any): void; export function realpathSync(path: string | Buffer, cache?: { [path: string]: string }): string; /** * Asynchronous unlink - deletes the file specified in {path} * * @param path * @param callback No arguments other than a possible exception are given to the completion callback. */ export function unlink(path: string | Buffer, callback?: (err?: NodeJS.ErrnoException) => void): void; /** * Synchronous unlink - deletes the file specified in {path} * * @param path */ export function unlinkSync(path: string | Buffer): void; /** * Asynchronous rmdir - removes the directory specified in {path} * * @param path * @param callback No arguments other than a possible exception are given to the completion callback. */ export function rmdir(path: string | Buffer, callback?: (err?: NodeJS.ErrnoException) => void): void; /** * Synchronous rmdir - removes the directory specified in {path} * * @param path */ export function rmdirSync(path: string | Buffer): void; /** * Asynchronous mkdir - creates the directory specified in {path}. Parameter {mode} defaults to 0777. * * @param path * @param callback No arguments other than a possible exception are given to the completion callback. */ export function mkdir(path: string | Buffer, callback?: (err?: NodeJS.ErrnoException) => void): void; /** * Asynchronous mkdir - creates the directory specified in {path}. Parameter {mode} defaults to 0777. * * @param path * @param mode * @param callback No arguments other than a possible exception are given to the completion callback. */ export function mkdir(path: string | Buffer, mode: number, callback?: (err?: NodeJS.ErrnoException) => void): void; /** * Asynchronous mkdir - creates the directory specified in {path}. Parameter {mode} defaults to 0777. * * @param path * @param mode * @param callback No arguments other than a possible exception are given to the completion callback. 
*/ export function mkdir(path: string | Buffer, mode: string, callback?: (err?: NodeJS.ErrnoException) => void): void; /** * Synchronous mkdir - creates the directory specified in {path}. Parameter {mode} defaults to 0777. * * @param path * @param mode * @param callback No arguments other than a possible exception are given to the completion callback. */ export function mkdirSync(path: string | Buffer, mode?: number): void; /** * Synchronous mkdir - creates the directory specified in {path}. Parameter {mode} defaults to 0777. * * @param path * @param mode * @param callback No arguments other than a possible exception are given to the completion callback. */ export function mkdirSync(path: string | Buffer, mode?: string): void; /** * Asynchronous mkdtemp - Creates a unique temporary directory. Generates six random characters to be appended behind a required prefix to create a unique temporary directory. * * @param prefix * @param callback The created folder path is passed as a string to the callback's second parameter. */ export function mkdtemp(prefix: string, callback?: (err: NodeJS.ErrnoException, folder: string) => void): void; /** * Synchronous mkdtemp - Creates a unique temporary directory. Generates six random characters to be appended behind a required prefix to create a unique temporary directory. * * @param prefix * @returns Returns the created folder path. 
*/ export function mkdtempSync(prefix: string): string; export function readdir(path: string | Buffer, callback?: (err: NodeJS.ErrnoException, files: string[]) => void): void; export function readdirSync(path: string | Buffer): string[]; export function close(fd: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function closeSync(fd: number): void; export function open(path: string | Buffer, flags: string | number, callback: (err: NodeJS.ErrnoException, fd: number) => void): void; export function open(path: string | Buffer, flags: string | number, mode: number, callback: (err: NodeJS.ErrnoException, fd: number) => void): void; export function openSync(path: string | Buffer, flags: string | number, mode?: number): number; export function utimes(path: string | Buffer, atime: number, mtime: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function utimes(path: string | Buffer, atime: Date, mtime: Date, callback?: (err?: NodeJS.ErrnoException) => void): void; export function utimesSync(path: string | Buffer, atime: number, mtime: number): void; export function utimesSync(path: string | Buffer, atime: Date, mtime: Date): void; export function futimes(fd: number, atime: number, mtime: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function futimes(fd: number, atime: Date, mtime: Date, callback?: (err?: NodeJS.ErrnoException) => void): void; export function futimesSync(fd: number, atime: number, mtime: number): void; export function futimesSync(fd: number, atime: Date, mtime: Date): void; export function fsync(fd: number, callback?: (err?: NodeJS.ErrnoException) => void): void; export function fsyncSync(fd: number): void; export function write(fd: number, buffer: Buffer, offset: number, length: number, position: number | null, callback?: (err: NodeJS.ErrnoException, written: number, buffer: Buffer) => void): void; export function write(fd: number, buffer: Buffer, offset: number, length: number, 
callback?: (err: NodeJS.ErrnoException, written: number, buffer: Buffer) => void): void;
export function write(fd: number, data: any, callback?: (err: NodeJS.ErrnoException, written: number, str: string) => void): void;
export function write(fd: number, data: any, offset: number, callback?: (err: NodeJS.ErrnoException, written: number, str: string) => void): void;
export function write(fd: number, data: any, offset: number, encoding: string, callback?: (err: NodeJS.ErrnoException, written: number, str: string) => void): void;
export function writeSync(fd: number, buffer: Buffer, offset: number, length: number, position?: number | null): number;
// FIX: parameter was misspelled "enconding"; names are positional-only in a
// declaration file, so renaming it to "encoding" cannot break any caller.
export function writeSync(fd: number, data: any, position?: number | null, encoding?: string): number;
export function read(fd: number, buffer: Buffer, offset: number, length: number, position: number | null, callback?: (err: NodeJS.ErrnoException, bytesRead: number, buffer: Buffer) => void): void;
export function readSync(fd: number, buffer: Buffer, offset: number, length: number, position: number | null): number;
/**
 * Asynchronous readFile - Asynchronously reads the entire contents of a file.
 *
 * @param filename
 * @param encoding
 * @param callback - The callback is passed two arguments (err, data), where data is the contents of the file.
 */
export function readFile(filename: string, encoding: null, callback: (err: NodeJS.ErrnoException, data: Buffer) => void): void;
export function readFile(filename: string, encoding: string, callback: (err: NodeJS.ErrnoException, data: string) => void): void;
export function readFile(filename: string, encoding: string | null, callback: (err: NodeJS.ErrnoException, data: string | Buffer) => void): void;
/**
 * Asynchronous readFile - Asynchronously reads the entire contents of a file.
 *
 * @param filename
 * @param options An object with optional {encoding} and {flag} properties. If {encoding} is specified, readFile returns a string; otherwise it returns a Buffer.
* @param callback - The callback is passed two arguments (err, data), where data is the contents of the file. */ export function readFile(filename: string, options: { encoding: null; flag?: string; }, callback: (err: NodeJS.ErrnoException, data: Buffer) => void): void; export function readFile(filename: string, options: { encoding: string; flag?: string; }, callback: (err: NodeJS.ErrnoException, data: string) => void): void; export function readFile(filename: string, options: { encoding: string | null; flag?: string; }, callback: (err: NodeJS.ErrnoException, data: string | Buffer) => void): void; /** * Asynchronous readFile - Asynchronously reads the entire contents of a file. * * @param fileName * @param options An object with optional {encoding} and {flag} properties. If {encoding} is specified, readFile returns a string; otherwise it returns a Buffer. * @param callback - The callback is passed two arguments (err, data), where data is the contents of the file. */ export function readFile(filename: string, options: { flag?: string; }, callback: (err: NodeJS.ErrnoException, data: Buffer) => void): void; /** * Asynchronous readFile - Asynchronously reads the entire contents of a file. * * @param fileName * @param callback - The callback is passed two arguments (err, data), where data is the contents of the file. */ export function readFile(filename: string, callback: (err: NodeJS.ErrnoException, data: Buffer) => void): void; /** * Synchronous readFile - Synchronously reads the entire contents of a file. * * @param fileName * @param encoding */ export function readFileSync(filename: string, encoding: null): Buffer; export function readFileSync(filename: string, encoding: string): string; export function readFileSync(filename: string, encoding: string | null): string | Buffer; /** * Synchronous readFile - Synchronously reads the entire contents of a file. * * @param fileName * @param options An object with optional {encoding} and {flag} properties. 
If {encoding} is specified, readFileSync returns a string; otherwise it returns a Buffer. */ export function readFileSync(filename: string, options: { encoding: null; flag?: string; }): Buffer; export function readFileSync(filename: string, options: { encoding: string; flag?: string; }): string; export function readFileSync(filename: string, options: { encoding: string | null; flag?: string; }): string | Buffer; /** * Synchronous readFile - Synchronously reads the entire contents of a file. * * @param fileName * @param options An object with optional {encoding} and {flag} properties. If {encoding} is specified, readFileSync returns a string; otherwise it returns a Buffer. */ export function readFileSync(filename: string, options?: { flag?: string; }): Buffer; export function writeFile(filename: string, data: any, callback?: (err: NodeJS.ErrnoException) => void): void; export function writeFile(filename: string, data: any, encoding: string, callback: (err: NodeJS.ErrnoException) => void): void; export function writeFile(filename: string, data: any, options: { encoding?: string; mode?: number; flag?: string; }, callback?: (err: NodeJS.ErrnoException) => void): void; export function writeFile(filename: string, data: any, options: { encoding?: string; mode?: string; flag?: string; }, callback?: (err: NodeJS.ErrnoException) => void): void; export function writeFileSync(filename: string, data: any, encoding: string): void; export function writeFileSync(filename: string, data: any, options?: { encoding?: string; mode?: number; flag?: string; }): void; export function writeFileSync(filename: string, data: any, options?: { encoding?: string; mode?: string; flag?: string; }): void; export function appendFile(filename: string, data: any, encoding: string, callback: (err: NodeJS.ErrnoException) => void): void; export function appendFile(filename: string, data: any, options: { encoding?: string; mode?: number; flag?: string; }, callback?: (err: NodeJS.ErrnoException) => void): 
void; export function appendFile(filename: string, data: any, options: { encoding?: string; mode?: string; flag?: string; }, callback?: (err: NodeJS.ErrnoException) => void): void; export function appendFile(filename: string, data: any, callback?: (err: NodeJS.ErrnoException) => void): void; export function appendFileSync(filename: string, data: any, encoding: string): void; export function appendFileSync(filename: string, data: any, options?: { encoding?: string; mode?: number; flag?: string; }): void; export function appendFileSync(filename: string, data: any, options?: { encoding?: string; mode?: string; flag?: string; }): void; export function watchFile(filename: string, listener: (curr: Stats, prev: Stats) => void): void; export function watchFile(filename: string, options: { persistent?: boolean; interval?: number; }, listener: (curr: Stats, prev: Stats) => void): void; export function unwatchFile(filename: string, listener?: (curr: Stats, prev: Stats) => void): void; export function watch(filename: string, listener?: (event: string, filename: string) => any): FSWatcher; export function watch(filename: string, encoding: string, listener?: (event: string, filename: string | Buffer) => any): FSWatcher; export function watch(filename: string, options: { persistent?: boolean; recursive?: boolean; encoding?: string }, listener?: (event: string, filename: string | Buffer) => any): FSWatcher; export function exists(path: string | Buffer, callback?: (exists: boolean) => void): void; export function existsSync(path: string | Buffer): boolean; export namespace constants { // File Access Constants /** Constant for fs.access(). File is visible to the calling process. */ export const F_OK: number; /** Constant for fs.access(). File can be read by the calling process. */ export const R_OK: number; /** Constant for fs.access(). File can be written by the calling process. */ export const W_OK: number; /** Constant for fs.access(). File can be executed by the calling process. 
*/ export const X_OK: number; // File Open Constants /** Constant for fs.open(). Flag indicating to open a file for read-only access. */ export const O_RDONLY: number; /** Constant for fs.open(). Flag indicating to open a file for write-only access. */ export const O_WRONLY: number; /** Constant for fs.open(). Flag indicating to open a file for read-write access. */ export const O_RDWR: number; /** Constant for fs.open(). Flag indicating to create the file if it does not already exist. */ export const O_CREAT: number; /** Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. */ export const O_EXCL: number; /** Constant for fs.open(). Flag indicating that if path identifies a terminal device, opening the path shall not cause that terminal to become the controlling terminal for the process (if the process does not already have one). */ export const O_NOCTTY: number; /** Constant for fs.open(). Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. */ export const O_TRUNC: number; /** Constant for fs.open(). Flag indicating that data will be appended to the end of the file. */ export const O_APPEND: number; /** Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. */ export const O_DIRECTORY: number; /** Constant for fs.open(). Flag indicating reading accesses to the file system will no longer result in an update to the atime information associated with the file. This flag is available on Linux operating systems only. */ export const O_NOATIME: number; /** Constant for fs.open(). Flag indicating that the open should fail if the path is a symbolic link. */ export const O_NOFOLLOW: number; /** Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. */ export const O_SYNC: number; /** Constant for fs.open(). 
Flag indicating to open the symbolic link itself rather than the resource it is pointing to. */ export const O_SYMLINK: number; /** Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. */ export const O_DIRECT: number; /** Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. */ export const O_NONBLOCK: number; // File Type Constants /** Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. */ export const S_IFMT: number; /** Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. */ export const S_IFREG: number; /** Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. */ export const S_IFDIR: number; /** Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. */ export const S_IFCHR: number; /** Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. */ export const S_IFBLK: number; /** Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. */ export const S_IFIFO: number; /** Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. */ export const S_IFLNK: number; /** Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. */ export const S_IFSOCK: number; // File Mode Constants /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. */ export const S_IRWXU: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. 
*/ export const S_IRUSR: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. */ export const S_IWUSR: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. */ export const S_IXUSR: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. */ export const S_IRWXG: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. */ export const S_IRGRP: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. */ export const S_IWGRP: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. */ export const S_IXGRP: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. */ export const S_IRWXO: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. */ export const S_IROTH: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. */ export const S_IWOTH: number; /** Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. */ export const S_IXOTH: number; } /** Tests a user's permissions for the file specified by path. 
*/ export function access(path: string | Buffer, callback: (err: NodeJS.ErrnoException) => void): void; export function access(path: string | Buffer, mode: number, callback: (err: NodeJS.ErrnoException) => void): void; /** Synchronous version of fs.access. This throws if any accessibility checks fail, and does nothing otherwise. */ export function accessSync(path: string | Buffer, mode?: number): void; export function createReadStream(path: string | Buffer, options?: { flags?: string; encoding?: string; fd?: number; mode?: number; autoClose?: boolean; start?: number; end?: number; }): ReadStream; export function createWriteStream(path: string | Buffer, options?: { flags?: string; encoding?: string; fd?: number; mode?: number; autoClose?: boolean; start?: number; }): WriteStream; export function fdatasync(fd: number, callback: Function): void; export function fdatasyncSync(fd: number): void; } declare module "path" { /** * A parsed path object generated by path.parse() or consumed by path.format(). */ export interface ParsedPath { /** * The root of the path such as '/' or 'c:\' */ root: string; /** * The full directory path such as '/home/user/dir' or 'c:\path\dir' */ dir: string; /** * The file name including extension (if any) such as 'index.html' */ base: string; /** * The file extension (if any) such as '.html' */ ext: string; /** * The file name without extension (if any) such as 'index' */ name: string; } /** * Normalize a string path, reducing '..' and '.' parts. * When multiple slashes are found, they're replaced by a single one; when the path contains a trailing slash, it is preserved. On Windows backslashes are used. * * @param p string path to normalize. */ export function normalize(p: string): string; /** * Join all arguments together and normalize the resulting path. * Arguments must be strings. In v0.8, non-string arguments were silently ignored. In v0.10 and up, an exception is thrown. * * @param paths paths to join. 
*/ export function join(...paths: string[]): string; /** * The right-most parameter is considered {to}. Other parameters are considered an array of {from}. * * Starting from leftmost {from} paramter, resolves {to} to an absolute path. * * If {to} isn't already absolute, {from} arguments are prepended in right to left order, until an absolute path is found. If after using all {from} paths still no absolute path is found, the current working directory is used as well. The resulting path is normalized, and trailing slashes are removed unless the path gets resolved to the root directory. * * @param pathSegments string paths to join. Non-string arguments are ignored. */ export function resolve(...pathSegments: any[]): string; /** * Determines whether {path} is an absolute path. An absolute path will always resolve to the same location, regardless of the working directory. * * @param path path to test. */ export function isAbsolute(path: string): boolean; /** * Solve the relative path from {from} to {to}. * At times we have two absolute paths, and we need to derive the relative path from one to the other. This is actually the reverse transform of path.resolve. * * @param from * @param to */ export function relative(from: string, to: string): string; /** * Return the directory name of a path. Similar to the Unix dirname command. * * @param p the path to evaluate. */ export function dirname(p: string): string; /** * Return the last portion of a path. Similar to the Unix basename command. * Often used to extract the file name from a fully qualified path. * * @param p the path to evaluate. * @param ext optionally, an extension to remove from the result. */ export function basename(p: string, ext?: string): string; /** * Return the extension of the path, from the last '.' to end of string in the last portion of the path. * If there is no '.' in the last portion of the path or the first character of it is '.', then it returns an empty string * * @param p the path to evaluate. 
*/ export function extname(p: string): string; /** * The platform-specific file separator. '\\' or '/'. */ export var sep: string; /** * The platform-specific file delimiter. ';' or ':'. */ export var delimiter: string; /** * Returns an object from a path string - the opposite of format(). * * @param pathString path to evaluate. */ export function parse(pathString: string): ParsedPath; /** * Returns a path string from an object - the opposite of parse(). * * @param pathObject the path object to convert into a path string. */ export function format(pathObject: ParsedPath): string; export module posix { export function normalize(p: string): string; export function join(...paths: any[]): string; export function resolve(...pathSegments: any[]): string; export function isAbsolute(p: string): boolean; export function relative(from: string, to: string): string; export function dirname(p: string): string; export function basename(p: string, ext?: string): string; export function extname(p: string): string; export var sep: string; export var delimiter: string; export function parse(p: string): ParsedPath; export function format(pP: ParsedPath): string; } export module win32 { export function normalize(p: string): string; export function join(...paths: any[]): string; export function resolve(...pathSegments: any[]): string; export function isAbsolute(p: string): boolean; export function relative(from: string, to: string): string; export function dirname(p: string): string; export function basename(p: string, ext?: string): string; export function extname(p: string): string; export var sep: string; export var delimiter: string; export function parse(p: string): ParsedPath; export function format(pP: ParsedPath): string; } } declare module "string_decoder" { export interface NodeStringDecoder { write(buffer: Buffer): string; end(buffer?: Buffer): string; } export var StringDecoder: { new (encoding?: string): NodeStringDecoder; }; } declare module "tls" { import * as crypto from "crypto"; import * as net
from "net"; import * as stream from "stream"; var CLIENT_RENEG_LIMIT: number; var CLIENT_RENEG_WINDOW: number; export interface Certificate { /** * Country code. */ C: string; /** * Street. */ ST: string; /** * Locality. */ L: string; /** * Organization. */ O: string; /** * Organizational unit. */ OU: string; /** * Common name. */ CN: string; } export interface CipherNameAndProtocol { /** * The cipher name. */ name: string; /** * SSL/TLS protocol version. */ version: string; } export class TLSSocket extends net.Socket { /** * Construct a new tls.TLSSocket object from an existing TCP socket. */ constructor(socket:net.Socket, options?: { /** * An optional TLS context object from tls.createSecureContext() */ secureContext?: SecureContext, /** * If true the TLS socket will be instantiated in server-mode. * Defaults to false. */ isServer?: boolean, /** * An optional net.Server instance. */ server?: net.Server, /** * If true the server will request a certificate from clients that * connect and attempt to verify that certificate. Defaults to * false. */ requestCert?: boolean, /** * If true the server will reject any connection which is not * authorized with the list of supplied CAs. This option only has an * effect if requestCert is true. Defaults to false. */ rejectUnauthorized?: boolean, /** * An array of strings or a Buffer naming possible NPN protocols. * (Protocols should be ordered by their priority.) */ NPNProtocols?: string[] | Buffer, /** * An array of strings or a Buffer naming possible ALPN protocols. * (Protocols should be ordered by their priority.) When the server * receives both NPN and ALPN extensions from the client, ALPN takes * precedence over NPN and the server does not send an NPN extension * to the client. */ ALPNProtocols?: string[] | Buffer, /** * SNICallback(servername, cb) <Function> A function that will be * called if the client supports SNI TLS extension. Two arguments * will be passed when called: servername and cb. 
SNICallback should * invoke cb(null, ctx), where ctx is a SecureContext instance. * (tls.createSecureContext(...) can be used to get a proper * SecureContext.) If SNICallback wasn't provided the default callback * with high-level API will be used (see below). */ SNICallback?: Function, /** * An optional Buffer instance containing a TLS session. */ session?: Buffer, /** * If true, specifies that the OCSP status request extension will be * added to the client hello and an 'OCSPResponse' event will be * emitted on the socket before establishing a secure communication */ requestOCSP?: boolean }); /** * Returns the bound address, the address family name and port of the underlying socket as reported by * the operating system. * @returns {any} - An object with three properties, e.g. { port: 12346, family: 'IPv4', address: '127.0.0.1' }. */ address(): { port: number; family: string; address: string }; /** * A boolean that is true if the peer certificate was signed by one of the specified CAs, otherwise false. */ authorized: boolean; /** * The reason why the peer's certificate has not been verified. * This property becomes available only when tlsSocket.authorized === false. */ authorizationError: Error; /** * Static boolean value, always true. * May be used to distinguish TLS sockets from regular ones. */ encrypted: boolean; /** * Returns an object representing the cipher name and the SSL/TLS protocol version of the current connection. * @returns {CipherNameAndProtocol} - Returns an object representing the cipher name * and the SSL/TLS protocol version of the current connection. */ getCipher(): CipherNameAndProtocol; /** * Returns an object representing the peer's certificate. * The returned object has some properties corresponding to the field of the certificate. * If detailed argument is true the full chain with issuer property will be returned, * if false only the top certificate without issuer property. 
* If the peer does not provide a certificate, it returns null or an empty object. * @param {boolean} detailed - If true; the full chain with issuer property will be returned. * @returns {any} - An object representing the peer's certificate. */ getPeerCertificate(detailed?: boolean): { subject: Certificate; issuerInfo: Certificate; issuer: Certificate; raw: any; valid_from: string; valid_to: string; fingerprint: string; serialNumber: string; }; /** * Could be used to speed up handshake establishment when reconnecting to the server. * @returns {any} - ASN.1 encoded TLS session or undefined if none was negotiated. */ getSession(): any; /** * NOTE: Works only with client TLS sockets. * Useful only for debugging, for session reuse provide session option to tls.connect(). * @returns {any} - TLS session ticket or undefined if none was negotiated. */ getTLSTicket(): any; /** * The string representation of the local IP address. */ localAddress: string; /** * The numeric representation of the local port. */ localPort: number; /** * The string representation of the remote IP address. * For example, '74.125.127.100' or '2001:4860:a005::68'. */ remoteAddress: string; /** * The string representation of the remote IP family. 'IPv4' or 'IPv6'. */ remoteFamily: string; /** * The numeric representation of the remote port. For example, 443. */ remotePort: number; /** * Initiate TLS renegotiation process. * * NOTE: Can be used to request peer's certificate after the secure connection has been established. * ANOTHER NOTE: When running as the server, socket will be destroyed with an error after handshakeTimeout timeout. * @param {TlsOptions} options - The options may contain the following fields: rejectUnauthorized, * requestCert (See tls.createServer() for details). * @param {Function} callback - callback(err) will be executed with null as err, once the renegotiation * is successfully completed. 
*/ renegotiate(options: TlsOptions, callback: (err: Error) => any): any; /** * Set maximum TLS fragment size (default and maximum value is: 16384, minimum is: 512). * Smaller fragment size decreases buffering latency on the client: large fragments are buffered by * the TLS layer until the entire fragment is received and its integrity is verified; * large fragments can span multiple roundtrips, and their processing can be delayed due to packet * loss or reordering. However, smaller fragments add extra TLS framing bytes and CPU overhead, * which may decrease overall server throughput. * @param {number} size - TLS fragment size (default and maximum value is: 16384, minimum is: 512). * @returns {boolean} - Returns true on success, false otherwise. */ setMaxSendFragment(size: number): boolean; /** * events.EventEmitter * 1. OCSPResponse * 2. secureConnect **/ addListener(event: string, listener: Function): this; addListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; addListener(event: "secureConnect", listener: () => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "OCSPResponse", response: Buffer): boolean; emit(event: "secureConnect"): boolean; on(event: string, listener: Function): this; on(event: "OCSPResponse", listener: (response: Buffer) => void): this; on(event: "secureConnect", listener: () => void): this; once(event: string, listener: Function): this; once(event: "OCSPResponse", listener: (response: Buffer) => void): this; once(event: "secureConnect", listener: () => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; prependListener(event: "secureConnect", listener: () => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "OCSPResponse", listener: (response: Buffer) => void): this; prependOnceListener(event: "secureConnect", listener: () => void): this; 
} export interface TlsOptions { host?: string; port?: number; pfx?: string | Buffer[]; key?: string | string[] | Buffer | any[]; passphrase?: string; cert?: string | string[] | Buffer | Buffer[]; ca?: string | string[] | Buffer | Buffer[]; crl?: string | string[]; ciphers?: string; honorCipherOrder?: boolean; requestCert?: boolean; rejectUnauthorized?: boolean; NPNProtocols?: string[] | Buffer; SNICallback?: (servername: string, cb: (err: Error, ctx: SecureContext) => any) => any; ecdhCurve?: string; dhparam?: string | Buffer; handshakeTimeout?: number; ALPNProtocols?: string[] | Buffer; sessionTimeout?: number; ticketKeys?: any; sessionIdContext?: string; secureProtocol?: string; } export interface ConnectionOptions { host?: string; port?: number; socket?: net.Socket; pfx?: string | Buffer key?: string | string[] | Buffer | Buffer[]; passphrase?: string; cert?: string | string[] | Buffer | Buffer[]; ca?: string | Buffer | (string | Buffer)[]; rejectUnauthorized?: boolean; NPNProtocols?: (string | Buffer)[]; servername?: string; path?: string; ALPNProtocols?: (string | Buffer)[]; checkServerIdentity?: (servername: string, cert: string | Buffer | (string | Buffer)[]) => any; secureProtocol?: string; secureContext?: Object; session?: Buffer; minDHSize?: number; } export interface Server extends net.Server { close(callback?: Function): Server; address(): { port: number; family: string; address: string; }; addContext(hostName: string, credentials: { key: string; cert: string; ca: string; }): void; maxConnections: number; connections: number; /** * events.EventEmitter * 1. tlsClientError * 2. newSession * 3. OCSPRequest * 4. resumeSession * 5. 
secureConnection **/ addListener(event: string, listener: Function): this; addListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; addListener(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this; addListener(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this; addListener(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this; addListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "tlsClientError", err: Error, tlsSocket: TLSSocket): boolean; emit(event: "newSession", sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void): boolean; emit(event: "OCSPRequest", certificate: Buffer, issuer: Buffer, callback: Function): boolean; emit(event: "resumeSession", sessionId: any, callback: (err: Error, sessionData: any) => void): boolean; emit(event: "secureConnection", tlsSocket: TLSSocket): boolean; on(event: string, listener: Function): this; on(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; on(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this; on(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this; on(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this; on(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; once(event: string, listener: Function): this; once(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; once(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this; 
once(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this; once(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this; once(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; prependListener(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this; prependListener(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this; prependListener(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this; prependListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "tlsClientError", listener: (err: Error, tlsSocket: TLSSocket) => void): this; prependOnceListener(event: "newSession", listener: (sessionId: any, sessionData: any, callback: (err: Error, resp: Buffer) => void) => void): this; prependOnceListener(event: "OCSPRequest", listener: (certificate: Buffer, issuer: Buffer, callback: Function) => void): this; prependOnceListener(event: "resumeSession", listener: (sessionId: any, callback: (err: Error, sessionData: any) => void) => void): this; prependOnceListener(event: "secureConnection", listener: (tlsSocket: TLSSocket) => void): this; } export interface ClearTextStream extends stream.Duplex { authorized: boolean; authorizationError: Error; getPeerCertificate(): any; getCipher: { name: string; version: string; }; address: { port: number; family: string; address: string; }; remoteAddress: string; remotePort: number; } export interface SecurePair { encrypted: any; cleartext: any; } export 
interface SecureContextOptions { pfx?: string | Buffer; key?: string | Buffer; passphrase?: string; cert?: string | Buffer; ca?: string | Buffer; crl?: string | string[] ciphers?: string; honorCipherOrder?: boolean; } export interface SecureContext { context: any; } export function createServer(options: TlsOptions, secureConnectionListener?: (socket: TLSSocket) => void): Server; export function connect(options: ConnectionOptions, secureConnectionListener?: () => void): TLSSocket; export function connect(port: number, host?: string, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; export function connect(port: number, options?: ConnectionOptions, secureConnectListener?: () => void): TLSSocket; export function createSecurePair(credentials?: crypto.Credentials, isServer?: boolean, requestCert?: boolean, rejectUnauthorized?: boolean): SecurePair; export function createSecureContext(details: SecureContextOptions): SecureContext; } declare module "crypto" { export interface Certificate { exportChallenge(spkac: string | Buffer): Buffer; exportPublicKey(spkac: string | Buffer): Buffer; verifySpkac(spkac: Buffer): boolean; } export var Certificate: { new (): Certificate; (): Certificate; } export var fips: boolean; export interface CredentialDetails { pfx: string; key: string; passphrase: string; cert: string; ca: string | string[]; crl: string | string[]; ciphers: string; } export interface Credentials { context?: any; } export function createCredentials(details: CredentialDetails): Credentials; export function createHash(algorithm: string): Hash; export function createHmac(algorithm: string, key: string | Buffer): Hmac; type Utf8AsciiLatin1Encoding = "utf8" | "ascii" | "latin1"; type HexBase64Latin1Encoding = "latin1" | "hex" | "base64"; type Utf8AsciiBinaryEncoding = "utf8" | "ascii" | "binary"; type HexBase64BinaryEncoding = "binary" | "base64" | "hex"; type ECDHKeyFormat = "compressed" | "uncompressed" | "hybrid"; export interface Hash 
extends NodeJS.ReadWriteStream { update(data: string | Buffer): Hash; update(data: string | Buffer, input_encoding: Utf8AsciiLatin1Encoding): Hash; digest(): Buffer; digest(encoding: HexBase64Latin1Encoding): string; } export interface Hmac extends NodeJS.ReadWriteStream { update(data: string | Buffer): Hmac; update(data: string | Buffer, input_encoding: Utf8AsciiLatin1Encoding): Hmac; digest(): Buffer; digest(encoding: HexBase64Latin1Encoding): string; } export function createCipher(algorithm: string, password: any): Cipher; export function createCipheriv(algorithm: string, key: any, iv: any): Cipher; export interface Cipher extends NodeJS.ReadWriteStream { update(data: Buffer): Buffer; update(data: string, input_encoding: Utf8AsciiBinaryEncoding): Buffer; update(data: Buffer, input_encoding: any, output_encoding: HexBase64BinaryEncoding): string; update(data: string, input_encoding: Utf8AsciiBinaryEncoding, output_encoding: HexBase64BinaryEncoding): string; final(): Buffer; final(output_encoding: string): string; setAutoPadding(auto_padding?: boolean): void; getAuthTag(): Buffer; setAAD(buffer: Buffer): void; } export function createDecipher(algorithm: string, password: any): Decipher; export function createDecipheriv(algorithm: string, key: any, iv: any): Decipher; export interface Decipher extends NodeJS.ReadWriteStream { update(data: Buffer): Buffer; update(data: string, input_encoding: HexBase64BinaryEncoding): Buffer; update(data: Buffer, input_encoding: any, output_encoding: Utf8AsciiBinaryEncoding): string; update(data: string, input_encoding: HexBase64BinaryEncoding, output_encoding: Utf8AsciiBinaryEncoding): string; final(): Buffer; final(output_encoding: string): string; setAutoPadding(auto_padding?: boolean): void; setAuthTag(tag: Buffer): void; setAAD(buffer: Buffer): void; } export function createSign(algorithm: string): Signer; export interface Signer extends NodeJS.WritableStream { update(data: string | Buffer): Signer; update(data: string | 
Buffer, input_encoding: Utf8AsciiLatin1Encoding): Signer;
    sign(private_key: string | { key: string; passphrase: string }): Buffer;
    sign(private_key: string | { key: string; passphrase: string }, output_format: HexBase64Latin1Encoding): string;
}
// Fixed typo in the parameter name: "algorith" -> "algorithm"
// (parameter names in ambient declarations have no caller impact).
export function createVerify(algorithm: string): Verify;
export interface Verify extends NodeJS.WritableStream {
    update(data: string | Buffer): Verify;
    update(data: string | Buffer, input_encoding: Utf8AsciiLatin1Encoding): Verify;
    verify(object: string, signature: Buffer): boolean;
    verify(object: string, signature: string, signature_format: HexBase64Latin1Encoding): boolean;
}
export function createDiffieHellman(prime_length: number, generator?: number): DiffieHellman;
export function createDiffieHellman(prime: Buffer): DiffieHellman;
export function createDiffieHellman(prime: string, prime_encoding: HexBase64Latin1Encoding): DiffieHellman;
export function createDiffieHellman(prime: string, prime_encoding: HexBase64Latin1Encoding, generator: number | Buffer): DiffieHellman;
export function createDiffieHellman(prime: string, prime_encoding: HexBase64Latin1Encoding, generator: string, generator_encoding: HexBase64Latin1Encoding): DiffieHellman;
export interface DiffieHellman {
    generateKeys(): Buffer;
    generateKeys(encoding: HexBase64Latin1Encoding): string;
    computeSecret(other_public_key: Buffer): Buffer;
    computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding): Buffer;
    computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding, output_encoding: HexBase64Latin1Encoding): string;
    getPrime(): Buffer;
    getPrime(encoding: HexBase64Latin1Encoding): string;
    getGenerator(): Buffer;
    getGenerator(encoding: HexBase64Latin1Encoding): string;
    getPublicKey(): Buffer;
    getPublicKey(encoding: HexBase64Latin1Encoding): string;
    getPrivateKey(): Buffer;
    getPrivateKey(encoding: HexBase64Latin1Encoding): string;
    setPublicKey(public_key: Buffer): void;
    setPublicKey(public_key: string, encoding: string):
void; setPrivateKey(private_key: Buffer): void; setPrivateKey(private_key: string, encoding: string): void; verifyError: number; } export function getDiffieHellman(group_name: string): DiffieHellman; export function pbkdf2(password: string | Buffer, salt: string | Buffer, iterations: number, keylen: number, digest: string, callback: (err: Error, derivedKey: Buffer) => any): void; export function pbkdf2Sync(password: string | Buffer, salt: string | Buffer, iterations: number, keylen: number, digest: string): Buffer; export function randomBytes(size: number): Buffer; export function randomBytes(size: number, callback: (err: Error, buf: Buffer) => void): void; export function pseudoRandomBytes(size: number): Buffer; export function pseudoRandomBytes(size: number, callback: (err: Error, buf: Buffer) => void): void; export interface RsaPublicKey { key: string; padding?: number; } export interface RsaPrivateKey { key: string; passphrase?: string, padding?: number; } export function publicEncrypt(public_key: string | RsaPublicKey, buffer: Buffer): Buffer export function privateDecrypt(private_key: string | RsaPrivateKey, buffer: Buffer): Buffer export function privateEncrypt(private_key: string | RsaPrivateKey, buffer: Buffer): Buffer export function publicDecrypt(public_key: string | RsaPublicKey, buffer: Buffer): Buffer export function getCiphers(): string[]; export function getCurves(): string[]; export function getHashes(): string[]; export interface ECDH { generateKeys(): Buffer; generateKeys(encoding: HexBase64Latin1Encoding): string; generateKeys(encoding: HexBase64Latin1Encoding, format: ECDHKeyFormat): string; computeSecret(other_public_key: Buffer): Buffer; computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding): Buffer; computeSecret(other_public_key: string, input_encoding: HexBase64Latin1Encoding, output_encoding: HexBase64Latin1Encoding): string; getPrivateKey(): Buffer; getPrivateKey(encoding: HexBase64Latin1Encoding): string; 
getPublicKey(): Buffer; getPublicKey(encoding: HexBase64Latin1Encoding): string; getPublicKey(encoding: HexBase64Latin1Encoding, format: ECDHKeyFormat): string; setPrivateKey(private_key: Buffer): void; setPrivateKey(private_key: string, encoding: HexBase64Latin1Encoding): void; } export function createECDH(curve_name: string): ECDH; export function timingSafeEqual(a: Buffer, b: Buffer): boolean; export var DEFAULT_ENCODING: string; } declare module "stream" { import * as events from "events"; class internal extends events.EventEmitter { pipe<T extends NodeJS.WritableStream>(destination: T, options?: { end?: boolean; }): T; } namespace internal { export class Stream extends internal { } export interface ReadableOptions { highWaterMark?: number; encoding?: string; objectMode?: boolean; read?: (this: Readable, size?: number) => any; } export class Readable extends Stream implements NodeJS.ReadableStream { readable: boolean; constructor(opts?: ReadableOptions); _read(size: number): void; read(size?: number): any; setEncoding(encoding: string): void; pause(): this; resume(): this; isPaused(): boolean; pipe<T extends NodeJS.WritableStream>(destination: T, options?: { end?: boolean; }): T; unpipe<T extends NodeJS.WritableStream>(destination?: T): void; unshift(chunk: any): void; wrap(oldStream: NodeJS.ReadableStream): Readable; push(chunk: any, encoding?: string): boolean; /** * Event emitter * The defined events on documents including: * 1. close * 2. data * 3. end * 4. readable * 5. 
error **/ addListener(event: string, listener: Function): this; addListener(event: string, listener: Function): this; addListener(event: "close", listener: () => void): this; addListener(event: "data", listener: (chunk: Buffer | string) => void): this; addListener(event: "end", listener: () => void): this; addListener(event: "readable", listener: () => void): this; addListener(event: "error", listener: (err: Error) => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "close"): boolean; emit(event: "data", chunk: Buffer | string): boolean; emit(event: "end"): boolean; emit(event: "readable"): boolean; emit(event: "error", err: Error): boolean; on(event: string, listener: Function): this; on(event: "close", listener: () => void): this; on(event: "data", listener: (chunk: Buffer | string) => void): this; on(event: "end", listener: () => void): this; on(event: "readable", listener: () => void): this; on(event: "error", listener: (err: Error) => void): this; once(event: string, listener: Function): this; once(event: "close", listener: () => void): this; once(event: "data", listener: (chunk: Buffer | string) => void): this; once(event: "end", listener: () => void): this; once(event: "readable", listener: () => void): this; once(event: "error", listener: (err: Error) => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "close", listener: () => void): this; prependListener(event: "data", listener: (chunk: Buffer | string) => void): this; prependListener(event: "end", listener: () => void): this; prependListener(event: "readable", listener: () => void): this; prependListener(event: "error", listener: (err: Error) => void): this; prependOnceListener(event: string, listener: Function): this; prependOnceListener(event: "close", listener: () => void): this; prependOnceListener(event: "data", listener: (chunk: Buffer | string) => void): this; prependOnceListener(event: "end", listener: () => void): this; 
prependOnceListener(event: "readable", listener: () => void): this; prependOnceListener(event: "error", listener: (err: Error) => void): this; removeListener(event: string, listener: Function): this; removeListener(event: "close", listener: () => void): this; removeListener(event: "data", listener: (chunk: Buffer | string) => void): this; removeListener(event: "end", listener: () => void): this; removeListener(event: "readable", listener: () => void): this; removeListener(event: "error", listener: (err: Error) => void): this; } export interface WritableOptions { highWaterMark?: number; decodeStrings?: boolean; objectMode?: boolean; write?: (chunk: string | Buffer, encoding: string, callback: Function) => any; writev?: (chunks: { chunk: string | Buffer, encoding: string }[], callback: Function) => any; } export class Writable extends Stream implements NodeJS.WritableStream { writable: boolean; constructor(opts?: WritableOptions); _write(chunk: any, encoding: string, callback: Function): void; write(chunk: any, cb?: Function): boolean; write(chunk: any, encoding?: string, cb?: Function): boolean; setDefaultEncoding(encoding: string): this; end(): void; end(chunk: any, cb?: Function): void; end(chunk: any, encoding?: string, cb?: Function): void; /** * Event emitter * The defined events on documents including: * 1. close * 2. drain * 3. error * 4. finish * 5. pipe * 6. 
unpipe **/ addListener(event: string, listener: Function): this; addListener(event: "close", listener: () => void): this; addListener(event: "drain", listener: () => void): this; addListener(event: "error", listener: (err: Error) => void): this; addListener(event: "finish", listener: () => void): this; addListener(event: "pipe", listener: (src: Readable) => void): this; addListener(event: "unpipe", listener: (src: Readable) => void): this; emit(event: string | symbol, ...args: any[]): boolean; emit(event: "close"): boolean; emit(event: "drain", chunk: Buffer | string): boolean; emit(event: "error", err: Error): boolean; emit(event: "finish"): boolean; emit(event: "pipe", src: Readable): boolean; emit(event: "unpipe", src: Readable): boolean; on(event: string, listener: Function): this; on(event: "close", listener: () => void): this; on(event: "drain", listener: () => void): this; on(event: "error", listener: (err: Error) => void): this; on(event: "finish", listener: () => void): this; on(event: "pipe", listener: (src: Readable) => void): this; on(event: "unpipe", listener: (src: Readable) => void): this; once(event: string, listener: Function): this; once(event: "close", listener: () => void): this; once(event: "drain", listener: () => void): this; once(event: "error", listener: (err: Error) => void): this; once(event: "finish", listener: () => void): this; once(event: "pipe", listener: (src: Readable) => void): this; once(event: "unpipe", listener: (src: Readable) => void): this; prependListener(event: string, listener: Function): this; prependListener(event: "close", listener: () => void): this; prependListener(event: "drain", listener: () => void): this; prependListener(event: "error", listener: (err: Error) => void): this; prependListener(event: "finish", listener: () => void): this; prependListener(event: "pipe", listener: (src: Readable) => void): this; prependListener(event: "unpipe", listener: (src: Readable) => void): this; prependOnceListener(event: 
string, listener: Function): this; prependOnceListener(event: "close", listener: () => void): this; prependOnceListener(event: "drain", listener: () => void): this; prependOnceListener(event: "error", listener: (err: Error) => void): this; prependOnceListener(event: "finish", listener: () => void): this; prependOnceListener(event: "pipe", listener: (src: Readable) => void): this; prependOnceListener(event: "unpipe", listener: (src: Readable) => void): this; removeListener(event: string, listener: Function): this; removeListener(event: "close", listener: () => void): this; removeListener(event: "drain", listener: () => void): this; removeListener(event: "error", listener: (err: Error) => void): this; removeListener(event: "finish", listener: () => void): this; removeListener(event: "pipe", listener: (src: Readable) => void): this; removeListener(event: "unpipe", listener: (src: Readable) => void): this; } export interface DuplexOptions extends ReadableOptions, WritableOptions { allowHalfOpen?: boolean; readableObjectMode?: boolean; writableObjectMode?: boolean; } // Note: Duplex extends both Readable and Writable. 
export class Duplex extends Readable implements Writable { writable: boolean; constructor(opts?: DuplexOptions); _write(chunk: any, encoding: string, callback: Function): void; write(chunk: any, cb?: Function): boolean; write(chunk: any, encoding?: string, cb?: Function): boolean; setDefaultEncoding(encoding: string): this; end(): void; end(chunk: any, cb?: Function): void; end(chunk: any, encoding?: string, cb?: Function): void; } export interface TransformOptions extends DuplexOptions { transform?: (chunk: string | Buffer, encoding: string, callback: Function) => any; flush?: (callback: Function) => any; } export class Transform extends Duplex { constructor(opts?: TransformOptions); _transform(chunk: any, encoding: string, callback: Function): void; } export class PassThrough extends Transform { } } export = internal; } declare module "util" { export interface InspectOptions extends NodeJS.InspectOptions {} export function format(format: any, ...param: any[]): string; export function debug(string: string): void; export function error(...param: any[]): void; export function puts(...param: any[]): void; export function print(...param: any[]): void; export function log(string: string): void; export function inspect(object: any, showHidden?: boolean, depth?: number | null, color?: boolean): string; export function inspect(object: any, options: InspectOptions): string; export function isArray(object: any): boolean; export function isRegExp(object: any): boolean; export function isDate(object: any): boolean; export function isError(object: any): boolean; export function inherits(constructor: any, superConstructor: any): void; export function debuglog(key: string): (msg: string, ...param: any[]) => void; export function isBoolean(object: any): boolean; export function isBuffer(object: any): boolean; export function isFunction(object: any): boolean; export function isNull(object: any): boolean; export function isNullOrUndefined(object: any): boolean; export function 
isNumber(object: any): boolean; export function isObject(object: any): boolean; export function isPrimitive(object: any): boolean; export function isString(object: any): boolean; export function isSymbol(object: any): boolean; export function isUndefined(object: any): boolean; export function deprecate<T extends Function>(fn: T, message: string): T; } declare module "assert" { function internal(value: any, message?: string): void; namespace internal { export class AssertionError implements Error { name: string; message: string; actual: any; expected: any; operator: string; generatedMessage: boolean; constructor(options?: { message?: string; actual?: any; expected?: any; operator?: string; stackStartFunction?: Function }); } export function fail(actual: any, expected: any, message?: string, operator?: string): void; export function ok(value: any, message?: string): void; export function equal(actual: any, expected: any, message?: string): void; export function notEqual(actual: any, expected: any, message?: string): void; export function deepEqual(actual: any, expected: any, message?: string): void; export function notDeepEqual(acutal: any, expected: any, message?: string): void; export function strictEqual(actual: any, expected: any, message?: string): void; export function notStrictEqual(actual: any, expected: any, message?: string): void; export function deepStrictEqual(actual: any, expected: any, message?: string): void; export function notDeepStrictEqual(actual: any, expected: any, message?: string): void; export function throws(block: Function, message?: string): void; export function throws(block: Function, error: Function, message?: string): void; export function throws(block: Function, error: RegExp, message?: string): void; export function throws(block: Function, error: (err: any) => boolean, message?: string): void; export function doesNotThrow(block: Function, message?: string): void; export function doesNotThrow(block: Function, error: Function, 
message?: string): void; export function doesNotThrow(block: Function, error: RegExp, message?: string): void; export function doesNotThrow(block: Function, error: (err: any) => boolean, message?: string): void; export function ifError(value: any): void; } export = internal; } declare module "tty" { import * as net from "net"; export function isatty(fd: number): boolean; export interface ReadStream extends net.Socket { isRaw: boolean; setRawMode(mode: boolean): void; isTTY: boolean; } export interface WriteStream extends net.Socket { columns: number; rows: number; isTTY: boolean; } } declare module "domain" { import * as events from "events"; export class Domain extends events.EventEmitter implements NodeJS.Domain { run(fn: Function): void; add(emitter: events.EventEmitter): void; remove(emitter: events.EventEmitter): void; bind(cb: (err: Error, data: any) => any): any; intercept(cb: (data: any) => any): any; dispose(): void; members: any[]; enter(): void; exit(): void; } export function create(): Domain; } declare module "constants" { export var E2BIG: number; export var EACCES: number; export var EADDRINUSE: number; export var EADDRNOTAVAIL: number; export var EAFNOSUPPORT: number; export var EAGAIN: number; export var EALREADY: number; export var EBADF: number; export var EBADMSG: number; export var EBUSY: number; export var ECANCELED: number; export var ECHILD: number; export var ECONNABORTED: number; export var ECONNREFUSED: number; export var ECONNRESET: number; export var EDEADLK: number; export var EDESTADDRREQ: number; export var EDOM: number; export var EEXIST: number; export var EFAULT: number; export var EFBIG: number; export var EHOSTUNREACH: number; export var EIDRM: number; export var EILSEQ: number; export var EINPROGRESS: number; export var EINTR: number; export var EINVAL: number; export var EIO: number; export var EISCONN: number; export var EISDIR: number; export var ELOOP: number; export var EMFILE: number; export var EMLINK: number; export var 
EMSGSIZE: number; export var ENAMETOOLONG: number; export var ENETDOWN: number; export var ENETRESET: number; export var ENETUNREACH: number; export var ENFILE: number; export var ENOBUFS: number; export var ENODATA: number; export var ENODEV: number; export var ENOENT: number; export var ENOEXEC: number; export var ENOLCK: number; export var ENOLINK: number; export var ENOMEM: number; export var ENOMSG: number; export var ENOPROTOOPT: number; export var ENOSPC: number; export var ENOSR: number; export var ENOSTR: number; export var ENOSYS: number; export var ENOTCONN: number; export var ENOTDIR: number; export var ENOTEMPTY: number; export var ENOTSOCK: number; export var ENOTSUP: number; export var ENOTTY: number; export var ENXIO: number; export var EOPNOTSUPP: number; export var EOVERFLOW: number; export var EPERM: number; export var EPIPE: number; export var EPROTO: number; export var EPROTONOSUPPORT: number; export var EPROTOTYPE: number; export var ERANGE: number; export var EROFS: number; export var ESPIPE: number; export var ESRCH: number; export var ETIME: number; export var ETIMEDOUT: number; export var ETXTBSY: number; export var EWOULDBLOCK: number; export var EXDEV: number; export var WSAEINTR: number; export var WSAEBADF: number; export var WSAEACCES: number; export var WSAEFAULT: number; export var WSAEINVAL: number; export var WSAEMFILE: number; export var WSAEWOULDBLOCK: number; export var WSAEINPROGRESS: number; export var WSAEALREADY: number; export var WSAENOTSOCK: number; export var WSAEDESTADDRREQ: number; export var WSAEMSGSIZE: number; export var WSAEPROTOTYPE: number; export var WSAENOPROTOOPT: number; export var WSAEPROTONOSUPPORT: number; export var WSAESOCKTNOSUPPORT: number; export var WSAEOPNOTSUPP: number; export var WSAEPFNOSUPPORT: number; export var WSAEAFNOSUPPORT: number; export var WSAEADDRINUSE: number; export var WSAEADDRNOTAVAIL: number; export var WSAENETDOWN: number; export var WSAENETUNREACH: number; export var 
WSAENETRESET: number; export var WSAECONNABORTED: number; export var WSAECONNRESET: number; export var WSAENOBUFS: number; export var WSAEISCONN: number; export var WSAENOTCONN: number; export var WSAESHUTDOWN: number; export var WSAETOOMANYREFS: number; export var WSAETIMEDOUT: number; export var WSAECONNREFUSED: number; export var WSAELOOP: number; export var WSAENAMETOOLONG: number; export var WSAEHOSTDOWN: number; export var WSAEHOSTUNREACH: number; export var WSAENOTEMPTY: number; export var WSAEPROCLIM: number; export var WSAEUSERS: number; export var WSAEDQUOT: number; export var WSAESTALE: number; export var WSAEREMOTE: number; export var WSASYSNOTREADY: number; export var WSAVERNOTSUPPORTED: number; export var WSANOTINITIALISED: number; export var WSAEDISCON: number; export var WSAENOMORE: number; export var WSAECANCELLED: number; export var WSAEINVALIDPROCTABLE: number; export var WSAEINVALIDPROVIDER: number; export var WSAEPROVIDERFAILEDINIT: number; export var WSASYSCALLFAILURE: number; export var WSASERVICE_NOT_FOUND: number; export var WSATYPE_NOT_FOUND: number; export var WSA_E_NO_MORE: number; export var WSA_E_CANCELLED: number; export var WSAEREFUSED: number; export var SIGHUP: number; export var SIGINT: number; export var SIGILL: number; export var SIGABRT: number; export var SIGFPE: number; export var SIGKILL: number; export var SIGSEGV: number; export var SIGTERM: number; export var SIGBREAK: number; export var SIGWINCH: number; export var SSL_OP_ALL: number; export var SSL_OP_ALLOW_UNSAFE_LEGACY_RENEGOTIATION: number; export var SSL_OP_CIPHER_SERVER_PREFERENCE: number; export var SSL_OP_CISCO_ANYCONNECT: number; export var SSL_OP_COOKIE_EXCHANGE: number; export var SSL_OP_CRYPTOPRO_TLSEXT_BUG: number; export var SSL_OP_DONT_INSERT_EMPTY_FRAGMENTS: number; export var SSL_OP_EPHEMERAL_RSA: number; export var SSL_OP_LEGACY_SERVER_CONNECT: number; export var SSL_OP_MICROSOFT_BIG_SSLV3_BUFFER: number; export var SSL_OP_MICROSOFT_SESS_ID_BUG: number; 
export var SSL_OP_MSIE_SSLV2_RSA_PADDING: number; export var SSL_OP_NETSCAPE_CA_DN_BUG: number; export var SSL_OP_NETSCAPE_CHALLENGE_BUG: number; export var SSL_OP_NETSCAPE_DEMO_CIPHER_CHANGE_BUG: number; export var SSL_OP_NETSCAPE_REUSE_CIPHER_CHANGE_BUG: number; export var SSL_OP_NO_COMPRESSION: number; export var SSL_OP_NO_QUERY_MTU: number; export var SSL_OP_NO_SESSION_RESUMPTION_ON_RENEGOTIATION: number; export var SSL_OP_NO_SSLv2: number; export var SSL_OP_NO_SSLv3: number; export var SSL_OP_NO_TICKET: number; export var SSL_OP_NO_TLSv1: number; export var SSL_OP_NO_TLSv1_1: number; export var SSL_OP_NO_TLSv1_2: number; export var SSL_OP_PKCS1_CHECK_1: number; export var SSL_OP_PKCS1_CHECK_2: number; export var SSL_OP_SINGLE_DH_USE: number; export var SSL_OP_SINGLE_ECDH_USE: number; export var SSL_OP_SSLEAY_080_CLIENT_DH_BUG: number; export var SSL_OP_SSLREF2_REUSE_CERT_TYPE_BUG: number; export var SSL_OP_TLS_BLOCK_PADDING_BUG: number; export var SSL_OP_TLS_D5_BUG: number; export var SSL_OP_TLS_ROLLBACK_BUG: number; export var ENGINE_METHOD_DSA: number; export var ENGINE_METHOD_DH: number; export var ENGINE_METHOD_RAND: number; export var ENGINE_METHOD_ECDH: number; export var ENGINE_METHOD_ECDSA: number; export var ENGINE_METHOD_CIPHERS: number; export var ENGINE_METHOD_DIGESTS: number; export var ENGINE_METHOD_STORE: number; export var ENGINE_METHOD_PKEY_METHS: number; export var ENGINE_METHOD_PKEY_ASN1_METHS: number; export var ENGINE_METHOD_ALL: number; export var ENGINE_METHOD_NONE: number; export var DH_CHECK_P_NOT_SAFE_PRIME: number; export var DH_CHECK_P_NOT_PRIME: number; export var DH_UNABLE_TO_CHECK_GENERATOR: number; export var DH_NOT_SUITABLE_GENERATOR: number; export var NPN_ENABLED: number; export var RSA_PKCS1_PADDING: number; export var RSA_SSLV23_PADDING: number; export var RSA_NO_PADDING: number; export var RSA_PKCS1_OAEP_PADDING: number; export var RSA_X931_PADDING: number; export var RSA_PKCS1_PSS_PADDING: number; export var 
POINT_CONVERSION_COMPRESSED: number; export var POINT_CONVERSION_UNCOMPRESSED: number; export var POINT_CONVERSION_HYBRID: number; export var O_RDONLY: number; export var O_WRONLY: number; export var O_RDWR: number; export var S_IFMT: number; export var S_IFREG: number; export var S_IFDIR: number; export var S_IFCHR: number; export var S_IFBLK: number; export var S_IFIFO: number; export var S_IFSOCK: number; export var S_IRWXU: number; export var S_IRUSR: number; export var S_IWUSR: number; export var S_IXUSR: number; export var S_IRWXG: number; export var S_IRGRP: number; export var S_IWGRP: number; export var S_IXGRP: number; export var S_IRWXO: number; export var S_IROTH: number; export var S_IWOTH: number; export var S_IXOTH: number; export var S_IFLNK: number; export var O_CREAT: number; export var O_EXCL: number; export var O_NOCTTY: number; export var O_DIRECTORY: number; export var O_NOATIME: number; export var O_NOFOLLOW: number; export var O_SYNC: number; export var O_SYMLINK: number; export var O_DIRECT: number; export var O_NONBLOCK: number; export var O_TRUNC: number; export var O_APPEND: number; export var F_OK: number; export var R_OK: number; export var W_OK: number; export var X_OK: number; export var UV_UDP_REUSEADDR: number; export var SIGQUIT: number; export var SIGTRAP: number; export var SIGIOT: number; export var SIGBUS: number; export var SIGUSR1: number; export var SIGUSR2: number; export var SIGPIPE: number; export var SIGALRM: number; export var SIGCHLD: number; export var SIGSTKFLT: number; export var SIGCONT: number; export var SIGSTOP: number; export var SIGTSTP: number; export var SIGTTIN: number; export var SIGTTOU: number; export var SIGURG: number; export var SIGXCPU: number; export var SIGXFSZ: number; export var SIGVTALRM: number; export var SIGPROF: number; export var SIGIO: number; export var SIGPOLL: number; export var SIGPWR: number; export var SIGSYS: number; export var SIGUNUSED: number; export var defaultCoreCipherList: 
string; export var defaultCipherList: string; export var ENGINE_METHOD_RSA: number; export var ALPN_ENABLED: number; } declare module "process" { export = process; } declare module "v8" { interface HeapSpaceInfo { space_name: string; space_size: number; space_used_size: number; space_available_size: number; physical_space_size: number; } const enum DoesZapCodeSpaceFlag { Disabled = 0, Enabled = 1 } interface HeapInfo { total_heap_size: number; total_heap_size_executable: number; total_physical_size: number; total_available_size: number; used_heap_size: number; heap_size_limit: number; malloced_memory: number; peak_malloced_memory: number; does_zap_garbage: DoesZapCodeSpaceFlag; } export function getHeapStatistics(): HeapInfo; export function getHeapSpaceStatistics(): HeapSpaceInfo[]; export function setFlagsFromString(flags: string): void; } declare module "timers" { export function setTimeout(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer; export function clearTimeout(timeoutId: NodeJS.Timer): void; export function setInterval(callback: (...args: any[]) => void, ms: number, ...args: any[]): NodeJS.Timer; export function clearInterval(intervalId: NodeJS.Timer): void; export function setImmediate(callback: (...args: any[]) => void, ...args: any[]): any; export function clearImmediate(immediateId: any): void; } declare module "console" { export = console; } /** * _debugger module is not documented. 
* Source code is at https://github.com/nodejs/node/blob/master/lib/_debugger.js */ declare module "_debugger" { export interface Packet { raw: string; headers: string[]; body: Message; } export interface Message { seq: number; type: string; } export interface RequestInfo { command: string; arguments: any; } export interface Request extends Message, RequestInfo { } export interface Event extends Message { event: string; body?: any; } export interface Response extends Message { request_seq: number; success: boolean; /** Contains error message if success === false. */ message?: string; /** Contains message body if success === true. */ body?: any; } export interface BreakpointMessageBody { type: string; target: number; line: number; } export class Protocol { res: Packet; state: string; execute(data: string): void; serialize(rq: Request): string; onResponse: (pkt: Packet) => void; } export var NO_FRAME: number; export var port: number; export interface ScriptDesc { name: string; id: number; isNative?: boolean; handle?: number; type: string; lineOffset?: number; columnOffset?: number; lineCount?: number; } export interface Breakpoint { id: number; scriptId: number; script: ScriptDesc; line: number; condition?: string; scriptReq?: string; } export interface RequestHandler { (err: boolean, body: Message, res: Packet): void; request_seq?: number; } export interface ResponseBodyHandler { (err: boolean, body?: any): void; request_seq?: number; } export interface ExceptionInfo { text: string; } export interface BreakResponse { script?: ScriptDesc; exception?: ExceptionInfo; sourceLine: number; sourceLineText: string; sourceColumn: number; } export function SourceInfo(body: BreakResponse): string; export interface ClientInstance extends NodeJS.EventEmitter { protocol: Protocol; scripts: ScriptDesc[]; handles: ScriptDesc[]; breakpoints: Breakpoint[]; currentSourceLine: number; currentSourceColumn: number; currentSourceLineText: string; currentFrame: number; currentScript: 
string; connect(port: number, host: string): void; req(req: any, cb: RequestHandler): void; reqFrameEval(code: string, frame: number, cb: RequestHandler): void; mirrorObject(obj: any, depth: number, cb: ResponseBodyHandler): void; setBreakpoint(rq: BreakpointMessageBody, cb: RequestHandler): void; clearBreakpoint(rq: Request, cb: RequestHandler): void; listbreakpoints(cb: RequestHandler): void; reqSource(from: number, to: number, cb: RequestHandler): void; reqScripts(cb: any): void; reqContinue(cb: RequestHandler): void; } export var Client : { new (): ClientInstance } }<|fim▁end|>
on(event: "error", listener: (err: Error) => void): this; on(event: "lookup", listener: (err: Error, address: string, family: string | number, host: string) => void): this;
<|file_name|>hello.rs<|end_file_name|><|fim▁begin|>extern crate hayaku_http; use hayaku_http::{Http, Handler, Request, Response}; #[derive(Copy, Clone)] struct Router; impl Handler<()> for Router { fn handler(&self, _req: &Request, res: &mut Response, _ctx: &()) { res.body(b"hello, world!"); } } fn main() {<|fim▁hole|> Http::new(router, ()).threads(4).listen_and_serve(addr); }<|fim▁end|>
let addr = "127.0.0.1:3000".parse().unwrap(); let router = Router;
<|file_name|>typeset.js<|end_file_name|><|fim▁begin|>var Typeset = require("typeset"); module.exports = function typeset(req, res, next) { var send = res.send; res.send = function(string) { var html = string instanceof Buffer ? string.toString() : string;<|fim▁hole|> }; next(); };<|fim▁end|>
html = Typeset(html, { disable: ["hyphenate"] }); send.call(this, html);
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os import sys if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "collegeassist.settings") try: from django.core.management import execute_from_command_line except ImportError: # The above import may fail for some other reason. Ensure that the # issue is really that Django is missing to avoid masking other # exceptions on Python 2.<|fim▁hole|> try: import django except ImportError: raise ImportError( "Couldn't import Django. Are you sure it's installed and " "available on your PYTHONPATH environment variable? Did you " "forget to activate a virtual environment?" ) raise execute_from_command_line(sys.argv)<|fim▁end|>
<|file_name|>foundation.section.js<|end_file_name|><|fim▁begin|>/*jslint unparam: true, browser: true, indent: 2 */ <|fim▁hole|> Foundation.libs.section = { name: 'section', version : '4.1.2', settings : { deep_linking: false, one_up: true, callback: function (){} }, init : function (scope, method, options) { var self = this; Foundation.inherit(this, 'throttle data_options position_right offset_right'); if (typeof method != 'string') { this.set_active_from_hash(); this.events(); return true; } else { return this[method].call(this, options); } }, events : function () { var self = this; $(this.scope) .on('click.fndtn.section', '[data-section] .title, [data-section] [data-section-title]', function (e) { var $this = $(this), section = $this.closest('[data-section]'); self.toggle_active.call(this, e, self); }); $(window) .on('resize.fndtn.section', self.throttle(function () { self.resize.call(this); }, 30)) .on('hashchange', function () { if (!self.settings.toggled){ self.set_active_from_hash(); $(this).trigger('resize'); } }).trigger('resize'); $(document) .on('click.fndtn.section', function (e) { if ($(e.target).closest('.title, [data-section-title]').length < 1) { $('[data-section="vertical-nav"], [data-section="horizontal-nav"]') .find('section, .section, [data-section-region]') .removeClass('active') .attr('style', ''); } }); }, toggle_active : function (e, self) { var $this = $(this), section = $this.closest('section, .section, [data-section-region]'), content = section.find('.content, [data-section-content]'), parent = section.closest('[data-section]'), self = Foundation.libs.section, settings = $.extend({}, self.settings, self.data_options(parent)); self.settings.toggled = true; if (!settings.deep_linking && content.length > 0) { e.preventDefault(); } if (section.hasClass('active')) { if (self.small(parent) || self.is_vertical(parent) || self.is_horizontal(parent) || self.is_accordion(parent)) { section .removeClass('active') .attr('style', ''); } } else { var 
prev_active_section = null, title_height = self.outerHeight(section.find('.title, [data-section-title]')); if (self.small(parent) || settings.one_up) { prev_active_section = $this.closest('[data-section]').find('section.active, .section.active, .active[data-section-region]'); if (self.small(parent)) { prev_active_section.attr('style', ''); } else { prev_active_section.attr('style', 'visibility: hidden; padding-top: '+title_height+'px;'); } } if (self.small(parent)) { section.attr('style', ''); } else { section.css('padding-top', title_height); } section.addClass('active'); if (prev_active_section !== null) { prev_active_section.removeClass('active').attr('style', ''); } } setTimeout(function () { self.settings.toggled = false; }, 300); settings.callback(); }, resize : function () { var sections = $('[data-section]'), self = Foundation.libs.section; sections.each(function() { var $this = $(this), active_section = $this.find('section.active, .section.active, .active[data-section-region]'), settings = $.extend({}, self.settings, self.data_options($this)); if (active_section.length > 1) { active_section .not(':first') .removeClass('active') .attr('style', ''); } else if (active_section.length < 1 && !self.is_vertical($this) && !self.is_horizontal($this) && !self.is_accordion($this)) { var first = $this.find('section, .section, [data-section-region]').first(); if (settings.one_up) { first.addClass('active'); } if (self.small($this)) { first.attr('style', ''); } else { first.css('padding-top', self.outerHeight(first.find('.title, [data-section-title]'))); } } if (self.small($this)) { active_section.attr('style', ''); } else { active_section.css('padding-top', self.outerHeight(active_section.find('.title, [data-section-title]'))); } self.position_titles($this); if (self.is_horizontal($this) && !self.small($this)) { self.position_content($this); } else { self.position_content($this, false); } }); }, is_vertical : function (el) { return 
/vertical-nav/i.test(el.data('section')); }, is_horizontal : function (el) { return /horizontal-nav/i.test(el.data('section')); }, is_accordion : function (el) { return /accordion/i.test(el.data('section')); }, is_tabs : function (el) { return /tabs/i.test(el.data('section')); }, set_active_from_hash : function () { var hash = window.location.hash.substring(1), sections = $('[data-section]'), self = this; sections.each(function () { var section = $(this), settings = $.extend({}, self.settings, self.data_options(section)); if (hash.length > 0 && settings.deep_linking) { section .find('section, .section, [data-section-region]') .attr('style', '') .removeClass('active'); section .find('.content[data-slug="' + hash + '"], [data-section-content][data-slug="' + hash + '"]') .closest('section, .section, [data-section-region]') .addClass('active'); } }); }, position_titles : function (section, off) { var titles = section.find('.title, [data-section-title]'), previous_width = 0, self = this; if (typeof off === 'boolean') { titles.attr('style', ''); } else { titles.each(function () { if (!self.rtl) { $(this).css('left', previous_width); } else { $(this).css('right', previous_width); } previous_width += self.outerWidth($(this)); }); } }, position_content : function (section, off) { var titles = section.find('.title, [data-section-title]'), content = section.find('.content, [data-section-content]'), self = this; if (typeof off === 'boolean') { content.attr('style', ''); section.attr('style', ''); } else { section.find('section, .section, [data-section-region]').each(function () { var title = $(this).find('.title, [data-section-title]'), content = $(this).find('.content, [data-section-content]'); if (!self.rtl) { content.css({left: title.position().left - 1, top: self.outerHeight(title) - 2}); } else { content.css({right: self.position_right(title) + 1, top: self.outerHeight(title) - 2}); } }); // temporary work around for Zepto outerheight calculation issues. 
if (typeof Zepto === 'function') { section.height(this.outerHeight(titles.first())); } else { section.height(this.outerHeight(titles.first()) - 2); } } }, position_right : function (el) { var section = el.closest('[data-section]'), section_width = el.closest('[data-section]').width(), offset = section.find('.title, [data-section-title]').length; return (section_width - el.position().left - el.width() * (el.index() + 1) - offset); }, reflow : function () { $('[data-section]').trigger('resize'); }, small : function (el) { var settings = $.extend({}, this.settings, this.data_options(el)); if (this.is_tabs(el)) { return false; } if (el && this.is_accordion(el)) { return true; } if ($('html').hasClass('lt-ie9')) { return true; } if ($('html').hasClass('ie8compat')) { return true; } return $(this.scope).width() < 768; }, off : function () { $(this.scope).off('.fndtn.section'); $(window).off('.fndtn.section'); $(document).off('.fndtn.section') } }; }(Foundation.zj, this, this.document));<|fim▁end|>
;(function ($, window, document, undefined) { 'use strict';
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. //! A typesafe bitmask flag generator. /// The `bitflags!` macro generates a `struct` that holds a set of C-style /// bitmask flags. It is useful for creating typesafe wrappers for C APIs. /// /// The flags should only be defined for integer types, otherwise unexpected /// type errors may occur at compile time. /// /// # Example /// /// ```{.rust} /// #[macro_use] /// extern crate bitflags; /// /// bitflags! { /// flags Flags: u32 { /// const FLAG_A = 0b00000001, /// const FLAG_B = 0b00000010, /// const FLAG_C = 0b00000100, /// const FLAG_ABC = FLAG_A.bits /// | FLAG_B.bits /// | FLAG_C.bits, /// } /// } /// /// fn main() { /// let e1 = FLAG_A | FLAG_C; /// let e2 = FLAG_B | FLAG_C; /// assert_eq!((e1 | e2), FLAG_ABC); // union /// assert_eq!((e1 & e2), FLAG_C); // intersection /// assert_eq!((e1 - e2), FLAG_A); // set difference /// assert_eq!(!e2, FLAG_A); // set complement /// } /// ``` /// /// The generated `struct`s can also be extended with type and trait /// implementations: /// /// ```{.rust} /// #[macro_use] /// extern crate bitflags; /// /// use std::fmt; /// /// bitflags! { /// flags Flags: u32 { /// const FLAG_A = 0b00000001, /// const FLAG_B = 0b00000010, /// } /// } /// /// impl Flags { /// pub fn clear(&mut self) { /// self.bits = 0; // The `bits` field can be accessed from within the /// // same module where the `bitflags!` macro was invoked. 
/// } /// } /// /// impl fmt::Display for Flags { /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { /// write!(f, "hi!") /// } /// } /// /// fn main() { /// let mut flags = FLAG_A | FLAG_B; /// flags.clear(); /// assert!(flags.is_empty()); /// assert_eq!(format!("{}", flags), "hi!"); /// assert_eq!(format!("{:?}", FLAG_A | FLAG_B), "FLAG_A | FLAG_B"); /// assert_eq!(format!("{:?}", FLAG_B), "FLAG_B"); /// } /// ``` /// /// # Visibility /// /// The generated struct and its associated flag constants are not exported /// out of the current module by default. A definition can be exported out of /// the current module by adding `pub` before `flags`: /// /// ```{.rust},ignore /// #[macro_use] /// extern crate bitflags; /// /// mod example { /// bitflags! { /// pub flags Flags1: u32 { /// const FLAG_A = 0b00000001, /// } /// } /// bitflags! { /// flags Flags2: u32 { /// const FLAG_B = 0b00000010, /// } /// } /// } /// /// fn main() { /// let flag1 = example::FLAG_A; /// let flag2 = example::FLAG_B; // error: const `FLAG_B` is private /// } /// ``` /// /// # Attributes /// /// Attributes can be attached to the generated `struct` by placing them /// before the `flags` keyword. /// /// # Trait implementations /// /// The `Copy`, `Clone`, `PartialEq`, `Eq`, `PartialOrd`, `Ord` and `Hash` /// traits automatically derived for the `struct` using the `derive` attribute. /// Additional traits can be derived by providing an explicit `derive` /// attribute on `flags`. /// /// The `Extend` and `FromIterator` traits are implemented for the `struct`, /// too: `Extend` adds the union of the instances of the `struct` iterated over, /// while `FromIterator` calculates the union. /// /// The `Debug` trait is also implemented by displaying the bits value of the /// internal struct. This can be excluded by setting `bitflags_no_debug` feature /// in the crate depending on bitflags. 
/// /// ## Operators /// /// The following operator traits are implemented for the generated `struct`: /// /// - `BitOr` and `BitOrAssign`: union /// - `BitAnd` and `BitAndAssign`: intersection /// - `BitXor` and `BitXorAssign`: toggle /// - `Sub` and `SubAssign`: set difference /// - `Not`: set complement /// /// As long as the assignment operators are unstable rust feature they are only /// available with the crate feature `assignment_ops` enabled. /// /// # Methods /// /// The following methods are defined for the generated `struct`: /// /// - `empty`: an empty set of flags /// - `all`: the set of all flags /// - `bits`: the raw value of the flags currently stored /// - `from_bits`: convert from underlying bit representation, unless that /// representation contains bits that do not correspond to a flag /// - `from_bits_truncate`: convert from underlying bit representation, dropping /// any bits that do not correspond to flags /// - `is_empty`: `true` if no flags are currently stored /// - `is_all`: `true` if all flags are currently set /// - `intersects`: `true` if there are flags common to both `self` and `other` /// - `contains`: `true` all of the flags in `other` are contained within `self` /// - `insert`: inserts the specified flags in-place /// - `remove`: removes the specified flags in-place /// - `toggle`: the specified flags will be inserted if not present, and removed /// if they are. #[macro_export] macro_rules! bitflags { ($(#[$attr:meta])* pub flags $BitFlags:ident: $T:ty { $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+ }) => { #[derive(Copy, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)] $(#[$attr])*<|fim▁hole|> } $($(#[$Flag_attr])* pub const $Flag: $BitFlags = $BitFlags { bits: $value };)+ bitflags! 
{ @_impl flags $BitFlags: $T { $($(#[$Flag_attr])* const $Flag = $value),+ } } }; ($(#[$attr:meta])* flags $BitFlags:ident: $T:ty { $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+ }) => { #[derive(Copy, PartialEq, Eq, Clone, PartialOrd, Ord, Hash)] $(#[$attr])* struct $BitFlags { bits: $T, } $($(#[$Flag_attr])* const $Flag: $BitFlags = $BitFlags { bits: $value };)+ bitflags! { @_impl flags $BitFlags: $T { $($(#[$Flag_attr])* const $Flag = $value),+ } } }; (@_impl flags $BitFlags:ident: $T:ty { $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+ }) => { #[cfg(not(feature = "ndebug"))] impl ::core::fmt::Debug for $BitFlags { fn fmt(&self, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { // This convoluted approach is to handle #[cfg]-based flag // omission correctly. Some of the $Flag variants may not be // defined in this module so we create an inner module which // defines *all* flags to the value of 0. We then create a // second inner module that defines all of the flags with #[cfg] // to their real values. Afterwards the glob will import // variants from the second inner module, shadowing all // defined variants, leaving only the undefined ones with the // bit value of 0. #[allow(dead_code)] #[allow(unused_assignments)] mod dummy { // We can't use the real $BitFlags struct because it may be // private, which prevents us from using it to define // public constants. pub struct $BitFlags { bits: u64, } mod real_flags { use super::$BitFlags; $($(#[$Flag_attr])* pub const $Flag: $BitFlags = $BitFlags { bits: super::super::$Flag.bits as u64 };)+ } // Now we define the "undefined" versions of the flags. // This way, all the names exist, even if some are #[cfg]ed // out. $(const $Flag: $BitFlags = $BitFlags { bits: 0 };)+ #[inline] pub fn fmt(self_: u64, f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { // Now we import the real values for the flags. // Only ones that are #[cfg]ed out will be 0. 
use self::real_flags::*; let mut first = true; $( // $Flag.bits == 0 means that $Flag doesn't exist if $Flag.bits != 0 && self_ & $Flag.bits as u64 == $Flag.bits as u64 { if !first { try!(f.write_str(" | ")); } first = false; try!(f.write_str(stringify!($Flag))); } )+ Ok(()) } } dummy::fmt(self.bits as u64, f) } } #[cfg(all(test, feature = "ndebug"))] impl ::core::fmt::Debug for $BitFlags { fn fmt(&self, _f: &mut ::core::fmt::Formatter) -> ::core::fmt::Result { unimplemented!(); } } #[allow(dead_code)] impl $BitFlags { /// Returns an empty set of flags. #[inline] pub fn empty() -> $BitFlags { $BitFlags { bits: 0 } } /// Returns the set containing all flags. #[inline] pub fn all() -> $BitFlags { // See above `dummy` module for why this approach is taken. #[allow(dead_code)] mod dummy { pub struct $BitFlags { bits: u64, } mod real_flags { use super::$BitFlags; $($(#[$Flag_attr])* pub const $Flag: $BitFlags = $BitFlags { bits: super::super::$Flag.bits as u64 };)+ } $(const $Flag: $BitFlags = $BitFlags { bits: 0 };)+ #[inline] pub fn all() -> u64 { use self::real_flags::*; $($Flag.bits)|+ } } $BitFlags { bits: dummy::all() as $T } } /// Returns the raw value of the flags currently stored. #[inline] pub fn bits(&self) -> $T { self.bits } /// Convert from underlying bit representation, unless that /// representation contains bits that do not correspond to a flag. #[inline] pub fn from_bits(bits: $T) -> ::core::option::Option<$BitFlags> { if (bits & !$BitFlags::all().bits()) == 0 { ::core::option::Option::Some($BitFlags { bits: bits }) } else { ::core::option::Option::None } } /// Convert from underlying bit representation, dropping any bits /// that do not correspond to flags. #[inline] pub fn from_bits_truncate(bits: $T) -> $BitFlags { $BitFlags { bits: bits } & $BitFlags::all() } /// Returns `true` if no flags are currently stored. #[inline] pub fn is_empty(&self) -> bool { *self == $BitFlags::empty() } /// Returns `true` if all flags are currently set. 
#[inline] pub fn is_all(&self) -> bool { *self == $BitFlags::all() } /// Returns `true` if there are flags common to both `self` and `other`. #[inline] pub fn intersects(&self, other: $BitFlags) -> bool { !(*self & other).is_empty() } /// Returns `true` all of the flags in `other` are contained within `self`. #[inline] pub fn contains(&self, other: $BitFlags) -> bool { (*self & other) == other } /// Inserts the specified flags in-place. #[inline] pub fn insert(&mut self, other: $BitFlags) { self.bits |= other.bits; } /// Removes the specified flags in-place. #[inline] pub fn remove(&mut self, other: $BitFlags) { self.bits &= !other.bits; } /// Toggles the specified flags in-place. #[inline] pub fn toggle(&mut self, other: $BitFlags) { self.bits ^= other.bits; } } impl ::core::ops::BitOr for $BitFlags { type Output = $BitFlags; /// Returns the union of the two sets of flags. #[inline] fn bitor(self, other: $BitFlags) -> $BitFlags { $BitFlags { bits: self.bits | other.bits } } } impl ::core::ops::BitOrAssign for $BitFlags { /// Adds the set of flags. #[inline] fn bitor_assign(&mut self, other: $BitFlags) { self.bits |= other.bits; } } impl ::core::ops::BitXor for $BitFlags { type Output = $BitFlags; /// Returns the left flags, but with all the right flags toggled. #[inline] fn bitxor(self, other: $BitFlags) -> $BitFlags { $BitFlags { bits: self.bits ^ other.bits } } } impl ::core::ops::BitXorAssign for $BitFlags { /// Toggles the set of flags. #[inline] fn bitxor_assign(&mut self, other: $BitFlags) { self.bits ^= other.bits; } } impl ::core::ops::BitAnd for $BitFlags { type Output = $BitFlags; /// Returns the intersection between the two sets of flags. #[inline] fn bitand(self, other: $BitFlags) -> $BitFlags { $BitFlags { bits: self.bits & other.bits } } } impl ::core::ops::BitAndAssign for $BitFlags { /// Disables all flags disabled in the set. 
#[inline] fn bitand_assign(&mut self, other: $BitFlags) { self.bits &= other.bits; } } impl ::core::ops::Sub for $BitFlags { type Output = $BitFlags; /// Returns the set difference of the two sets of flags. #[inline] fn sub(self, other: $BitFlags) -> $BitFlags { $BitFlags { bits: self.bits & !other.bits } } } impl ::core::ops::SubAssign for $BitFlags { /// Disables all flags enabled in the set. #[inline] fn sub_assign(&mut self, other: $BitFlags) { self.bits &= !other.bits; } } impl ::core::ops::Not for $BitFlags { type Output = $BitFlags; /// Returns the complement of this set of flags. #[inline] fn not(self) -> $BitFlags { $BitFlags { bits: !self.bits } & $BitFlags::all() } } impl ::core::iter::Extend<$BitFlags> for $BitFlags { fn extend<T: ::core::iter::IntoIterator<Item=$BitFlags>>(&mut self, iterator: T) { for item in iterator { self.insert(item) } } } impl ::core::iter::FromIterator<$BitFlags> for $BitFlags { fn from_iter<T: ::core::iter::IntoIterator<Item=$BitFlags>>(iterator: T) -> $BitFlags { let mut result = Self::empty(); result.extend(iterator); result } } }; ($(#[$attr:meta])* pub flags $BitFlags:ident: $T:ty { $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+, }) => { bitflags! { $(#[$attr])* pub flags $BitFlags: $T { $($(#[$Flag_attr])* const $Flag = $value),+ } } }; ($(#[$attr:meta])* flags $BitFlags:ident: $T:ty { $($(#[$Flag_attr:meta])* const $Flag:ident = $value:expr),+, }) => { bitflags! { $(#[$attr])* flags $BitFlags: $T { $($(#[$Flag_attr])* const $Flag = $value),+ } } }; } #[cfg(test)] #[allow(non_upper_case_globals, dead_code)] mod tests { use std::hash::{SipHasher, Hash, Hasher}; bitflags! 
{ #[doc = "> The first principle is that you must not fool yourself — and"] #[doc = "> you are the easiest person to fool."] #[doc = "> "] #[doc = "> - Richard Feynman"] flags Flags: u32 { const FlagA = 0b00000001, #[doc = "<pcwalton> macros are way better at generating code than trans is"] const FlagB = 0b00000010, const FlagC = 0b00000100, #[doc = "* cmr bed"] #[doc = "* strcat table"] #[doc = "<strcat> wait what?"] const FlagABC = FlagA.bits | FlagB.bits | FlagC.bits, } } bitflags! { flags _CfgFlags: u32 { #[cfg(windows)] const _CfgA = 0b01, #[cfg(unix)] const _CfgB = 0b01, #[cfg(windows)] const _CfgC = _CfgA.bits | 0b10, } } bitflags! { flags AnotherSetOfFlags: i8 { const AnotherFlag = -1_i8, } } #[test] fn test_bits(){ assert_eq!(Flags::empty().bits(), 0b00000000); assert_eq!(FlagA.bits(), 0b00000001); assert_eq!(FlagABC.bits(), 0b00000111); assert_eq!(AnotherSetOfFlags::empty().bits(), 0b00); assert_eq!(AnotherFlag.bits(), !0_i8); } #[test] fn test_from_bits() { assert_eq!(Flags::from_bits(0), Some(Flags::empty())); assert_eq!(Flags::from_bits(0b1), Some(FlagA)); assert_eq!(Flags::from_bits(0b10), Some(FlagB)); assert_eq!(Flags::from_bits(0b11), Some(FlagA | FlagB)); assert_eq!(Flags::from_bits(0b1000), None); assert_eq!(AnotherSetOfFlags::from_bits(!0_i8), Some(AnotherFlag)); } #[test] fn test_from_bits_truncate() { assert_eq!(Flags::from_bits_truncate(0), Flags::empty()); assert_eq!(Flags::from_bits_truncate(0b1), FlagA); assert_eq!(Flags::from_bits_truncate(0b10), FlagB); assert_eq!(Flags::from_bits_truncate(0b11), (FlagA | FlagB)); assert_eq!(Flags::from_bits_truncate(0b1000), Flags::empty()); assert_eq!(Flags::from_bits_truncate(0b1001), FlagA); assert_eq!(AnotherSetOfFlags::from_bits_truncate(0_i8), AnotherSetOfFlags::empty()); } #[test] fn test_is_empty(){ assert!(Flags::empty().is_empty()); assert!(!FlagA.is_empty()); assert!(!FlagABC.is_empty()); assert!(!AnotherFlag.is_empty()); } #[test] fn test_is_all() { assert!(Flags::all().is_all()); 
assert!(!FlagA.is_all()); assert!(FlagABC.is_all()); assert!(AnotherFlag.is_all()); } #[test] fn test_two_empties_do_not_intersect() { let e1 = Flags::empty(); let e2 = Flags::empty(); assert!(!e1.intersects(e2)); assert!(AnotherFlag.intersects(AnotherFlag)); } #[test] fn test_empty_does_not_intersect_with_full() { let e1 = Flags::empty(); let e2 = FlagABC; assert!(!e1.intersects(e2)); } #[test] fn test_disjoint_intersects() { let e1 = FlagA; let e2 = FlagB; assert!(!e1.intersects(e2)); } #[test] fn test_overlapping_intersects() { let e1 = FlagA; let e2 = FlagA | FlagB; assert!(e1.intersects(e2)); } #[test] fn test_contains() { let e1 = FlagA; let e2 = FlagA | FlagB; assert!(!e1.contains(e2)); assert!(e2.contains(e1)); assert!(FlagABC.contains(e2)); assert!(AnotherFlag.contains(AnotherFlag)); } #[test] fn test_insert(){ let mut e1 = FlagA; let e2 = FlagA | FlagB; e1.insert(e2); assert_eq!(e1, e2); let mut e3 = AnotherSetOfFlags::empty(); e3.insert(AnotherFlag); assert_eq!(e3, AnotherFlag); } #[test] fn test_remove(){ let mut e1 = FlagA | FlagB; let e2 = FlagA | FlagC; e1.remove(e2); assert_eq!(e1, FlagB); let mut e3 = AnotherFlag; e3.remove(AnotherFlag); assert_eq!(e3, AnotherSetOfFlags::empty()); } #[test] fn test_operators() { let e1 = FlagA | FlagC; let e2 = FlagB | FlagC; assert_eq!((e1 | e2), FlagABC); // union assert_eq!((e1 & e2), FlagC); // intersection assert_eq!((e1 - e2), FlagA); // set difference assert_eq!(!e2, FlagA); // set complement assert_eq!(e1 ^ e2, FlagA | FlagB); // toggle let mut e3 = e1; e3.toggle(e2); assert_eq!(e3, FlagA | FlagB); let mut m4 = AnotherSetOfFlags::empty(); m4.toggle(AnotherSetOfFlags::empty()); assert_eq!(m4, AnotherSetOfFlags::empty()); } #[test] fn test_assignment_operators() { let mut m1 = Flags::empty(); let e1 = FlagA | FlagC; // union m1 |= FlagA; assert_eq!(m1, FlagA); // intersection m1 &= e1; assert_eq!(m1, FlagA); // set difference m1 -= m1; assert_eq!(m1, Flags::empty()); // toggle m1 ^= e1; assert_eq!(m1, e1); } 
#[test] fn test_extend() { let mut flags; flags = Flags::empty(); flags.extend([].iter().cloned()); assert_eq!(flags, Flags::empty()); flags = Flags::empty(); flags.extend([FlagA, FlagB].iter().cloned()); assert_eq!(flags, FlagA | FlagB); flags = FlagA; flags.extend([FlagA, FlagB].iter().cloned()); assert_eq!(flags, FlagA | FlagB); flags = FlagB; flags.extend([FlagA, FlagABC].iter().cloned()); assert_eq!(flags, FlagABC); } #[test] fn test_from_iterator() { assert_eq!([].iter().cloned().collect::<Flags>(), Flags::empty()); assert_eq!([FlagA, FlagB].iter().cloned().collect::<Flags>(), FlagA | FlagB); assert_eq!([FlagA, FlagABC].iter().cloned().collect::<Flags>(), FlagABC); } #[test] fn test_lt() { let mut a = Flags::empty(); let mut b = Flags::empty(); assert!(!(a < b) && !(b < a)); b = FlagB; assert!(a < b); a = FlagC; assert!(!(a < b) && b < a); b = FlagC | FlagB; assert!(a < b); } #[test] fn test_ord() { let mut a = Flags::empty(); let mut b = Flags::empty(); assert!(a <= b && a >= b); a = FlagA; assert!(a > b && a >= b); assert!(b < a && b <= a); b = FlagB; assert!(b > a && b >= a); assert!(a < b && a <= b); } fn hash<T: Hash>(t: &T) -> u64 { let mut s = SipHasher::new_with_keys(0, 0); t.hash(&mut s); s.finish() } #[test] fn test_hash() { let mut x = Flags::empty(); let mut y = Flags::empty(); assert_eq!(hash(&x), hash(&y)); x = Flags::all(); y = FlagABC; assert_eq!(hash(&x), hash(&y)); } #[test] #[cfg_attr(feature = "ndebug", should_panic)] fn test_debug() { assert_eq!(format!("{:?}", FlagA | FlagB), "FlagA | FlagB"); assert_eq!(format!("{:?}", FlagABC), "FlagA | FlagB | FlagC | FlagABC"); } mod submodule { bitflags! { pub flags PublicFlags: i8 { const FlagX = 0, } } bitflags! { flags PrivateFlags: i8 { const FlagY = 0, } } #[test] fn test_private() { let _ = FlagY; } } #[test] fn test_public() { let _ = submodule::FlagX; } mod t1 { mod foo { pub type Bar = i32; } bitflags! 
{ /// baz flags Flags: foo::Bar { const A = 0b00000001, #[cfg(foo)] const B = 0b00000010, #[cfg(foo)] const C = 0b00000010, } } } }<|fim▁end|>
pub struct $BitFlags { bits: $T,
<|file_name|>test_stale_peer.rs<|end_file_name|><|fim▁begin|>// Copyright 2016 TiKV Project Authors. Licensed under Apache-2.0. //! A module contains test cases of stale peers gc. use std::sync::Arc; use std::thread; use std::time::*; use kvproto::raft_serverpb::{PeerState, RegionLocalState}; use raft::eraftpb::MessageType; use engine_rocks::Compat; use engine_traits::Peekable; use engine_traits::CF_RAFT; use test_raftstore::*; use tikv_util::config::ReadableDuration; use tikv_util::HandyRwLock; /// A helper function for testing the behaviour of the gc of stale peer /// which is out of region. /// If a peer detects the leader is missing for a specified long time, /// it should consider itself as a stale peer which is removed from the region. /// This test case covers the following scenario: /// At first, there are three peer A, B, C in the cluster, and A is leader. /// Peer B gets down. And then A adds D, E, F into the cluster. /// Peer D becomes leader of the new cluster, and then removes peer A, B, C. /// After all these peer in and out, now the cluster has peer D, E, F. /// If peer B goes up at this moment, it still thinks it is one of the cluster /// and has peers A, C. However, it could not reach A, C since they are removed from /// the cluster or probably destroyed. /// Meantime, D, E, F would not reach B, Since it's not in the cluster anymore. /// In this case, Peer B would notice that the leader is missing for a long time, /// and it would check with pd to confirm whether it's still a member of the cluster. /// If not, it should destroy itself as a stale peer which is removed out already. fn test_stale_peer_out_of_region<T: Simulator>(cluster: &mut Cluster<T>) { let pd_client = Arc::clone(&cluster.pd_client); // Disable default max peer number check. 
pd_client.disable_default_operator(); let r1 = cluster.run_conf_change(); pd_client.must_add_peer(r1, new_learner_peer(2, 2)); pd_client.must_add_peer(r1, new_peer(2, 2)); pd_client.must_add_peer(r1, new_learner_peer(3, 3)); pd_client.must_add_peer(r1, new_peer(3, 3)); let (key, value) = (b"k1", b"v1"); cluster.must_put(key, value); assert_eq!(cluster.get(key), Some(value.to_vec())); let engine_2 = cluster.get_engine(2); must_get_equal(&engine_2, key, value); // Isolate peer 2 from rest of the cluster. cluster.add_send_filter(IsolationFilterFactory::new(2)); // In case 2 is leader, it will fail to pass the healthy nodes check, // so remove isolated node first. Because 2 is isolated, so it can't remove itself. pd_client.must_remove_peer(r1, new_peer(2, 2)); // Add peer [(4, 4), (5, 5), (6, 6)]. pd_client.must_add_peer(r1, new_learner_peer(4, 4)); pd_client.must_add_peer(r1, new_peer(4, 4)); pd_client.must_add_peer(r1, new_learner_peer(5, 5)); pd_client.must_add_peer(r1, new_peer(5, 5)); pd_client.must_add_peer(r1, new_learner_peer(6, 6)); pd_client.must_add_peer(r1, new_peer(6, 6)); // Remove peer [(1, 1), (3, 3)]. pd_client.must_remove_peer(r1, new_peer(1, 1)); pd_client.must_remove_peer(r1, new_peer(3, 3)); // Keep peer 2 isolated. Otherwise whether peer 3 is destroyed or not, // it will handle the stale raft message from peer 2 and cause peer 2 to // destroy itself earlier than this test case expects. // Wait for max_leader_missing_duration to time out. cluster.must_remove_region(2, r1); // Check whether this region is still functional properly. let (key2, value2) = (b"k2", b"v2"); cluster.must_put(key2, value2); assert_eq!(cluster.get(key2), Some(value2.to_vec())); // Check whether peer(2, 2) and its data are destroyed. 
must_get_none(&engine_2, key); must_get_none(&engine_2, key2); let state_key = keys::region_state_key(1); let state: RegionLocalState = engine_2 .c() .get_msg_cf(CF_RAFT, &state_key) .unwrap() .unwrap(); assert_eq!(state.get_state(), PeerState::Tombstone); } #[test] fn test_node_stale_peer_out_of_region() { let count = 6; let mut cluster = new_node_cluster(0, count); test_stale_peer_out_of_region(&mut cluster); } #[test] fn test_server_stale_peer_out_of_region() { let count = 6; let mut cluster = new_server_cluster(0, count); test_stale_peer_out_of_region(&mut cluster); } /// A help function for testing the behaviour of the gc of stale peer /// which is out or region.<|fim▁hole|>/// it should consider itself as a stale peer which is removed from the region. /// This test case covers the following scenario: /// A peer, B is initialized as a replicated peer without data after /// receiving a single raft AE message. But then it goes through some process like /// the case of `test_stale_peer_out_of_region`, it's removed out of the region /// and wouldn't be contacted anymore. /// In both cases, peer B would notice that the leader is missing for a long time, /// and it's an initialized peer without any data. It would destroy itself as /// as stale peer directly and should not impact other region data on the same store. fn test_stale_peer_without_data<T: Simulator>(cluster: &mut Cluster<T>, right_derive: bool) { cluster.cfg.raft_store.right_derive_when_split = right_derive; let pd_client = Arc::clone(&cluster.pd_client); // Disable default max peer number check. 
pd_client.disable_default_operator(); let r1 = cluster.run_conf_change(); cluster.must_put(b"k1", b"v1"); cluster.must_put(b"k3", b"v3"); let region = cluster.get_region(b""); pd_client.must_add_peer(r1, new_peer(2, 2)); cluster.must_split(&region, b"k2"); pd_client.must_add_peer(r1, new_peer(3, 3)); let engine3 = cluster.get_engine(3); if right_derive { must_get_none(&engine3, b"k1"); must_get_equal(&engine3, b"k3", b"v3"); } else { must_get_equal(&engine3, b"k1", b"v1"); must_get_none(&engine3, b"k3"); } let new_region = if right_derive { cluster.get_region(b"k1") } else { cluster.get_region(b"k3") }; let new_region_id = new_region.get_id(); // Block peer (3, 4) at receiving snapshot, but not the heartbeat cluster.add_send_filter(CloneFilterFactory( RegionPacketFilter::new(new_region_id, 3).msg_type(MessageType::MsgSnapshot), )); pd_client.must_add_peer(new_region_id, new_peer(3, 4)); // Wait for the heartbeat broadcasted from peer (1, 1000) to peer (3, 4). cluster.must_region_exist(new_region_id, 3); // And then isolate peer (3, 4) from peer (1, 1000). cluster.add_send_filter(IsolationFilterFactory::new(3)); pd_client.must_remove_peer(new_region_id, new_peer(3, 4)); cluster.must_remove_region(3, new_region_id); // There must be no data on store 3 belongs to new region if right_derive { must_get_none(&engine3, b"k1"); } else { must_get_none(&engine3, b"k3"); } // Check whether peer(3, 4) is destroyed. // Before peer 4 is destroyed, a tombstone mark will be written into the engine. // So we could check the tombstone mark to make sure peer 4 is destroyed. let state_key = keys::region_state_key(new_region_id); let state: RegionLocalState = engine3 .c() .get_msg_cf(CF_RAFT, &state_key) .unwrap() .unwrap(); assert_eq!(state.get_state(), PeerState::Tombstone); // other region should not be affected. 
if right_derive { must_get_equal(&engine3, b"k3", b"v3"); } else { must_get_equal(&engine3, b"k1", b"v1"); } } #[test] fn test_node_stale_peer_without_data_left_derive_when_split() { let count = 3; let mut cluster = new_node_cluster(0, count); test_stale_peer_without_data(&mut cluster, false); } #[test] fn test_node_stale_peer_without_data_right_derive_when_split() { let count = 3; let mut cluster = new_node_cluster(0, count); test_stale_peer_without_data(&mut cluster, true); } #[test] fn test_server_stale_peer_without_data_left_derive_when_split() { let count = 3; let mut cluster = new_server_cluster(0, count); test_stale_peer_without_data(&mut cluster, false); } #[test] fn test_server_stale_peer_without_data_right_derive_when_split() { let count = 3; let mut cluster = new_server_cluster(0, count); test_stale_peer_without_data(&mut cluster, true); } /// Test if a stale learner can be destroyed by sending ValidatePeer msg to /// PD then it will reply to this stale learner with a tombstone msg. #[test] fn test_stale_learner() { let mut cluster = new_server_cluster(0, 4); cluster.cfg.raft_store.raft_election_timeout_ticks = 5; cluster.cfg.raft_store.raft_store_max_leader_lease = ReadableDuration::millis(40); cluster.cfg.raft_store.max_leader_missing_duration = ReadableDuration::millis(150); cluster.cfg.raft_store.abnormal_leader_missing_duration = ReadableDuration::millis(100); cluster.cfg.raft_store.peer_stale_state_check_interval = ReadableDuration::millis(100); let pd_client = Arc::clone(&cluster.pd_client); // Disable default max peer number check. pd_client.disable_default_operator(); let r1 = cluster.run_conf_change(); pd_client.must_add_peer(r1, new_peer(2, 2)); pd_client.must_add_peer(r1, new_learner_peer(3, 3)); cluster.must_put(b"k1", b"v1"); let engine3 = cluster.get_engine(3); must_get_equal(&engine3, b"k1", b"v1"); // And then isolate peer on store 3 from leader. 
cluster.add_send_filter(IsolationFilterFactory::new(3)); // Add a new peer to increase the conf version. pd_client.must_add_peer(r1, new_peer(4, 4)); // It should not be deleted. thread::sleep(Duration::from_millis(200)); must_get_equal(&engine3, b"k1", b"v1"); // Promote the learner pd_client.must_add_peer(r1, new_peer(3, 3)); // It should not be deleted. thread::sleep(Duration::from_millis(200)); must_get_equal(&engine3, b"k1", b"v1"); // Delete the learner pd_client.must_remove_peer(r1, new_peer(3, 3)); // Check not leader should fail, all data should be removed. must_get_none(&engine3, b"k1"); let state_key = keys::region_state_key(r1); let state: RegionLocalState = engine3 .c() .get_msg_cf(CF_RAFT, &state_key) .unwrap() .unwrap(); assert_eq!(state.get_state(), PeerState::Tombstone); } /// Test if a stale learner can be destroyed by sending msg(like read index) to /// leader then it will reply to this stale learner with a tombstone msg. #[test] fn test_stale_learner_with_read_index() { let mut cluster = new_server_cluster(0, 4); // Do not rely on pd to remove stale peer cluster.cfg.raft_store.max_leader_missing_duration = ReadableDuration::hours(2); cluster.cfg.raft_store.abnormal_leader_missing_duration = ReadableDuration::minutes(20); cluster.cfg.raft_store.peer_stale_state_check_interval = ReadableDuration::minutes(10); let pd_client = Arc::clone(&cluster.pd_client); // Disable default max peer number check pd_client.disable_default_operator(); let r1 = cluster.run_conf_change(); pd_client.must_add_peer(r1, new_peer(2, 2)); pd_client.must_add_peer(r1, new_learner_peer(3, 3)); cluster.must_put(b"k1", b"v1"); let engine3 = cluster.get_engine(3); must_get_equal(&engine3, b"k1", b"v1"); // And then isolate peer on store 3 from leader cluster.add_send_filter(IsolationFilterFactory::new(3)); // Delete the learner pd_client.must_remove_peer(r1, new_learner_peer(3, 3)); cluster.clear_send_filters(); // Stale learner should exist must_get_equal(&engine3, b"k1", 
b"v1"); let region = cluster.get_region(b"k1"); let mut request = new_request( region.get_id(), region.get_region_epoch().clone(), vec![new_get_cf_cmd("default", b"k1")], false, ); request.mut_header().set_peer(new_peer(3, 3)); request.mut_header().set_replica_read(true); let (cb, _) = make_cb(&request); cluster .sim .rl() .async_command_on_node(3, request, cb) .unwrap(); // Stale learner should be destroyed due to interaction between leader must_get_none(&engine3, b"k1"); let state_key = keys::region_state_key(r1); let state: RegionLocalState = engine3 .c() .get_msg_cf(CF_RAFT, &state_key) .unwrap() .unwrap(); assert_eq!(state.get_state(), PeerState::Tombstone); }<|fim▁end|>
/// If a peer detects the leader is missing for a specified long time,
<|file_name|>reqpart.py<|end_file_name|><|fim▁begin|># # Chris Lumens <[email protected]> # # Copyright 2015 Red Hat, Inc. # # This copyrighted material is made available to anyone wishing to use, modify, # copy, or redistribute it subject to the terms and conditions of the GNU # General Public License v.2. This program is distributed in the hope that it # will be useful, but WITHOUT ANY WARRANTY expressed or implied, including the # implied warranties of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU General Public License for more details.<|fim▁hole|># Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. Any Red Hat # trademarks that are incorporated in the source code or documentation are not # subject to the GNU General Public License and may only be used or replicated # with the express permission of Red Hat, Inc. # import unittest from tests.baseclass import CommandTest, CommandSequenceTest class F23_TestCase(CommandTest): command = "reqpart" def runTest(self): # pass self.assert_parse("reqpart", "reqpart\n") # pass self.assert_parse("reqpart --add-boot", "reqpart --add-boot\n") class F23_AutopartReqpart_TestCase(CommandSequenceTest): def runTest(self): # fail - can't use both autopart and reqpart self.assert_parse_error(""" autopart reqpart""") RHEL7_TestCase = F23_TestCase if __name__ == "__main__": unittest.main()<|fim▁end|>
# # You should have received a copy of the GNU General Public License along with # this program; if not, write to the Free Software Foundation, Inc., 51
<|file_name|>hello-multithreaded.rs<|end_file_name|><|fim▁begin|>use std::task::spawn;<|fim▁hole|> let (sender, receiver) = channel::<int>(); for child_number in range(0, 20i) { let child_sender = sender.clone(); spawn(move || { child_sender.send(child_number); }); } for _ in range(0, 20i) { let received = receiver.recv(); println!("Message received from child {}", received); } }<|fim▁end|>
use std::comm::channel; fn main () {
<|file_name|>nuevo-pedido.js<|end_file_name|><|fim▁begin|>var _servicio = '../php/servicios/proc-pedidos.php'; var _idPedidoX = 0, _docGenerate = '', _serieUsar = ''; (function($){ var _cboProd = []; var template = Handlebars.compile($("#result-template").html()); var empty = Handlebars.compile($("#empty-template").html()); $(document).ready(function(){ /* -------------------------------------- */ $('#txtProducto').autoComplete({ source: function(term, response){ $.post('../php/servicios/get_prods_lotes.php', { q: term }, function(data){ response(data); },'json'); }, renderItem: function (item, search){ var re = new RegExp("(" + search.split(' ').join('|') + ")", "gi"); var _html = ''; _html += '<div class="autocomplete-suggestion" data-prodi="'+item[0]+'" data-lote="'+item[1]+'" data-val="'+search+'" data-idprod="'+item[5]+'" data-idum="'+item[6]+'" data-precio="'+item[4]+'" data-idlote="'+item[7]+'" >'; _html += item[0].replace(re, "<b>$1</b>")+' Precio <strong>S/. '+item[4]+'</strong>, Stock: '+item[2]+', Lote: '+item[1]+', Vence: '+item[3]; _html += '</div>'; return _html; }, onSelect: function(e, term, item){ $('#idProd').val( item.data('idprod') ); $('#idUM').val( item.data('idum') ); $('#txtPrecio').val( item.data('precio') ); $('#txtProducto').val( item.data('prodi') ); $('#idLote').val( item.data('idlote') ); $('#txtCantidad').focus(); //alert('Item "'+item.data('prodi')+' Lote: '+item.data('lote')+' " selected by '+(e.type == 'keydown' ? 
'pressing enter' : 'mouse click')+'.'); e.preventDefault(); } }); /* -------------------------------------- */ _cboCLiente = $('#cboCliente').selectize({ options: _json_clientes, labelField: 'texto', valueField: 'id', }); if( _idCliente > 0 ){ _cboCLiente[0].selectize.setValue(_idCliente); }else{ _cboCLiente[0].selectize.setValue("1"); } /* -------------------------------------- */ $('#txtVence').datepicker({ format: 'dd/mm/yyyy', startDate: '-3d', language : 'es', startView : 'year' }); /* -------------------------------------- */ $('#txtFecha').datepicker({ format: 'dd/mm/yyyy', startDate: '-3d', language : 'es', startView : 'day' }); /* -------------------------------------- */ $('[data-toggle="tooltip"]').tooltip(); /* -------------------------------------- */ $(document).delegate('.copiarPedido', 'click', function(event) { /**/ var _idPedido = $(this).attr('href'); $('#CopiarModal').modal('show'); $.post( _servicio , {f:'copy',idp:_idPedido} , function(data, textStatus, xhr) { $('#CopiarModal').modal('hide'); document.location.reload(); },'json'); event.preventDefault(); /**/ }); /* -------------------------------------- */ $('#addNuevoItem').click(function(event) { resetAddItems(); $('#editorProducto').fadeIn('fast'); $('#txtProducto').focus(); event.preventDefault(); }); /* -------------------------------------- */ $('#cerrarProd').click(function(event) { resetAddItems(); $('#editorProducto').fadeOut('fast'); event.preventDefault(); }); /* -------------------------------------- */ $('#txtCantidad').keypress(function(event) { /* Act on the event */ if( event.keyCode == 13 ){ $('#addProducto').focus(); } }); /* -------------------------------------- */ $('#SavePedido').click(function(event) { /* Act on the event */ var _data = $('#frmData').serialize(); var $btn = $(this).button('loading') $.post( _servicio , _data , function(data, textStatus, xhr) { if( data.error == '' ){ $('#idPedido').val( data.idPedido ); _idPedidoX = data.idPedido; 
$('#labelidPedido').html( 'Pre venta #'+data.idPedido ); $('#labelPedido').html( '#'+data.idPedido ); alertify.alert('Pre venta generado #'+data.idPedido,function(){ document.location.href = 'nuevo-pedido.php?idpedido='+data.idPedido; }); $('#panelFacturar').show(); } $btn.button('reset'); },'json'); event.preventDefault(); }); /* -------------------------------------- */ $('#NuevoItem').click(function(event) { /* Act on the event */ $('#myModal').modal('show'); $('#myModalLabel').html('Nuevo Pedido'); event.preventDefault(); }); /* -------------------------------------- */ //$('.jChosen').chosen({width: "100%"}); /* -------------------------------------- */ $('#addProducto').click(function(event) { $(this).attr({'disabled':'disabled'}); // var _data = $('#frmEditor').serialize(); // $.post( _servicio , _data , function(data, textStatus, xhr) { $('#addProducto').removeAttr('disabled'); loadTablita(data); resetAddItems(); $('#txtProducto').val(''); $('#txtProducto').focus(); alertify.success('Producto agregado'); },'json'); event.preventDefault(); }); /* -------------------------------------- */ $('#txtPrecio').keyup(function(event) { var _cant = $('#txtCantidad').val(), _precio = $(this).val(); var _total = _cant * _precio; $('#txtTotal').val(_total); }); /* -------------------------------------- */ $('#txtCantidad').keyup(function(event) { var _cant = $(this).val(), _precio = $('#txtPrecio').val(); var _total = _cant * _precio; $('#txtTotal').val(_total); }); /* -------------------------------------- */ $(document).delegate('.quitarProd', 'click', function(event) { /* Quitar un item de la lista y volver a dibujar la tabla. 
*/ var _Nombre = $(this).attr('rel'), _idd = $(this).attr('href'); alertify.confirm('Confirme quitar Item: '+_Nombre,function(e){ if(e){ $.post( _servicio , {f:'delItem',idItem:_idd,'idp':_idPedido} , function(data, textStatus, xhr) { $('#Fila_'+_idd).hide('slow'); loadTablita(data); alertify.error('Producto quitado.'); },'json'); } }); event.preventDefault(); }); /* -------------------------------------- */ $(document).delegate('.ItemLista', 'click', function(event) { /* Agregar un item de la lista y volver a dibujar la tabla. */ var _idp = $(this).attr('href'); $.post( _servicio , { f:'getItem', idp:_idp } , function(data, textStatus, xhr) { LoadItem( data ); },'json'); event.preventDefault(); }); /* -------------------------------------- */ $(document).delegate('.goPedido', 'click', function(event) { var _idp = $(this).attr('href'); $('#myModal').modal('show'); event.preventDefault(); }); /* -------------------------------------- */ $('#btnGoFacturar').click(function(event) { var _filtro = ''; _docGenerate = $('#Documento').val(); _serieUsar = $('#Correlativo').val(); var _idPedido = $('#idPedido').val(); _filtro = 'goBoleta'; /**/ $.post( _servicio , { f:_filtro, idp:_idPedido, 'TipoDoc':_docGenerate, serie:_serieUsar} , function(data, textStatus, xhr) { if( data.idVenta > 0 ){ switch(_docGenerate){ case 'B': document.location.href = 'nueva-boleta.php?id='+data.idVenta; break; case 'F': document.location.href = 'nueva-factura.php?id='+data.idVenta; break; case 'R': document.location.href = 'nuevo-recibo.php?id='+data.idVenta; break; } } },'json'); /**/ event.preventDefault(); }); /* -------------------------------------- */ }); })(jQuery); function LoadItem( json ){ if( json.data != undefined || json.data != null ){ var _html = '', _item = []; for (var i = 0; i < json.data.length; i++) { _item = json.data[0]; $('#txtCantidad').val( _item.int_Cantidad ); //$('#txtProducto').val( _item.var_Nombre+' x '+_item.unidadMedida ); $('#txtPrecio').val( _item.flt_Precio 
); $('#txtTotal').val( _item.flt_Total ); // $('#idProd').val( _item.int_IdProducto ); $('#idUM').val( _item.int_IdUnidadMedida ); $('#idItem').val( _item.int_IdDetallePedido ); $('#editorProducto').fadeIn('fast'); }; } } function resetAddItems(){ $('#containerProducto').removeClass().addClass('form-group'); $('#idProd').val('0'); $('#idUM').val('0'); $('#txtPrecio').val( '0' ); $('#txtCantidad').val( '' ); $('#txtTotal').val( '0' ); //$("#txtProducto").val(''); $("#idItem").val('0'); } function loadTablita( json ){ if( json.data != undefined || json.data != null ){ var _fila = [], _html = '', _total = 0; for (var i = 0; i < json.data.length; i++) { _fila = json.data[i]; _html += '<tr id="Fila_'+_fila.int_IdDetallePedido+'" >'; _html += '<td>'; _html += '<span class="fa fa-barcode" ></span> '+_fila.prod+' x '+_fila.um+''; if( _fila.int_IdPromo != null ){ _html += '<br/><small>'+_fila.var_Promo+' antes ('+_fila.flt_Precio+')</small>'; } _html += '</td>'; _html += '<td>'+_fila.lote+'</td>'; if( _fila.int_IdPromo != null ){ _html += '<td class="text-right" >S/. '+_fila.flt_Promo+'</td>'; }else{ _html += '<td class="text-right" >S/. 
'+_fila.flt_Precio+'</td>'; } _html += '<td class="text-right" >'+_fila.cant+'</td>'; _total = _total + parseFloat(_fila.flt_Total); _html += '<td class="text-right" >'+_fila.flt_Total+'</td>'; _html += '<td>'; _html += '<a href="'+_fila.int_IdDetallePedido+'" class="pull-right quitarProd" rel="'+_fila.prod+'" ><span class="glyphicon glyphicon-remove" ></span></a>'; _html += '</td>'; }; $('#TotalPedido').val( _total ); $('#LabelTotal').html('Total Pre venta: '+_total); $('#Tablita tbody').html( _html ); } } /*Solo Numeros*/ /* onkeypress="return validar(event);" */ function validar(e) { tecla = (document.all)?e.keyCode:e.which;//ascii //alert(tecla); switch(tecla){ case 8: return true; break; case 46://punto return true; break; case 43://Mas return true; break; case 45://Menos return true; break; case 44://Coma return true; break; case 0://Suprimir return true; break; default:<|fim▁hole|> patron = /\d/; te = String.fromCharCode(tecla); return patron.test(te); }<|fim▁end|>
break; }
<|file_name|>instance_groups.py<|end_file_name|><|fim▁begin|># ========================================================================= # Copyright 2012-present Yunify, Inc. # ------------------------------------------------------------------------- # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this work except in compliance with the License. # You may obtain a copy of the License in the LICENSE file, or at: # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ========================================================================= from qingcloud.iaas import constants as const from qingcloud.misc.utils import filter_out_none class InstanceGroupsAction(object): def __init__(self, conn): self.conn = conn <|fim▁hole|> description=None, **ignore): """ Create an instance group. @param relation: Define the relation between instances in the same group. "repel" means these instances prefer distributing on the different physical units. "attract" means these instances prefer converging on the same physical unit. @param instance_group_name: The name of this group. @param description: The description of this group. """ action = const.ACTION_CREATE_INSTANCE_GROUPS valid_keys = ['relation', 'instance_group_name', 'description'] body = filter_out_none(locals(), valid_keys) if not self.conn.req_checker.check_params(body, required_params=['relation'], ): return None return self.conn.send_request(action, body) def delete_instance_groups(self, instance_groups, **ignore): """ Delete the specific instance group. @param instance_groups: An id list contains the group(s) id which will be deleted. 
""" action = const.ACTION_DELETE_INSTANCE_GROUPS valid_keys = ['instance_groups'] body = filter_out_none(locals(), valid_keys) if not self.conn.req_checker.check_params(body, required_params=['instance_groups'], list_params=['instance_groups'] ): return None return self.conn.send_request(action, body) def join_instance_group(self, instances, instance_group, **ignore): """ Add the instance(s) to the instance group. @param instances: An id list contains the instances(s) that will be added in the specific group. @param instance_group: The group id. """ action = const.ACTION_JOIN_INSTANCE_GROUP valid_keys = ['instances', 'instance_group'] body = filter_out_none(locals(), valid_keys) if not self.conn.req_checker.check_params(body, required_params=['instances', 'instance_group'], list_params=['instances'] ): return None return self.conn.send_request(action, body) def leave_instance_group(self, instances, instance_group, **ignore): """ Delete the specific instance(s) from the group. @param instances: An id list contains the instance(s) who want to leave the instance group. @param instance_group: The instance group id. """ action = const.ACTION_LEAVE_INSTANCE_GROUP valid_keys = ['instances', 'instance_group'] body = filter_out_none(locals(), valid_keys) if not self.conn.req_checker.check_params(body, required_params=['instances', 'instance_group'], list_params=['instances'] ): return None return self.conn.send_request(action, body) def describe_instance_groups(self, instance_groups=[], relation=None, tags=None, owner=None, verbose=0, offset=0, limit=20, **ignore): """ Describe the instance groups filtered by conditions. @param instance_groups: If this param was given, only return the group(s) info in this given list. @param relation: Filter by the relation type. @param tags: Filter by the tag id. @param owner: Filter by the owner id. @param verbose: Whether return the verbose information. @param offset: The offset of the item cursor and its default value is 0. 
@param limit: The number of items that will be displayed. Default is 20, maximum is 100. """ action = const.ACTION_DESCRIBE_INSTANCE_GROUPS valid_keys = ['instance_groups', 'relation', 'tags', 'owner', 'verbose', 'offset', 'limit'] body = filter_out_none(locals(), valid_keys) if not self.conn.req_checker.check_params(body, list_params=['instance_groups', 'tags'], integer_params=['limit', 'verbose', 'offset'] ): return None return self.conn.send_request(action, body)<|fim▁end|>
def create_instance_groups(self, relation, instance_group_name=None,
<|file_name|>fixture_model.py<|end_file_name|><|fim▁begin|>class Penguin(object): def __init__(self, name, mood, id=None): self.name = name self.mood = mood self.id = id def __repr__(self): return '< %s the %s penguin >' % (self.name, self.mood) class Goose(object): def __init__(self, name, favorite_penguin, id=None): self.name = name self.favorite_penguin = favorite_penguin self.id = id def __repr__(self):<|fim▁hole|><|fim▁end|>
template = '< %s, the goose that likes %s >' return template % (self.name, repr(self.favorite_penguin))
<|file_name|>geo.py<|end_file_name|><|fim▁begin|>from math import sqrt def euclidean_distance(p1, p2):<|fim▁hole|> :return: """ dx, dy = p2[0] - p1[0], p2[1] - p1[1] # Magnitude. Coulomb law. return sqrt(dx ** 2 + dy ** 2)<|fim▁end|>
""" Compute euclidean distance for two points :param p1: :param p2:
<|file_name|>search_filters_limit.go<|end_file_name|><|fim▁begin|><|fim▁hole|>// Copyright 2012-2015 Oliver Eilhard. All rights reserved. // Use of this source code is governed by a MIT-license. // See http://olivere.mit-license.org/license.txt for details. package elastic // A limit filter limits the number of documents (per shard) to execute on. // For details, see: // http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/query-dsl-limit-filter.html type LimitFilter struct { Filter limit int } func NewLimitFilter(limit int) LimitFilter { f := LimitFilter{limit: limit} return f } func (f LimitFilter) Source() interface{} { // { // "limit" : { // "value" : "..." // } // } source := make(map[string]interface{}) params := make(map[string]interface{}) source["limit"] = params params["value"] = f.limit return source }<|fim▁end|>
<|file_name|>GroovyScriptIT.java<|end_file_name|><|fim▁begin|>/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.script; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchPhaseExecutionException; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.lucene.search.function.CombineFunction; import org.elasticsearch.script.ScriptService.ScriptType; import org.elasticsearch.script.groovy.GroovyScriptEngineService; import org.elasticsearch.test.ESIntegTestCase; import org.junit.Test; import java.util.ArrayList; import java.util.List; import static org.elasticsearch.index.query.QueryBuilders.*; import static org.elasticsearch.index.query.functionscore.ScoreFunctionBuilders.scriptFunction; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.*; import static org.hamcrest.Matchers.equalTo; /** * Various tests for Groovy scripting */ public class GroovyScriptIT extends ESIntegTestCase { @Test public void testGroovyBigDecimalTransformation() { client().prepareIndex("test", "doc", "1").setSource("foo", 5).setRefresh(true).get(); // Test that something that would usually be a BigDecimal is 
transformed into a Double assertScript("def n = 1.23; assert n instanceof Double;"); assertScript("def n = 1.23G; assert n instanceof Double;"); assertScript("def n = BigDecimal.ONE; assert n instanceof BigDecimal;"); } public void assertScript(String script) { SearchResponse resp = client().prepareSearch("test") .setSource(new BytesArray("{\"query\": {\"match_all\": {}}," + "\"sort\":{\"_script\": {\"script\": \""+ script + "; 1\", \"type\": \"number\", \"lang\": \"groovy\"}}}")).get(); assertNoFailures(resp); } @Test public void testGroovyExceptionSerialization() throws Exception { List<IndexRequestBuilder> reqs = new ArrayList<>(); for (int i = 0; i < randomIntBetween(50, 500); i++) { reqs.add(client().prepareIndex("test", "doc", "" + i).setSource("foo", "bar")); } indexRandom(true, false, reqs); try { client().prepareSearch("test") .setQuery( constantScoreQuery(scriptQuery(new Script("1 == not_found", ScriptType.INLINE, GroovyScriptEngineService.NAME, null)))).get(); fail("should have thrown an exception"); } catch (SearchPhaseExecutionException e) { assertThat(e.toString()+ "should not contained NotSerializableTransportException", e.toString().contains("NotSerializableTransportException"), equalTo(false)); assertThat(e.toString()+ "should have contained GroovyScriptExecutionException", e.toString().contains("GroovyScriptExecutionException"), equalTo(true)); assertThat(e.toString()+ "should have contained not_found", e.toString().contains("No such property: not_found"), equalTo(true)); } try {<|fim▁hole|> client().prepareSearch("test") .setQuery(constantScoreQuery(scriptQuery(new Script("assert false", ScriptType.INLINE, "groovy", null)))).get(); fail("should have thrown an exception"); } catch (SearchPhaseExecutionException e) { assertThat(e.toString() + "should not contained NotSerializableTransportException", e.toString().contains("NotSerializableTransportException"), equalTo(false)); assertThat(e.toString() + "should have contained 
GroovyScriptExecutionException", e.toString().contains("GroovyScriptExecutionException"), equalTo(true)); assertThat(e.toString()+ "should have contained an assert error", e.toString().contains("AssertionError[assert false"), equalTo(true)); } } @Test public void testGroovyScriptAccess() { client().prepareIndex("test", "doc", "1").setSource("foo", "quick brow fox jumped over the lazy dog", "bar", 1).get(); client().prepareIndex("test", "doc", "2").setSource("foo", "fast jumping spiders", "bar", 2).get(); client().prepareIndex("test", "doc", "3").setSource("foo", "dog spiders that can eat a dog", "bar", 3).get(); refresh(); // doc[] access SearchResponse resp = client().prepareSearch("test").setQuery(functionScoreQuery(matchAllQuery()) .add( scriptFunction(new Script("doc['bar'].value", ScriptType.INLINE, "groovy", null))) .boostMode(CombineFunction.REPLACE)).get(); assertNoFailures(resp); assertOrderedSearchHits(resp, "3", "2", "1"); } public void testScoreAccess() { client().prepareIndex("test", "doc", "1").setSource("foo", "quick brow fox jumped over the lazy dog", "bar", 1).get(); client().prepareIndex("test", "doc", "2").setSource("foo", "fast jumping spiders", "bar", 2).get(); client().prepareIndex("test", "doc", "3").setSource("foo", "dog spiders that can eat a dog", "bar", 3).get(); refresh(); // _score can be accessed SearchResponse resp = client().prepareSearch("test").setQuery(functionScoreQuery(matchQuery("foo", "dog")) .add(scriptFunction(new Script("_score", ScriptType.INLINE, "groovy", null))) .boostMode(CombineFunction.REPLACE)).get(); assertNoFailures(resp); assertSearchHits(resp, "3", "1"); // _score is comparable // NOTE: it is important to use 0.0 instead of 0 instead Groovy will do an integer comparison // and if the score if between 0 and 1 it will be considered equal to 0 due to the cast resp = client() .prepareSearch("test") .setQuery( functionScoreQuery(matchQuery("foo", "dog")).add( scriptFunction(new Script("_score > 0.0 ? 
_score : 0", ScriptType.INLINE, "groovy", null))).boostMode( CombineFunction.REPLACE)).get(); assertNoFailures(resp); assertSearchHits(resp, "3", "1"); } }<|fim▁end|>
<|file_name|>NativeWalletEventListener.java<|end_file_name|><|fim▁begin|>/** * Copyright 2013 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0<|fim▁hole|> * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.bushstar.htmlcoinj.jni; import com.bushstar.htmlcoinj.core.ECKey; import com.bushstar.htmlcoinj.core.Transaction; import com.bushstar.htmlcoinj.core.Wallet; import com.bushstar.htmlcoinj.core.WalletEventListener; import com.bushstar.htmlcoinj.script.Script; import java.math.BigInteger; import java.util.List; /** * An event listener that relays events to a native C++ object. A pointer to that object is stored in * this class using JNI on the native side, thus several instances of this can point to different actual * native implementations. */ public class NativeWalletEventListener implements WalletEventListener { public long ptr; @Override public native void onCoinsReceived(Wallet wallet, Transaction tx, BigInteger prevBalance, BigInteger newBalance); @Override public native void onCoinsSent(Wallet wallet, Transaction tx, BigInteger prevBalance, BigInteger newBalance); @Override public native void onReorganize(Wallet wallet); @Override public native void onTransactionConfidenceChanged(Wallet wallet, Transaction tx); @Override public native void onWalletChanged(Wallet wallet); @Override public native void onKeysAdded(Wallet wallet, List<ECKey> keys); @Override public native void onScriptsAdded(Wallet wallet, List<Script> scripts); }<|fim▁end|>
* * Unless required by applicable law or agreed to in writing, software
<|file_name|>iter.rs<|end_file_name|><|fim▁begin|>/* * iter.rs: Iterator implementation for rbtree. * Copyright (C) 2019 Oddcoder * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ // Credits where credits goes! // https://codereview.stackexchange.com/questions/110161/binary-trees-in-rust-iterators use super::rbtree_wrapper::{Augment, RBTree}; /// Iterator for [RBtree] pub struct TreeIterator<K: Ord + Copy, A: Copy, V> { right: Vec<RBTree<K, A, V>>, current: Option<RBTree<K, A, V>>, } impl<K: Ord + Copy, A: Copy, V> TreeIterator<K, A, V> where RBTree<K, A, V>: Augment<A>, { pub(crate) fn new(root: RBTree<K, A, V>) -> TreeIterator<K, A, V> { let mut iter = TreeIterator { right: vec![], current: None }; iter.add_subtree(root); iter } fn add_subtree(&mut self, root: RBTree<K, A, V>) { let mut node: RBTree<K, A, V> = root; while node.is_node() { if node.right_ref().is_node() { self.right.push(node.right()); } if node.left_ref().is_node() { let tmp = node.left(); self.right.push(node); node = tmp; } else { break; } } self.current = if node.is_node() { Some(node) } else { None }; } } impl<K: Ord + Copy, A: Copy, V> Iterator for TreeIterator<K, A, V> where RBTree<K, A, V>: Augment<A>, { type Item = (K, A, V); fn next(&mut self) -> Option<(K, A, V)> { let result; if let Some(node) = self.current.take() { result = Some((node.key(), node.aug_data(), node.data())); } else { return 
None; } if let Some(node) = self.right.pop() { self.add_subtree(node); }<|fim▁hole|>}<|fim▁end|>
result }
<|file_name|>ship.d.ts<|end_file_name|><|fim▁begin|>import * as React from 'react'; import { IconBaseProps } from 'react-icon-base'; declare class FaShip extends React.Component<IconBaseProps> { }<|fim▁hole|><|fim▁end|>
export = FaShip;
<|file_name|>httpd.py<|end_file_name|><|fim▁begin|># coding=utf-8 from tornado.wsgi import WSGIContainer from tornado.httpserver import HTTPServer from tornado.ioloop import IOLoop from app import app<|fim▁hole|>if __name__ == "__main__": http_server = HTTPServer(WSGIContainer(app)) http_server.listen(5000) IOLoop.instance().start()<|fim▁end|>
<|file_name|>coniks.go<|end_file_name|><|fim▁begin|>// Copyright 2017 Google Inc. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package coniks provides hashing for maps. package coniks import ( "bytes" "crypto" "encoding/binary" "fmt" "github.com/golang/glog" "github.com/google/trillian" "github.com/google/trillian/merkle/hashers" ) func init() { hashers.RegisterMapHasher(trillian.HashStrategy_CONIKS_SHA512_256, Default) hashers.RegisterMapHasher(trillian.HashStrategy_CONIKS_SHA256, New(crypto.SHA256)) } // Domain separation prefixes var ( leafIdentifier = []byte("L") emptyIdentifier = []byte("E") // Default is the standard CONIKS hasher. Default = New(crypto.SHA512_256) // Some zeroes, to avoid allocating temporary slices. zeroes = make([]byte, 32) ) // hasher implements the sparse merkle tree hashing algorithm specified in the CONIKS paper. type hasher struct { crypto.Hash } // New creates a new hashers.TreeHasher using the passed in hash function. func New(h crypto.Hash) hashers.MapHasher { return &hasher{Hash: h} } // EmptyRoot returns the root of an empty tree. func (m *hasher) EmptyRoot() []byte { panic("EmptyRoot() not defined for coniks.Hasher") } // HashEmpty returns the hash of an empty branch at a given height. // A height of 0 indicates the hash of an empty leaf. // Empty branches within the tree are plain interior nodes e1 = H(e0, e0) etc. 
func (m *hasher) HashEmpty(treeID int64, index []byte, height int) []byte { depth := m.BitLen() - height buf := bytes.NewBuffer(make([]byte, 0, 32)) h := m.New() buf.Write(emptyIdentifier) binary.Write(buf, binary.BigEndian, uint64(treeID)) m.writeMaskedIndex(buf, index, depth) binary.Write(buf, binary.BigEndian, uint32(depth)) h.Write(buf.Bytes()) r := h.Sum(nil) if glog.V(5) { glog.Infof("HashEmpty(%x, %d): %x", index, depth, r) } return r } // HashLeaf calculate the merkle tree leaf value: // H(Identifier || treeID || depth || index || dataHash) func (m *hasher) HashLeaf(treeID int64, index []byte, leaf []byte) []byte { depth := m.BitLen() buf := bytes.NewBuffer(make([]byte, 0, 32+len(leaf))) h := m.New() buf.Write(leafIdentifier) binary.Write(buf, binary.BigEndian, uint64(treeID)) m.writeMaskedIndex(buf, index, depth) binary.Write(buf, binary.BigEndian, uint32(depth)) buf.Write(leaf) h.Write(buf.Bytes()) p := h.Sum(nil) if glog.V(5) { glog.Infof("HashLeaf(%x, %d, %s): %x", index, depth, leaf, p) } return p } // HashChildren returns the internal Merkle tree node hash of the the two child nodes l and r. // The hashed structure is H(l || r). func (m *hasher) HashChildren(l, r []byte) []byte { buf := bytes.NewBuffer(make([]byte, 0, 32+len(l)+len(r))) h := m.New() buf.Write(l) buf.Write(r) h.Write(buf.Bytes()) p := h.Sum(nil) if glog.V(5) { glog.Infof("HashChildren(%x, %x): %x", l, r, p) } return p } // BitLen returns the number of bits in the hash function. func (m *hasher) BitLen() int { return m.Size() * 8 } // leftmask contains bitmasks indexed such that the left x bits are set. It is // indexed by byte position from 0-7 0 is special cased to 0xFF since 8 mod 8 // is 0. leftmask is only used to mask the last byte. var leftmask = [8]byte{0xFF, 0x80, 0xC0, 0xE0, 0xF0, 0xF8, 0xFC, 0xFE} // writeMaskedIndex writes the left depth bits of index directly to a Buffer (which never // returns an error on writes). 
This is then padded with zero bits to the Size() // of the index values in use by this hashes. This avoids the need to allocate // space for and copy a value that will then be discarded immediately. func (m *hasher) writeMaskedIndex(b *bytes.Buffer, index []byte, depth int) { if got, want := len(index), m.Size(); got != want { panic(fmt.Sprintf("index len: %d, want %d", got, want)) } if got, want := depth, m.BitLen(); got < 0 || got > want { panic(fmt.Sprintf("depth: %d, want <= %d && >= 0", got, want)) } prevLen := b.Len() if depth > 0 { // Write the first depthBytes, if there are any complete bytes. depthBytes := depth >> 3 if depthBytes > 0 { b.Write(index[:depthBytes]) } // Mask off unwanted bits in the last byte, if there is an incomplete one. if depth%8 != 0 {<|fim▁hole|> // Pad to the correct length with zeros. Allow for future hashers that // might be > 256 bits. needZeros := prevLen + len(index) - b.Len() for needZeros > 0 { chunkSize := needZeros if chunkSize > 32 { chunkSize = 32 } b.Write(zeroes[:chunkSize]) needZeros -= chunkSize } }<|fim▁end|>
b.WriteByte(index[depthBytes] & leftmask[depth%8]) } }
<|file_name|>MainController.java<|end_file_name|><|fim▁begin|>package io.fidelcoria.ayfmap.controller; import java.util.HashMap; import java.util.Map; import org.springframework.stereotype.Component; import javafx.fxml.FXML; import javafx.scene.control.Label; import javafx.scene.control.Tab; import javafx.scene.control.TabPane; @Component public class MainController { @FXML Label actionHeaderBar; @FXML TabPane actionTabPane; @FXML private GenerateTabController generateTabController; @FXML<|fim▁hole|> @FXML private DataTabController dataTabController; private static final Map<String, String> tabTitles; static { tabTitles = new HashMap<>(); tabTitles.put("generate-tab", "Generate Documents"); tabTitles.put("import-tab", "Import Documents"); tabTitles.put("edit-tab", "Edit Data"); } /** * Update the actionHeaderBar to reflect the selected tab */ public void tabClicked() { for (Tab tab : actionTabPane.getTabs()) { if (tab.isSelected()) { String title = tabTitles.get(tab.getId()); actionHeaderBar.setText(title); break; } } } }<|fim▁end|>
private ImportTabController importTabController;
<|file_name|>mytaskprolog.py<|end_file_name|><|fim▁begin|># # Copyright 2019-2022 Ghent University # # This file is part of vsc-mympirun, # originally created by the HPC team of Ghent University (http://ugent.be/hpc/en), # with support of Ghent University (http://ugent.be/hpc), # the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be), # the Flemish Research Foundation (FWO) (http://www.fwo.be/en) # and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en). # # https://github.com/hpcugent/vsc-mympirun # # vsc-mympirun is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation v2. # # vsc-mympirun is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with vsc-mympirun. If not, see <http://www.gnu.org/licenses/>. # """ End-to-end tests for mypmirun """ import os import logging logging.basicConfig(level=logging.DEBUG) from pmi_utils import PMITest from vsc.utils.affinity import sched_getaffinity, sched_setaffinity class TaskPrologEnd2End(PMITest): def setUp(self):<|fim▁hole|> super(TaskPrologEnd2End, self).setUp() self.script = os.path.join(os.path.dirname(self.script), 'mytaskprolog.py') def test_simple(self): origaff = sched_getaffinity() aff = sched_getaffinity() aff.set_bits([1]) # only use first core (we can always assume there is one core sched_setaffinity(aff) self.pmirun([], pattern='export CUDA_VISIBLE_DEVICES=0') # restore sched_setaffinity(origaff)<|fim▁end|>
"""Prepare to run test."""
<|file_name|>ModelFactorySimpleTest.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sling.models.it; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertSame; import static org.junit.Assert.assertTrue; import javax.jcr.Node; import javax.jcr.Session; import org.apache.commons.lang.RandomStringUtils; import org.apache.sling.api.resource.Resource; import org.apache.sling.api.resource.ResourceResolver; import org.apache.sling.api.resource.ResourceResolverFactory; import org.apache.sling.junit.annotations.SlingAnnotationsTestRunner; import org.apache.sling.junit.annotations.TestReference; import org.apache.sling.models.factory.ModelClassException; import org.apache.sling.models.factory.ModelFactory; import org.apache.sling.models.it.models.ConstructorInjectionTestModel; import org.apache.sling.models.it.models.FieldInjectionTestModel; import org.apache.sling.models.it.models.InterfaceInjectionTestModel; import org.apache.sling.models.it.models.implextend.InvalidImplementsInterfacePropertyModel; import org.apache.sling.models.it.models.implextend.SampleServiceInterface; import 
org.junit.After; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @RunWith(SlingAnnotationsTestRunner.class) public class ModelFactorySimpleTest { @TestReference private ResourceResolverFactory rrFactory; @TestReference private ModelFactory modelFactory; private String value; private ResourceResolver resolver; private Resource resource; private Node createdNode; @Before public void setUp() throws Exception { value = RandomStringUtils.randomAlphanumeric(10); resolver = rrFactory.getAdministrativeResourceResolver(null); Session session = resolver.adaptTo(Session.class); Node rootNode = session.getRootNode(); createdNode = rootNode.addNode("test_" + RandomStringUtils.randomAlphanumeric(10)); createdNode.setProperty("testProperty", value); session.save(); resource = resolver.getResource(createdNode.getPath()); } @After public void tearDown() throws Exception { if (createdNode != null) { createdNode.remove(); } if (resolver != null) { resolver.close(); } } @Test public void testCreateModel() { FieldInjectionTestModel model = modelFactory.createModel(resource, FieldInjectionTestModel.class); assertNotNull("Model is null", model);<|fim▁hole|> private static final class DummyClass { } @Test public void testIsModelClass() { assertTrue("Model is not detected as such", modelFactory.isModelClass(ConstructorInjectionTestModel.class)); assertFalse("Dummy class incorrectly detected as model class", modelFactory.isModelClass(DummyClass.class)); assertFalse("Model with invalid adaptable incorrectly detected as model class" , modelFactory.isModelClass(InvalidImplementsInterfacePropertyModel.class)); assertTrue("Model is not detected as such", modelFactory.isModelClass(SampleServiceInterface.class)); // being provided by two adapters } @Test public void testCanCreateFromAdaptable() { assertTrue("Model is not detected as such", modelFactory.canCreateFromAdaptable(resource, ConstructorInjectionTestModel.class)); assertTrue("Model is not detected as 
such", modelFactory.canCreateFromAdaptable(resource, SampleServiceInterface.class)); assertFalse("Model is not detected as such", modelFactory.canCreateFromAdaptable(new String(), ConstructorInjectionTestModel.class)); // invalid adaptable } @Test(expected=ModelClassException.class) public void testCanCreateFromAdaptableWithModelExceptin() { modelFactory.canCreateFromAdaptable(resource, DummyClass.class); // no model class } }<|fim▁end|>
assertEquals("Test Property is not set correctly", value, model.getTestProperty()); assertNotNull("Filters is null", model.getFilters()); assertSame("Adaptable is not injected", resource, model.getResource()); }
<|file_name|>file.rs<|end_file_name|><|fim▁begin|>extern crate starplot; use starplot::app::App; use std::env; fn main() { // Collect the arguments let args: Vec<_> = env::args().collect(); if args.len() <= 1 { // if no argument panic!("Send the absolut path of the JSON configuration file as an argument"); } // Get the introduced filepath let filepath: String = args[1].clone(); // Creates a new Application instance and read the configuration file let mut app: App = App::new(700, 420); app.read_conf(filepath); // Preprocessing app.preproc(); // Start app.start();<|fim▁hole|>}<|fim▁end|>
<|file_name|>x2apic.rs<|end_file_name|><|fim▁begin|>//! x2APIC, the most recent APIC on x86 for large servers with more than 255 cores. use bit_field::BitField; use super::*; use crate::msr::{ rdmsr, wrmsr, IA32_APIC_BASE, IA32_TSC_DEADLINE, IA32_X2APIC_APICID, IA32_X2APIC_EOI, IA32_X2APIC_ESR, IA32_X2APIC_ICR, IA32_X2APIC_LDR, IA32_X2APIC_LVT_LINT0, IA32_X2APIC_LVT_TIMER, IA32_X2APIC_SELF_IPI, IA32_X2APIC_SIVR, IA32_X2APIC_VERSION, }; /// Represents an x2APIC driver instance. #[derive(Debug)] pub struct X2APIC { /// Initial BASE msr register value. base: u64, } impl Default for X2APIC { fn default() -> Self { unsafe { X2APIC { base: rdmsr(IA32_APIC_BASE), } } } } impl X2APIC { /// Create a new x2APIC driver object for the local core. pub fn new() -> X2APIC { Default::default() } /// Attach to APIC (enable x2APIC mode, initialize LINT0) pub fn attach(&mut self) { // Enable unsafe { // Enable x2APIC mode globally self.base = rdmsr(IA32_APIC_BASE); self.base.set_bit(10, true); // Enable x2APIC self.base.set_bit(11, true); // Enable xAPIC wrmsr(IA32_APIC_BASE, self.base); // Enable this XAPIC (set bit 8, spurious IRQ vector 15) let svr: u64 = 1 << 8 | 15; wrmsr(IA32_X2APIC_SIVR, svr); // TODO: Fix magic number? let lint0 = 1 << 16 | (1 << 15) | (0b111 << 8) | 0x20; wrmsr(IA32_X2APIC_LVT_LINT0, lint0); let _esr = rdmsr(IA32_X2APIC_ESR); } } /// Detach from APIC (disable x2APIC and xAPIC mode). pub fn detach(&mut self) { unsafe { self.base = rdmsr(IA32_APIC_BASE); self.base.set_bit(10, false); // x2APIC self.base.set_bit(11, false); // xAPIC wrmsr(IA32_APIC_BASE, self.base); } } /// Send an IPI to yourself. /// /// # Safety /// Will interrupt core with `vector`. pub unsafe fn send_self_ipi(&self, vector: u64) { wrmsr(IA32_X2APIC_SELF_IPI, vector); } } /// Abstracts common interface of APIC (x2APIC, xAPIC) hardware devices. impl ApicControl for X2APIC { /// Is a bootstrap processor? fn bsp(&self) -> bool { (self.base & (1 << 8)) > 0 } /// Read local x2APIC ID. 
fn id(&self) -> u32 { unsafe { rdmsr(IA32_X2APIC_APICID) as u32 } } /// In x2APIC mode, the 32-bit logical x2APIC ID, can be read from LDR. fn logical_id(&self) -> u32 { unsafe { rdmsr(IA32_X2APIC_LDR) as u32 } } /// Read APIC version. fn version(&self) -> u32 { unsafe { rdmsr(IA32_X2APIC_VERSION) as u32 } } /// Enable TSC timer fn tsc_enable(&mut self, vector: u8) { unsafe { wrmsr(IA32_TSC_DEADLINE, 0); let mut lvt: u64 = rdmsr(IA32_X2APIC_LVT_TIMER); lvt &= !0xff; lvt |= vector as u64; // Unmask timer IRQ lvt.set_bit(16, false); // Enable TSC deadline mode lvt.set_bit(17, false); lvt.set_bit(18, true); wrmsr(IA32_X2APIC_LVT_TIMER, lvt); } } /// Set tsc deadline. fn tsc_set(&self, value: u64) { unsafe { crate::fence::mfence(); wrmsr(IA32_TSC_DEADLINE, value); } } /// End Of Interrupt -- Acknowledge interrupt delivery. fn eoi(&mut self) { unsafe { wrmsr(IA32_X2APIC_EOI, 0); } } /// Send a INIT IPI to a core. unsafe fn ipi_init(&mut self, core: ApicId) { let icr = Icr::for_x2apic( 0, core, DestinationShorthand::NoShorthand, DeliveryMode::Init, DestinationMode::Physical, DeliveryStatus::Idle, Level::Assert, TriggerMode::Level, ); self.send_ipi(icr); } /// Deassert INIT IPI. unsafe fn ipi_init_deassert(&mut self) { let icr = Icr::for_x2apic( 0, ApicId::X2Apic(0), // INIT deassert is always sent to everyone, so we are supposed to specify: DestinationShorthand::AllIncludingSelf, DeliveryMode::Init, DestinationMode::Physical, DeliveryStatus::Idle, Level::Deassert, TriggerMode::Level, ); self.send_ipi(icr); } /// Send a STARTUP IPI to a core. unsafe fn ipi_startup(&mut self, core: ApicId, start_page: u8) { let icr = Icr::for_x2apic( start_page, core, DestinationShorthand::NoShorthand, DeliveryMode::StartUp, DestinationMode::Physical, DeliveryStatus::Idle, Level::Assert, TriggerMode::Edge, ); self.send_ipi(icr); } /// Send a generic IPI. 
unsafe fn send_ipi(&mut self, icr: Icr) { wrmsr(IA32_X2APIC_ESR, 0); wrmsr(IA32_X2APIC_ESR, 0); wrmsr(IA32_X2APIC_ICR, icr.0); loop { let icr = rdmsr(IA32_X2APIC_ICR); if (icr >> 12 & 0x1) == 0 { break; } if rdmsr(IA32_X2APIC_ESR) > 0 { break; }<|fim▁hole|> } } }<|fim▁end|>
<|file_name|>Map()LambdaFunction.py<|end_file_name|><|fim▁begin|><|fim▁hole|> @author: Mohtashim """ # Create a list of strings: spells spells = ["protego", "accio", "expecto patronum", "legilimens"] # Use map() to apply a lambda function over spells: shout_spells shout_spells = map(lambda item: item + '!!!', spells) # Convert shout_spells to a list: shout_spells_list shout_spells_list = list(shout_spells) # Convert shout_spells into a list and print it print(shout_spells_list)<|fim▁end|>
# -*- coding: utf-8 -*- """ Created on Mon Jan 30 20:12:17 2017
<|file_name|>mythril_leveldb_test.py<|end_file_name|><|fim▁begin|>import io import pytest from contextlib import redirect_stdout from mock import patch from mythril.mythril import MythrilLevelDB, MythrilConfig from mythril.exceptions import CriticalError @patch("mythril.ethereum.interface.leveldb.client.EthLevelDB.search") @patch("mythril.ethereum.interface.leveldb.client.ETH_DB", return_value=None) @patch("mythril.ethereum.interface.leveldb.client.LevelDBReader", return_value=None) @patch("mythril.ethereum.interface.leveldb.client.LevelDBWriter", return_value=None) def test_leveldb_code_search(mock_leveldb, f1, f2, f3): config = MythrilConfig() config.set_api_leveldb("some path") leveldb_search = MythrilLevelDB(leveldb=config.eth_db) leveldb_search.search_db("code#PUSH#") mock_leveldb.assert_called() @patch("mythril.ethereum.interface.leveldb.client.ETH_DB", return_value=None)<|fim▁hole|>@patch("mythril.ethereum.interface.leveldb.client.LevelDBWriter", return_value=None) def test_leveldb_hash_search_incorrect_input(f1, f2, f3): config = MythrilConfig() config.set_api_leveldb("some path") leveldb_search = MythrilLevelDB(leveldb=config.eth_db) with pytest.raises(CriticalError): leveldb_search.contract_hash_to_address("0x23") @patch( "mythril.ethereum.interface.leveldb.client.EthLevelDB.contract_hash_to_address", return_value="0xddbb615cb2ffaff7233d8a6f3601621de94795e1", ) @patch("mythril.ethereum.interface.leveldb.client.ETH_DB", return_value=None) @patch("mythril.ethereum.interface.leveldb.client.LevelDBReader", return_value=None) @patch("mythril.ethereum.interface.leveldb.client.LevelDBWriter", return_value=None) def test_leveldb_hash_search_correct_input(mock_hash_to_address, f1, f2, f3): config = MythrilConfig() config.set_api_leveldb("some path") leveldb_search = MythrilLevelDB(leveldb=config.eth_db) f = io.StringIO() with redirect_stdout(f): leveldb_search.contract_hash_to_address( "0x0464e651bcc40de28fc7fcde269218d16850bac9689da5f4a6bd640fd3cdf6aa" ) out = 
f.getvalue() mock_hash_to_address.assert_called() assert out == "0xddbb615cb2ffaff7233d8a6f3601621de94795e1\n"<|fim▁end|>
@patch("mythril.ethereum.interface.leveldb.client.LevelDBReader", return_value=None)
<|file_name|>FlowElection.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.cluster.coordination.flow; import org.apache.nifi.cluster.protocol.DataFlow; import org.apache.nifi.cluster.protocol.NodeIdentifier; /** * <p> * A FlowElection is responsible for examining multiple versions of a dataflow and determining which of * the versions is the "correct" version of the flow. * </p> */ public interface FlowElection { /** * Checks if the election has completed or not. * * @return <code>true</code> if the election has completed, <code>false</code> otherwise. */ boolean isElectionComplete(); /** * Returns <code>true</code> if a vote has already been counted for the given Node Identifier, <code>false</code> otherwise. * * @param nodeIdentifier the identifier of the node * @return <code>true</code> if a vote has already been counted for the given Node Identifier, <code>false</code> otherwise. */ boolean isVoteCounted(NodeIdentifier nodeIdentifier); /** * If the election has not yet completed, adds the given DataFlow to the list of candidates * (if it is not already in the running) and increments the number of votes for this DataFlow by 1. 
* If the election has completed, the given candidate is ignored, and the already-elected DataFlow * will be returned. If the election has not yet completed, a vote will be cast for the given * candidate and <code>null</code> will be returned, signifying that no candidate has yet been chosen. * * @param candidate the DataFlow to vote for and add to the pool of candidates if not already present<|fim▁hole|> * * @return the elected {@link DataFlow}, or <code>null</code> if no DataFlow has yet been elected */ DataFlow castVote(DataFlow candidate, NodeIdentifier nodeIdentifier); /** * Returns the DataFlow that has been elected as the "correct" version of the flow, or <code>null</code> * if the election has not yet completed. * * @return the DataFlow that has been elected as the "correct" version of the flow, or <code>null</code> * if the election has not yet completed. */ DataFlow getElectedDataFlow(); /** * Returns a human-readable description of the status of the election * * @return a human-readable description of the status of the election */ String getStatusDescription(); }<|fim▁end|>
* @param nodeIdentifier the identifier of the node casting the vote
<|file_name|>split_path_components.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*- # # GuessIt - A library for guessing information from filenames # Copyright (c) 2013 Nicolas Wack <[email protected]> # # GuessIt is free software; you can redistribute it and/or modify it under # the terms of the Lesser GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # GuessIt is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Lesser GNU General Public License for more details. # # You should have received a copy of the Lesser GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from __future__ import unicode_literals from guessit import fileutils import os.path import logging log = logging.getLogger(__name__) priority = 255 def process(mtree): """first split our path into dirs + basename + ext :return: the filename split into [ dir*, basename, ext ] """ components = fileutils.split_path(mtree.value) basename = components.pop(-1) components += list(os.path.splitext(basename)) components[-1] = components[-1][1:] # remove the '.' from the extension mtree.split_on_components(components)<|fim▁end|>
#!/usr/bin/env python
<|file_name|>todoCtrl.js<|end_file_name|><|fim▁begin|>/*global angular */ /** * The main controller for the app. The controller: * - retrieves and persists the model via the todoStorage service * - exposes the model to the template and provides event handlers */ angular.module('todomvc') .controller('TodoCtrl', function TodoCtrl($scope, $routeParams, $filter, store) { 'use strict'; var todos = $scope.todos = store.todos; $scope.newTodo = ''; $scope.editedTodo = null; $scope.$watch('todos', function () { $scope.remainingCount = $filter('filter')(todos, { completed: false }).length; $scope.completedCount = todos.length - $scope.remainingCount; $scope.allChecked = !$scope.remainingCount; }, true); // Monitor the current route for changes and adjust the filter accordingly. $scope.$on('$routeChangeSuccess', function () { var status = $scope.status = $routeParams.status || ''; $scope.statusFilter = (status === 'active') ? { completed: false } : (status === 'completed') ? { completed: true } : ''; }); $scope.addTodo = function () { var newTodo = { title: $scope.newTodo.trim(), completed: false }; if (!newTodo.title) { return; } $scope.saving = true; store.insert(newTodo) .then(function success() { $scope.newTodo = ''; })<|fim▁hole|> }; $scope.editTodo = function (todo) { $scope.editedTodo = todo; // Clone the original todo to restore it on demand. $scope.originalTodo = angular.extend({}, todo); }; $scope.saveEdits = function (todo, event) { // Blur events are automatically triggered after the form submit event. // This does some unfortunate logic handling to prevent saving twice. if (event === 'blur' && $scope.saveEvent === 'submit') { $scope.saveEvent = null; return; } $scope.saveEvent = event; if ($scope.reverted) { // Todo edits were reverted-- don't save. $scope.reverted = null; return; } todo.title = todo.title.trim(); if (todo.title === $scope.originalTodo.title) { $scope.editedTodo = null; return; } store[todo.title ? 
'put' : 'delete'](todo) .then(function success() {}, function error() { todo.title = $scope.originalTodo.title; }) .finally(function () { $scope.editedTodo = null; }); }; $scope.revertEdits = function (todo) { todos[todos.indexOf(todo)] = $scope.originalTodo; $scope.editedTodo = null; $scope.originalTodo = null; $scope.reverted = true; }; $scope.removeTodo = function (todo) { store.delete(todo); }; $scope.saveTodo = function (todo) { store.put(todo); }; $scope.toggleCompleted = function (todo, completed) { if (angular.isDefined(completed)) { todo.completed = completed; } store.put(todo, todos.indexOf(todo)) .then(function success() {}, function error() { todo.completed = !todo.completed; }); }; $scope.clearCompletedTodos = function () { store.clearCompleted(); }; $scope.markAll = function (completed) { todos.forEach(function (todo) { if (todo.completed !== completed) { $scope.toggleCompleted(todo, completed); } }); }; });<|fim▁end|>
.finally(function () { $scope.saving = false; });
<|file_name|>download.py<|end_file_name|><|fim▁begin|># This file is part of the "upq" program used on springfiles.com to manage file # uploads, mirror distribution etc. It is published under the GPLv3. # #Copyright (C) 2011 Daniel Troeder (daniel #at# admin-box #dot# com) # #You should have received a copy of the GNU General Public License #along with this program. If not, see <http://www.gnu.org/licenses/>. <|fim▁hole|># downloads a file from upqjob import UpqJob from upqdb import UpqDB from time import time import os import shutil import requests class Download(UpqJob): """ "download url:$url" """ def run(self): url=self.jobdata['url'] filename=os.path.basename(url) tmpfile=os.path.join(self.getcfg('temppath', '/tmp'), filename) self.jobdata['file']=tmpfile self.logger.debug("going to download %s", url) try: response = requests.get(url, stream=True, verify=False) with open(tmpfile, 'wb') as out_file: shutil.copyfileobj(response.raw, out_file) del response self.logger.debug("downloaded to %s", tmpfile) except Exception as e: self.logger.error(str(e)) return False return True<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from pupa.scrape import Jurisdiction, Organization from .people import IDPersonScraper from .committees import IDCommitteeScraper from .bills import IDBillScraper class Idaho(Jurisdiction): """ IDAHO Scraper """ division_id = "ocd-division/country:us/state:id" classification = "government" name = "Idaho" url = "http://www.legislature.idaho.gov" scrapers = { 'people': IDPersonScraper, 'committees': IDCommitteeScraper, 'bills': IDBillScraper } parties = [ {'name': 'Republican'}, {'name': 'Democratic'} ] legislative_sessions = [ { "_scraped_name": "2011 Session", "classification": "primary", "end_date": "2011-04-07", "identifier": "2011", "name": "61st Legislature, 1st Regular Session (2011)", "start_date": "2011-01-10" }, { "_scraped_name": "2012 Session", "classification": "primary", "identifier": "2012", "name": "61st Legislature, 2nd Regular Session (2012)" }, { "_scraped_name": "2013 Session", "classification": "primary", "identifier": "2013", "name": "62nd Legislature, 1st Regular Session (2013)" }, { "_scraped_name": "2014 Session", "classification": "primary", "identifier": "2014", "name": "63nd Legislature, 1st Regular Session (2014)" }, { "_scraped_name": "2015 Session", "classification": "primary", "end_date": "2015-04-10", "identifier": "2015", "name": "64th Legislature, 1st Regular Session (2015)", "start_date": "2015-01-12" }, { "_scraped_name": "2015 Extraordinary Session", "classification": "special", "end_date": "2015-05-18", "identifier": "2015spcl", "name": "65th Legislature, 1st Extraordinary Session (2015)", "start_date": "2015-05-18"<|fim▁hole|> "end_date": "2016-03-25", "identifier": "2016", "name": "63rd Legislature, 2nd Regular Session (2016)", "start_date": "2016-01-11" }, { "_scraped_name": "2017 Session", "classification": "primary", "end_date": "2017-04-07", "identifier": "2017", "name": "64th Legislature, 1st Regular Session (2017)", "start_date": "2017-01-09" } ] 
ignored_scraped_sessions = [ "2010 Session", "2009 Session", "2008 Session", "2007 Session", "2006 Extraordinary Session", "2006 Session", "2005 Session", "2004 Session", "2003 Session", "2002 Session", "2001 Session", "2000 Extraordinary Session", "2000 Session", "1999 Session", "1998 Session" ] def get_organizations(self): legislature_name = "Idaho State Legislature" lower_chamber_name = "House" lower_seats = 35 lower_title = "Representative" upper_chamber_name = "Senate" upper_seats = 35 upper_title = "Senator" legislature = Organization(name=legislature_name, classification="legislature") upper = Organization(upper_chamber_name, classification='upper', parent_id=legislature._id) lower = Organization(lower_chamber_name, classification='lower', parent_id=legislature._id) for n in range(1, upper_seats+1): upper.add_post( label=str(n), role=upper_title, division_id='{}/sldu:{}'.format(self.division_id, n)) for n in range(1, lower_seats+1): lower.add_post( label=str(n), role=lower_title, division_id='{}/sldl:{}'.format(self.division_id, n)) yield legislature yield upper yield lower<|fim▁end|>
}, { "_scraped_name": "2016 Session", "classification": "primary",
<|file_name|>nl.rs<|end_file_name|><|fim▁begin|>impl_trait!( /// Trait marking constants valid for use in `Nlmsghdr.nl_type` NlType, u16 ); impl_var_trait!( /// Values for `nl_type` in `Nlmsghdr` Nlmsg, u16, NlType, Noop => libc::NLMSG_NOOP as u16, Error => libc::NLMSG_ERROR as u16, Done => libc::NLMSG_DONE as u16, Overrun => libc::NLMSG_OVERRUN as u16 ); impl_var_trait!( /// Values for `nl_type` in `Nlmsghdr` GenlId, u16, NlType, Ctrl => libc::GENL_ID_CTRL as u16, #[cfg(target_env="gnu")] VfsDquot => libc::GENL_ID_VFS_DQUOT as u16, #[cfg(target_env="gnu")] Pmcraid => libc::GENL_ID_PMCRAID as u16 ); impl_var_trait!( /// rtnetlink-related values for `nl_type` in `Nlmsghdr` Rtm, u16, NlType, Newlink => libc::RTM_NEWLINK, Dellink => libc::RTM_DELLINK, Getlink => libc::RTM_GETLINK, Setlink => libc::RTM_SETLINK, Newaddr => libc::RTM_NEWADDR, Deladdr => libc::RTM_DELADDR, Getaddr => libc::RTM_GETADDR, Newroute => libc::RTM_NEWROUTE, Delroute => libc::RTM_DELROUTE, Getroute => libc::RTM_GETROUTE, Newneigh => libc::RTM_NEWNEIGH, Delneigh => libc::RTM_DELNEIGH, Getneigh => libc::RTM_GETNEIGH, Newrule => libc::RTM_NEWRULE, Delrule => libc::RTM_DELRULE, Getrule => libc::RTM_GETRULE, Newqdisc=> libc::RTM_NEWQDISC, Delqdisc=> libc::RTM_DELQDISC, Getqdisc=> libc::RTM_GETQDISC, Newtclass => libc::RTM_NEWTCLASS, Deltclass => libc::RTM_DELTCLASS, Gettclass => libc::RTM_GETTCLASS, Newtfilter => libc::RTM_NEWTFILTER, Deltfilter => libc::RTM_DELTFILTER, Gettfilter => libc::RTM_GETTFILTER, Newaction => libc::RTM_NEWACTION, Delaction => libc::RTM_DELACTION, Getaction => libc::RTM_GETACTION, Newprefix => libc::RTM_NEWPREFIX, Getmulticast => libc::RTM_GETMULTICAST, Getanycast => libc::RTM_GETANYCAST, Newneightbl => libc::RTM_NEWNEIGHTBL, Getneightbl => libc::RTM_GETNEIGHTBL, Setneightbl => libc::RTM_SETNEIGHTBL, Newnduseropt => libc::RTM_NEWNDUSEROPT, Newaddrlabel => libc::RTM_NEWADDRLABEL, Deladdrlabel => libc::RTM_DELADDRLABEL, Getaddrlabel => libc::RTM_GETADDRLABEL, Getdcb => 
libc::RTM_GETDCB, Setdcb => libc::RTM_SETDCB, Newnetconf => libc::RTM_NEWNETCONF, Getnetconf => libc::RTM_GETNETCONF, Newmdb => libc::RTM_NEWMDB, Delmdb => libc::RTM_DELMDB, Getmdb => libc::RTM_GETMDB, Newnsid => libc::RTM_NEWNSID, Delnsid => libc::RTM_DELNSID, Getnsid => libc::RTM_GETNSID ); impl_var!( /// Values for `nl_flags` in `Nlmsghdr` NlmF, u16, Request => libc::NLM_F_REQUEST as u16, Multi => libc::NLM_F_MULTI as u16, Ack => libc::NLM_F_ACK as u16, Echo => libc::NLM_F_ECHO as u16, DumpIntr => libc::NLM_F_DUMP_INTR as u16, DumpFiltered => libc::NLM_F_DUMP_FILTERED as u16, Root => libc::NLM_F_ROOT as u16, Match => libc::NLM_F_MATCH as u16, Atomic => libc::NLM_F_ATOMIC as u16, Dump => libc::NLM_F_DUMP as u16,<|fim▁hole|>);<|fim▁end|>
Replace => libc::NLM_F_REPLACE as u16, Excl => libc::NLM_F_EXCL as u16, Create => libc::NLM_F_CREATE as u16, Append => libc::NLM_F_APPEND as u16
<|file_name|>PrimeSim.java<|end_file_name|><|fim▁begin|>package fastSim.data; import java.io.FileInputStream; import java.io.FileNotFoundException; import java.io.FileOutputStream; import java.io.IOException; import java.io.ObjectInputStream; import java.io.ObjectOutputStream; import java.io.Serializable; import java.util.*; //import fanweizhu.fastSim.util.Config; //import fanweizhu.fastSim.util.IndexManager; //import fanweizhu.fastSim.util.KeyValuePair; //import fanweizhu.fastSim.util.MapCapacity; import fastSim.util.*; import fastSim.util.io.DataReader; import fastSim.util.io.DataWriter; public class PrimeSim implements Serializable{ /** * */ private static final long serialVersionUID = -7028575305146090045L; private List<Integer> hubs; protected Map<Integer, Map<Integer,Double>> map; protected boolean outG; protected List<Integer> meetingNodes; /*public PrimeSim(int capacity) { super(capacity); hubs = new ArrayList<Integer>(); }*/ public PrimeSim() { map = new HashMap<Integer, Map<Integer,Double>>(); hubs = new ArrayList<Integer>(); meetingNodes = new ArrayList<Integer>(); } public PrimeSim(int numNodes) { //need to change MapCapacity when double->Map? 
map = new HashMap<Integer, Map<Integer,Double>>(MapCapacity.compute(numNodes)); hubs = new ArrayList<Integer>(); } public Set<Integer> getLengths(){ return map.keySet(); } public int numHubs() { return hubs.size(); } public int numLength(){ return map.size(); } public Map<Integer,Map<Integer,Double>> getMap(){ return map; } public int getHubId(int index) { return hubs.get(index); } public List<Integer> getMeetingNodes(){ return meetingNodes; } public void addNewNode(Node h, String simType){ h.isVisited = true; if(h.isHub) hubs.add(h.id); if(simType=="in" && h.out.size()>1) //store meeting nodes for ingraphs //meetingnodes refer to >1 nodes (descendants) meetingNodes.add(h.id); } public void set(int l, Node n, double value) { // if (n.isVisited == false){ // if (n.isHub) // hubs.add(n.id); // if (graphType=="in" && n.in.size()>1) // meetingNodes.add(n.id); // } // Map<Integer, Double> nodesVal; if (map.get(l)!= null) { nodesVal = map.get(l); nodesVal.put(n.id, value); map.put(l, nodesVal); } else { nodesVal = new HashMap<Integer,Double>(); nodesVal.put(n.id, value); map.put(l, nodesVal); } } public void set(int l, Map<Integer,Double> nodeValuePairs){ //System.out.println(l); Map<Integer, Double> nodesVal = map.get(l); // for(Integer i:nodeValuePairs.keySet()) { // System.out.println("PS node: "+ i + " rea: " +nodeValuePairs.get(i)); // } if(nodesVal == null) { map.put(l, nodeValuePairs); } else{ System.out.println("####PrimeSim line108: should not go to here."); nodesVal.putAll(nodeValuePairs); map.put(l, nodesVal); } //System.out.println("length_Test:" + l + " Map_Size:" + map.get(l).size()); // for(Integer i: map.get(l).keySet()) // System.out.println(map.get(l).get(i)); } public long computeStorageInBytes() { long nodeIdSize = (1 + hubs.size()) * 4; long mapSize = (1 + map.size()) * 4 + map.size() * 8; return nodeIdSize + mapSize; } public String getCountInfo() { //int graphSize = map.size(); int hubSize = hubs.size(); int meetingNodesSize = meetingNodes.size(); 
return "hub size: " + hubSize + " meetingNodesSize: " + meetingNodesSize ; } public void trim(double clip) { Map<Integer, Map<Integer,Double>> newMap = new HashMap<Integer, Map<Integer,Double>>(); List<Integer> newHublist = new ArrayList<Integer>(); List<Integer> newXlist = new ArrayList<Integer>(); for (int l: map.keySet()){ Map<Integer, Double> pairMap =map.get(l); Map<Integer, Double> newPairs = new HashMap<Integer, Double>(); for (int nid: pairMap.keySet()){ double score = pairMap.get(nid); if (score > clip){ newPairs.put(nid, score); if(hubs.contains(nid) && !newHublist.contains(nid)) newHublist.add(nid); if(meetingNodes.contains(nid) && !newXlist.contains(nid)) newXlist.add(nid); } } newMap.put(l, newPairs); } this.map = newMap; this.hubs = newHublist; this.meetingNodes = newXlist; } public void saveToDisk(int id,String type,boolean doTrim) throws Exception { String path = ""; if(type == "out") //path = "./outSim/" + Integer.toString(id); path = IndexManager.getIndexDeepDir() + "out/" +Integer.toString(id); else if(type == "in") //path = "./inSim/" + Integer.toString(id); path = IndexManager.getIndexDeepDir() + "in/" +Integer.toString(id); else{ System.out.println("Type of prime graph should be either out or in."); System.exit(0); } // System.out.println(path+"/"+id); DataWriter out = new DataWriter(path); if (doTrim) trim(Config.clip); out.writeInteger(hubs.size()); for (int i : hubs) { out.writeInteger(i); } out.writeInteger(meetingNodes.size()); for(int i: meetingNodes){ out.writeInteger(i); }<|fim▁hole|> int pairNum = map.get(i).size(); Map<Integer,Double> pairMap = map.get(i); out.writeInteger(pairNum); for(int j: pairMap.keySet()){ out.writeInteger(j); out.writeDouble(pairMap.get(j)); } } out.close(); /*//test: read all the content DataReader in = new DataReader(path); while(true){ double oneNum =in.readDouble(); if (oneNum == -1.11) break; System.out.print(oneNum+"\t"); } System.out.println(); in.close();*/ } public void loadFromDisk(int id,String 
type) throws Exception { String path = ""; if(type == "out") path = IndexManager.getIndexDeepDir() + "out/" + Integer.toString(id); else if(type == "in") path = IndexManager.getIndexDeepDir() + "in/" + Integer.toString(id); else { System.out.println("Type of prime graph should be either out or in."); System.exit(0); } //============== DataReader in = new DataReader(path); int n = in.readInteger(); this.hubs = new ArrayList<Integer>(n); for (int i = 0; i < n; i++) this.hubs.add(in.readInteger()); int numM = in.readInteger(); this.meetingNodes=new ArrayList<Integer>(numM); for(int i =0; i<numM; i++) this.meetingNodes.add(in.readInteger()); int numL = in.readInteger(); for(int i=0; i<numL; i++){ int numPair = in.readInteger(); Map<Integer,Double> pairMap = new HashMap<Integer, Double>(); for(int j=0; j<numPair; j++){ int nodeId = in.readInteger(); double nodeScore = in.readDouble(); pairMap.put(nodeId, nodeScore); } this.map.put(i, pairMap); } in.close(); } public PrimeSim duplicate() { // TODO Auto-generated method stub PrimeSim sim = new PrimeSim(); sim.map.putAll(this.map); return sim; } public void addFrom(PrimeSim nextOut, Map<Integer, Double> oneHubValue) { // TODO Auto-generated method stub for (int lenToHub : oneHubValue.keySet()){ double hubScoreoflen = oneHubValue.get(lenToHub); for (int lenFromHub : nextOut.getMap().keySet()){ if(lenFromHub == 0){ // the new score of hub (over length==0) is just the score on prime graph continue; } int newLen = lenToHub + lenFromHub; if (!this.getMap().containsKey(newLen)) this.getMap().put(newLen, new HashMap<Integer,Double>()); for(int toNode: nextOut.getMap().get(lenFromHub).keySet()){ double oldValue = this.getMap().get(newLen).keySet() .contains(toNode) ? 
this.getMap().get(newLen).get(toNode): 0.0; //System.out.println(oldValue); double newValue = hubScoreoflen *nextOut.getMap().get(lenFromHub).get(toNode); // //added aug-29 // if (newValue<Config.epsilon) // continue; this.getMap().get(newLen).put(toNode, oldValue + newValue) ; // PrintInfor.printDoubleMap(this.getMap(), "assemble simout of the hub at length: " + lenFromHub +" node: "+ toNode ); // System.out.println(this.getMap()); } } } } public void addMeetingNodes(List<Integer> nodes){ for (int nid: nodes){ if (!this.meetingNodes.contains(nid)) this.meetingNodes.add(nid); } //System.out.println("====PrimeSim: line 195: meetingnodes Size " + this.meetingNodes.size()); } }<|fim▁end|>
out.writeInteger(map.size()); for(int i=0; i<map.size();i++){
<|file_name|>chiller_absorption.py<|end_file_name|><|fim▁begin|>""" Absorption chillers """ import cea.config import cea.inputlocator import pandas as pd import numpy as np from math import log, ceil import sympy from cea.constants import HEAT_CAPACITY_OF_WATER_JPERKGK from cea.analysis.costs.equations import calc_capex_annualized, calc_opex_annualized __author__ = "Shanshan Hsieh" __copyright__ = "Copyright 2015, Architecture and Building Systems - ETH Zurich" __credits__ = ["Shanshan Hsieh"] __license__ = "MIT" __version__ = "0.1" __maintainer__ = "Daren Thomas" __email__ = "[email protected]" __status__ = "Production" # technical model def calc_chiller_main(mdot_chw_kgpers, T_chw_sup_K, T_chw_re_K, T_hw_in_C, T_ground_K, absorption_chiller): """ This model calculates the operation conditions of the absorption chiller given the chilled water loads in evaporators and the hot water inlet temperature in the generator (desorber). This is an empirical model using characteristic equation method developed by _[Kuhn A. & Ziegler F., 2005]. The parameters of each absorption chiller can be derived from experiments or performance curves from manufacturer's catalog, more details are described in _[Puig-Arnavat M. et al, 2010]. Assumptions: constant external flow rates (chilled water at the evaporator, cooling water at the condenser and absorber, hot water at the generator). :param mdot_chw_kgpers: required chilled water flow rate :type mdot_chw_kgpers: float :param T_chw_sup_K: required chilled water supply temperature (outlet from the evaporator) :type T_chw_sup_K: float :param T_chw_re_K: required chilled water return temperature (inlet to the evaporator) :type T_chw_re_K: float :param T_hw_in_C: hot water inlet temperature to the generator :type T_hw_in_C: float :param T_ground_K: ground temperature :type T_ground_K: float :param locator: locator class :return: ..[Kuhn A. 
& Ziegler F., 2005] Operational results of a 10kW absorption chiller and adaptation of the characteristic equation. In: Proceedings of the interantional conference solar air conditioning. Bad Staffelstein, Germany: 2005. ..[Puig-Arnavat M. et al, 2010] Analysis and parameter identification for characteristic equations of single- and double-effect absorption chillers by means of multivariable regression. Int J Refrig: 2010. """ chiller_prop = absorption_chiller.chiller_prop # get data from the class # create a dict of input operating conditions input_conditions = {'T_chw_sup_K': T_chw_sup_K, 'T_chw_re_K': T_chw_re_K, 'T_hw_in_C': T_hw_in_C, 'T_ground_K': T_ground_K} mcp_chw_WperK = mdot_chw_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK q_chw_total_W = mcp_chw_WperK * (T_chw_re_K - T_chw_sup_K) if np.isclose(q_chw_total_W, 0.0): wdot_W = 0.0 q_cw_W = 0.0 q_hw_W = 0.0 T_hw_out_C = np.nan EER = 0.0 input_conditions['q_chw_W'] = 0.0 else: min_chiller_size_W = min(chiller_prop['cap_min'].values) max_chiller_size_W = max(chiller_prop['cap_max'].values) # get chiller properties and input conditions according to load if q_chw_total_W < min_chiller_size_W: # get chiller property according to load chiller_prop = chiller_prop[chiller_prop['cap_min'] == min_chiller_size_W] # operate at minimum load number_of_chillers_activated = 1.0 # only activate one chiller input_conditions['q_chw_W'] = chiller_prop['cap_min'].values # minimum load elif q_chw_total_W <= max_chiller_size_W: # get chiller property according to load chiller_prop = chiller_prop[(chiller_prop['cap_min'] <= q_chw_total_W) & (chiller_prop['cap_max'] >= q_chw_total_W)] # operate one chiller at the cooling load number_of_chillers_activated = 1.0 # only activate one chiller input_conditions['q_chw_W'] = q_chw_total_W # operate at the chilled water load else: # get chiller property according to load chiller_prop = chiller_prop[chiller_prop['cap_max'] == max_chiller_size_W] # distribute loads to multiple chillers 
number_of_chillers_activated = q_chw_total_W / max_chiller_size_W # operate at maximum load input_conditions['q_chw_W'] = max(chiller_prop['cap_max'].values) absorption_chiller.update_data(chiller_prop) operating_conditions = calc_operating_conditions(absorption_chiller, input_conditions) # calculate chiller outputs wdot_W = calc_power_demand(input_conditions['q_chw_W'], chiller_prop) * number_of_chillers_activated q_cw_W = operating_conditions['q_cw_W'] * number_of_chillers_activated q_hw_W = operating_conditions['q_hw_W'] * number_of_chillers_activated T_hw_out_C = operating_conditions['T_hw_out_C'] EER = q_chw_total_W / (q_hw_W + wdot_W) if T_hw_out_C < 0.0 : print ('T_hw_out_C = ', T_hw_out_C, ' incorrect condition, check absorption chiller script.') chiller_operation = {'wdot_W': wdot_W, 'q_cw_W': q_cw_W, 'q_hw_W': q_hw_W, 'T_hw_out_C': T_hw_out_C, 'q_chw_W': q_chw_total_W, 'EER': EER} return chiller_operation def calc_operating_conditions(absorption_chiller, input_conditions): """ Calculates chiller operating conditions at given input conditions by solving the characteristic equations and the energy balance equations. This method is adapted from _[Kuhn A. & Ziegler F., 2005]. The heat rejection to cooling tower is approximated with the energy balance: Q(condenser) + Q(absorber) = Q(generator) + Q(evaporator) :param AbsorptionChiller chiller_prop: parameters in the characteristic equations and the external flow rates. :param input_conditions: :type input_conditions: dict :return: a dict with operating conditions of the chilled water, cooling water and hot water loops in a absorption chiller. To improve speed, the system of equations was solved using sympy for the output variable ``q_hw_kW`` which is then used to compute the remaining output variables. 
The following code was used to create the expression to calculate ``q_hw_kW`` with:: # use symbolic computation to derive a formula for q_hw_kW: # first, make sure all the variables are sympy symbols: T_chw_in_C, T_chw_out_C, T_cw_in_C, T_hw_in_C, mcp_cw_kWperK, mcp_hw_kWperK, q_chw_kW = sympy.symbols( "T_chw_in_C, T_chw_out_C, T_cw_in_C, T_hw_in_C, mcp_cw_kWperK, mcp_hw_kWperK, q_chw_kW") T_hw_out_C, T_cw_out_C, q_hw_kW = sympy.symbols('T_hw_out_C, T_cw_out_C, q_hw_kW') a_e, a_g, e_e, e_g, r_e, r_g, s_e, s_g = sympy.symbols("a_e, a_g, e_e, e_g, r_e, r_g, s_e, s_g") ddt_e, ddt_g = sympy.symbols("ddt_e, ddt_g") # the system of equations: eq_e = s_e * ddt_e + r_e - q_chw_kW eq_ddt_e = ((T_hw_in_C + T_hw_out_C) / 2.0 + a_e * (T_cw_in_C + T_cw_out_C) / 2.0 + e_e * (T_chw_in_C + T_chw_out_C) / 2.0 - ddt_e) eq_g = s_g * ddt_g + r_g - q_hw_kW eq_ddt_g = ((T_hw_in_C + T_hw_out_C) / 2.0 + a_g * (T_cw_in_C + T_cw_out_C) / 2.0 + e_g * (T_chw_in_C + T_chw_out_C) / 2.0 - ddt_g) eq_bal_g = (T_hw_in_C - T_hw_out_C) - q_hw_kW / mcp_hw_kWperK # solve the system of equations with sympy eq_sys = [eq_e, eq_g, eq_bal_g, eq_ddt_e, eq_ddt_g] unknown_variables = (T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g) a, b = sympy.linear_eq_to_matrix(eq_sys, unknown_variables) T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g = tuple(*sympy.linsolve(eq_sys, unknown_variables)) q_hw_kW.simplify() ..[Kuhn A. & Ziegler F., 2005] Operational results of a 10kW absorption chiller and adaptation of the characteristic equation. In: Proceedings of the interantional conference solar air conditioning. Bad Staffelstein, Germany: 2005. 
""" # external water circuits (e: chilled water, ac: cooling water, d: hot water) T_hw_in_C = input_conditions['T_hw_in_C'] T_cw_in_C = input_conditions['T_ground_K'] - 273.0 # condenser water inlet temperature T_chw_in_C = input_conditions['T_chw_re_K'] - 273.0 # inlet to the evaporator T_chw_out_C = input_conditions['T_chw_sup_K'] - 273.0 # outlet from the evaporator q_chw_kW = input_conditions['q_chw_W'] / 1000 # cooling load ata the evaporator m_cw_kgpers = absorption_chiller.m_cw_kgpers # external flow rate of cooling water at the condenser and absorber m_hw_kgpers = absorption_chiller.m_hw_kgpers # external flow rate of hot water at the generator mcp_cw_kWperK = m_cw_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK / 1000 mcp_hw_kWperK = m_hw_kgpers * HEAT_CAPACITY_OF_WATER_JPERKGK / 1000 # chiller_props (these are constants from the Absorption_chiller sheet in systems.xls) s_e = absorption_chiller.s_e r_e = absorption_chiller.r_e s_g = absorption_chiller.s_g r_g = absorption_chiller.r_g a_e = absorption_chiller.a_e e_e = absorption_chiller.e_e a_g = absorption_chiller.a_g e_g = absorption_chiller.e_g # variables to solve # T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g = sympy.symbols('T_hw_out_C T_cw_out_C q_hw_kW , ddt_e, ddt_g') # # # systems of equations to solve # eq_e = s_e * ddt_e + r_e - q_chw_kW # eq_ddt_e = ((T_hw_in_C + T_hw_out_C) / 2.0 + a_e * (T_cw_in_C + T_cw_out_C) / 2.0 + e_e * (T_chw_in_C + T_chw_out_C) / 2.0 - ddt_e) # eq_g = s_g * ddt_g + r_g - q_hw_kW # eq_ddt_g = ((T_hw_in_C + T_hw_out_C) / 2.0 + a_g * (T_cw_in_C + T_cw_out_C) / 2.0 + e_g * (T_chw_in_C + T_chw_out_C) / 2.0- ddt_g) # eq_bal_g = (T_hw_in_C - T_hw_out_C) - q_hw_kW / mcp_hw_kWperK # # # solve the system of equations with sympy # eq_sys = [eq_e, eq_g, eq_bal_g, eq_ddt_e, eq_ddt_g] # unknown_variables = (T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g) # (T_hw_out_C, T_cw_out_C, q_hw_kW, ddt_e, ddt_g) = tuple(*sympy.linsolve(eq_sys, unknown_variables)) # a = np.array([ # [0, 0, 0, s_e, 
0], # [0, 0, -1, 0, s_g], # [-1, 0, -1 / mcp_hw_kWperK, 0, 0], # [0.5, 0, 0, -1, 0], # [0.5, 0, 0, 0, -1]]) # b = np.array([ # [q_chw_kW - r_e], # [-r_g], # [-T_hw_in_C], # [-0.5 * T_hw_in_C - 0.5 * e_e * (T_chw_in_C + T_chw_out_C)], # [-0.5 * T_hw_in_C - 0.5 * e_g * (T_chw_in_C + T_chw_out_C)]]) # the below equation for q_hw_kW was created with sympy.linsolve using symbols for all the variables. q_hw_kW = ((r_g * s_e * (0.5 * a_e * mcp_hw_kWperK + 0.25 * s_g * (a_e - a_g)) + s_g * (0.5 * a_g * mcp_hw_kWperK * (q_chw_kW - r_e) + s_e * (0.5 * mcp_hw_kWperK * (a_e * (0.5 * T_chw_in_C * e_g + 0.5 * T_chw_out_C * e_g + 0.5 * T_cw_in_C * a_g + 1.0 * T_hw_in_C) - a_g * (0.5 * T_chw_in_C * e_e + 0.5 * T_chw_out_C * e_e + 0.5 * T_cw_in_C * a_e + 1.0 * T_hw_in_C)) - 0.25 * r_g * (a_e - a_g)))) / (s_e * (0.5 * a_e * mcp_hw_kWperK + 0.25 * s_g * (a_e - a_g)))) # calculate results q_cw_kW = q_hw_kW + q_chw_kW # Q(condenser) + Q(absorber) T_hw_out_C = T_hw_in_C - q_hw_kW / mcp_hw_kWperK T_cw_out_C = T_cw_in_C + q_cw_kW / mcp_cw_kWperK # TODO: set upper bound of the chiller operation return {'T_hw_out_C': T_hw_out_C, 'T_cw_out_C': T_cw_out_C, 'q_chw_W': q_chw_kW * 1000, 'q_hw_W': q_hw_kW * 1000, 'q_cw_W': q_cw_kW * 1000} def calc_power_demand(q_chw_W, chiller_prop): """ Calculates the power demand of the solution and refrigeration pumps in absorption chillers. Linear equations derived from manufacturer's catalog _[Broad Air Conditioning, 2018]. :param q_chw_W: :param ACH_type: :return: ..[Broad Air Conditioning, 2018] BROAD XII NON-ELECTRIC CHILLER. (2018). 
etrieved from https://www.broadusa.net/en/wp-content/uploads/2018/12/BROAD-XII-US-Catalog2018-12.pdf """ ach_type = chiller_prop['type'].values[0] if ach_type == 'single': w_dot_W = 0.0028 + 2941 else: w_dot_W = 0.0021 * q_chw_W + 2757 # assuming the same for double and triple effect chillers return w_dot_W # Investment costs def calc_Cinv_ACH(Q_nom_W, Absorption_chiller_cost_data, ACH_type): """ Annualized investment costs for the vapor compressor chiller :type Q_nom_W : float :param Q_nom_W: peak cooling demand in [W] :returns InvCa: annualized chiller investment cost in CHF/a :rtype InvCa: float """ Capex_a_ACH_USD = 0 Opex_fixed_ACH_USD = 0 Capex_ACH_USD = 0 if Q_nom_W > 0: Absorption_chiller_cost_data = Absorption_chiller_cost_data[Absorption_chiller_cost_data['type'] == ACH_type] max_chiller_size = max(Absorption_chiller_cost_data['cap_max'].values) Q_nom_W = Absorption_chiller_cost_data['cap_min'].values.min() if Q_nom_W < Absorption_chiller_cost_data[ 'cap_min'].values.min() else Q_nom_W # minimum technology size if Q_nom_W <= max_chiller_size: Absorption_chiller_cost_data = Absorption_chiller_cost_data[ (Absorption_chiller_cost_data['cap_min'] <= Q_nom_W) & ( Absorption_chiller_cost_data[ 'cap_max'] > Q_nom_W)] # keep properties of the associated capacity Inv_a = Absorption_chiller_cost_data.iloc[0]['a'] Inv_b = Absorption_chiller_cost_data.iloc[0]['b'] Inv_c = Absorption_chiller_cost_data.iloc[0]['c'] Inv_d = Absorption_chiller_cost_data.iloc[0]['d'] Inv_e = Absorption_chiller_cost_data.iloc[0]['e'] Inv_IR = Absorption_chiller_cost_data.iloc[0]['IR_%'] Inv_LT = Absorption_chiller_cost_data.iloc[0]['LT_yr'] Inv_OM = Absorption_chiller_cost_data.iloc[0]['O&M_%'] / 100 InvC = Inv_a + Inv_b * (Q_nom_W) ** Inv_c + (Inv_d + Inv_e * Q_nom_W) * log(Q_nom_W) Capex_a_ACH_USD = calc_capex_annualized(InvC, Inv_IR, Inv_LT) Opex_fixed_ACH_USD = InvC * Inv_OM Capex_ACH_USD = InvC else: number_of_chillers = int(ceil(Q_nom_W / max_chiller_size)) Q_nom_each_chiller = 
Q_nom_W / number_of_chillers for i in range(number_of_chillers): Absorption_chiller_cost_data = Absorption_chiller_cost_data[ (Absorption_chiller_cost_data['cap_min'] <= Q_nom_each_chiller) & ( Absorption_chiller_cost_data[ 'cap_max'] > Q_nom_each_chiller)] # keep properties of the associated capacity Inv_a = Absorption_chiller_cost_data.iloc[0]['a'] Inv_b = Absorption_chiller_cost_data.iloc[0]['b'] Inv_c = Absorption_chiller_cost_data.iloc[0]['c'] Inv_d = Absorption_chiller_cost_data.iloc[0]['d'] Inv_e = Absorption_chiller_cost_data.iloc[0]['e'] Inv_IR = Absorption_chiller_cost_data.iloc[0]['IR_%'] Inv_LT = Absorption_chiller_cost_data.iloc[0]['LT_yr'] Inv_OM = Absorption_chiller_cost_data.iloc[0]['O&M_%'] / 100 InvC = Inv_a + Inv_b * (Q_nom_each_chiller) ** Inv_c + (Inv_d + Inv_e * Q_nom_each_chiller) * log(Q_nom_each_chiller) Capex_a1 = calc_capex_annualized(InvC, Inv_IR, Inv_LT) Capex_a_ACH_USD = Capex_a_ACH_USD + Capex_a1 Opex_fixed_ACH_USD = Opex_fixed_ACH_USD + InvC * Inv_OM Capex_ACH_USD = Capex_ACH_USD + InvC return Capex_a_ACH_USD, Opex_fixed_ACH_USD, Capex_ACH_USD class AbsorptionChiller(object): __slots__ = ["code", "chiller_prop", "m_cw_kgpers", "m_hw_kgpers", "s_e", "r_e", "s_g", "r_g", "a_e", "e_e", "a_g", "e_g"] def __init__(self, chiller_prop, ACH_type): self.chiller_prop = chiller_prop[chiller_prop['type'] == ACH_type] # copy first row to self for faster lookup (avoid pandas __getitem__ in tight loops) self.code = chiller_prop['code'].values[0] # external flow rate of cooling water at the condenser and absorber self.m_cw_kgpers = chiller_prop['m_cw'].values[0] # external flow rate of hot water at the generator self.m_hw_kgpers = chiller_prop['m_hw'].values[0] self.s_e = chiller_prop['s_e'].values[0]<|fim▁hole|> self.e_e = chiller_prop['e_e'].values[0] self.a_g = chiller_prop['a_g'].values[0] self.e_g = chiller_prop['e_g'].values[0] def update_data(self, chiller_prop): """Due to how AbsorptionChiller is currently used (FIXME: can we fix this?), we 
somedimes need to update the instance variables from the databaframe chiller_prop. """ if self.code != chiller_prop['code'].values[0]: # only update if new code... # print("Updating chiller_prop data! old code: {0}, new code: {1}".format(self.code, chiller_prop['code'].values[0])) self.code = chiller_prop['code'].values[0] self.m_cw_kgpers = chiller_prop['m_cw'].values[0] self.m_hw_kgpers = chiller_prop['m_hw'].values[0] self.s_e = chiller_prop['s_e'].values[0] self.r_e = chiller_prop['r_e'].values[0] self.s_g = chiller_prop['s_g'].values[0] self.r_g = chiller_prop['r_g'].values[0] self.a_e = chiller_prop['a_e'].values[0] self.e_e = chiller_prop['e_e'].values[0] self.a_g = chiller_prop['a_g'].values[0] self.e_g = chiller_prop['e_g'].values[0] def main(config): """ run the whole preprocessing routine test case 1) q_hw_W = 24213, q_chw_W = 20088, EER = 0.829, T_hw_out_C = 67.22 _[Kuhn, 2011] test case 2) q_hw_W = 824105, q_chw_W = 1163011, EER = 1.41, T_hw_out_C = 165.93 _[Shirazi, 2016] test case 3) q_hw_W = 623379, q_chw_W = 1163430, EER = 1.87, T_hw_out_C = 195.10 _[Shirazi, 2016] ..[Kuhn A., Ozgur-Popanda C., & Ziegler F., 2011] A 10kW Indirectly Fired Absorption Heat Pump: Concepts for a reversible operation. 10th International Heat Pump Conference, 2011. ..[Shirazi A., Taylor R.A., White S.D., Morrison G.L.] A systematic parametric study and feasibility assessment of solar-assisted single-effect, double-effect, and triple-effect absorption chillers for heating and cooling applications. 
Energy Conversion and Management, 2016 """ locator = cea.inputlocator.InputLocator(scenario=config.scenario) # Input parameters for test cases case_1_dict = {'mdot_chw_kgpers':0.8, 'T_chw_sup_K': 280.0, 'T_chw_re_K': 286.0, 'T_hw_in_C': 84.6, 'ACH_type': 'single'} case_2_dict = {'mdot_chw_kgpers': 39.7, 'T_chw_sup_K': 280.0, 'T_chw_re_K': 287.0, 'T_hw_in_C': 180, 'ACH_type': 'double'} case_3_dict = {'mdot_chw_kgpers': 55.6, 'T_chw_sup_K': 280.0, 'T_chw_re_K': 285.0, 'T_hw_in_C': 210, 'ACH_type': 'triple'} # Unpack parameters case_dict = case_1_dict mdot_chw_kgpers = case_dict['mdot_chw_kgpers'] T_chw_sup_K = case_dict['T_chw_sup_K'] T_chw_re_K = case_dict['T_chw_re_K'] T_hw_in_C = case_dict['T_hw_in_C'] T_ground_K = 300 ach_type = case_dict['ACH_type'] chiller_prop = AbsorptionChiller(pd.read_excel(locator.get_database_conversion_systems(), sheet_name="Absorption_chiller"), ach_type) chiller_operation = calc_chiller_main(mdot_chw_kgpers, T_chw_sup_K, T_chw_re_K, T_hw_in_C, T_ground_K, chiller_prop) print(chiller_operation) print('test_decentralized_buildings_cooling() succeeded. Please doubel check results in the description.') if __name__ == '__main__': main(cea.config.Configuration())<|fim▁end|>
self.r_e = chiller_prop['r_e'].values[0] self.s_g = chiller_prop['s_g'].values[0] self.r_g = chiller_prop['r_g'].values[0] self.a_e = chiller_prop['a_e'].values[0]
<|file_name|>TestG1TraceEagerReclaimHumongousObjects.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2014, 2016, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * @test TestG1TraceEagerReclaimHumongousObjects * @bug 8058801 8048179 * @summary Ensure that the output for a G1TraceEagerReclaimHumongousObjects * includes the expected necessary messages.<|fim▁hole|> */ import jdk.test.lib.ProcessTools; import jdk.test.lib.OutputAnalyzer; import java.util.LinkedList; public class TestG1TraceEagerReclaimHumongousObjects { public static void main(String[] args) throws Exception { ProcessBuilder pb = ProcessTools.createJavaProcessBuilder("-XX:+UseG1GC", "-Xms128M", "-Xmx128M", "-Xmn16M", "-XX:G1HeapRegionSize=1M", "-Xlog:gc+phases=trace,gc+humongous=trace", "-XX:+UnlockExperimentalVMOptions", GCWithHumongousObjectTest.class.getName()); OutputAnalyzer output = new OutputAnalyzer(pb.start()); // As G1ReclaimDeadHumongousObjectsAtYoungGC is set(default), below logs should be displayed. 
output.shouldContain("Humongous Reclaim"); output.shouldContain("Humongous Total"); output.shouldContain("Humongous Candidate"); output.shouldContain("Humongous Reclaimed"); // As G1TraceReclaimDeadHumongousObjectsAtYoungGC is set and GCWithHumongousObjectTest has humongous objects, // these logs should be displayed. output.shouldContain("Live humongous"); output.shouldContain("Dead humongous region"); output.shouldHaveExitValue(0); } static class GCWithHumongousObjectTest { public static final int M = 1024*1024; public static LinkedList<Object> garbageList = new LinkedList<Object>(); // A large object referenced by a static. static int[] filler = new int[10 * M]; public static void genGarbage() { for (int i = 0; i < 32*1024; i++) { garbageList.add(new int[100]); } garbageList.clear(); } public static void main(String[] args) { int[] large = new int[M]; Object ref = large; System.out.println("Creating garbage"); for (int i = 0; i < 100; i++) { // A large object that will be reclaimed eagerly. large = new int[6*M]; genGarbage(); // Make sure that the compiler cannot completely remove // the allocation of the large object until here. System.out.println(large); } // Keep the reference to the first object alive. System.out.println(ref); System.out.println("Done"); } } }<|fim▁end|>
* @key gc * @library /testlibrary * @modules java.base/sun.misc * java.management
<|file_name|>xrsystem.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at https://mozilla.org/MPL/2.0/. */ use crate::dom::bindings::cell::DomRefCell; use crate::dom::bindings::codegen::Bindings::XRSystemBinding::XRSessionInit; use crate::dom::bindings::codegen::Bindings::XRSystemBinding::{XRSessionMode, XRSystemMethods}; use crate::dom::bindings::conversions::{ConversionResult, FromJSValConvertible}; use crate::dom::bindings::error::Error; use crate::dom::bindings::inheritance::Castable; use crate::dom::bindings::refcounted::{Trusted, TrustedPromise}; use crate::dom::bindings::reflector::{reflect_dom_object, DomObject}; use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom}; use crate::dom::bindings::trace::RootedTraceableBox; use crate::dom::eventtarget::EventTarget; use crate::dom::gamepad::Gamepad; use crate::dom::promise::Promise; use crate::dom::window::Window; use crate::dom::xrsession::XRSession; use crate::dom::xrtest::XRTest; use crate::realms::InRealm; use crate::script_thread::ScriptThread; use crate::task_source::TaskSource; use dom_struct::dom_struct; use ipc_channel::ipc::{self as ipc_crate, IpcReceiver}; use ipc_channel::router::ROUTER; use msg::constellation_msg::PipelineId; use profile_traits::ipc; use servo_config::pref; use std::cell::Cell; use std::rc::Rc; use webxr_api::{Error as XRError, Frame, Session, SessionInit, SessionMode}; #[dom_struct] pub struct XRSystem { eventtarget: EventTarget, gamepads: DomRefCell<Vec<Dom<Gamepad>>>, pending_immersive_session: Cell<bool>, active_immersive_session: MutNullableDom<XRSession>, active_inline_sessions: DomRefCell<Vec<Dom<XRSession>>>, test: MutNullableDom<XRTest>, pipeline: PipelineId, } impl XRSystem { fn new_inherited(pipeline: PipelineId) -> XRSystem { XRSystem { eventtarget: EventTarget::new_inherited(), gamepads: 
DomRefCell::new(Vec::new()), pending_immersive_session: Cell::new(false), active_immersive_session: Default::default(), active_inline_sessions: DomRefCell::new(Vec::new()), test: Default::default(), pipeline, } } pub fn new(window: &Window) -> DomRoot<XRSystem> { reflect_dom_object( Box::new(XRSystem::new_inherited(window.pipeline_id())), window, ) } pub fn pending_or_active_session(&self) -> bool { self.pending_immersive_session.get() || self.active_immersive_session.get().is_some() } pub fn set_pending(&self) { self.pending_immersive_session.set(true) } pub fn set_active_immersive_session(&self, session: &XRSession) { // XXXManishearth when we support non-immersive (inline) sessions we should // ensure they never reach these codepaths self.pending_immersive_session.set(false); self.active_immersive_session.set(Some(session)) } /// https://immersive-web.github.io/webxr/#ref-for-eventdef-xrsession-end pub fn end_session(&self, session: &XRSession) { // Step 3 if let Some(active) = self.active_immersive_session.get() { if Dom::from_ref(&*active) == Dom::from_ref(session) { self.active_immersive_session.set(None); // Dirty the canvas, since it has been skipping this step whilst in immersive // mode session.dirty_layers(); } } self.active_inline_sessions .borrow_mut() .retain(|sess| Dom::from_ref(&**sess) != Dom::from_ref(session)); } } impl Into<SessionMode> for XRSessionMode { fn into(self) -> SessionMode { match self { XRSessionMode::Immersive_vr => SessionMode::ImmersiveVR, XRSessionMode::Immersive_ar => SessionMode::ImmersiveAR, XRSessionMode::Inline => SessionMode::Inline, } } } impl XRSystemMethods for XRSystem { /// https://immersive-web.github.io/webxr/#dom-xr-issessionsupported fn IsSessionSupported(&self, mode: XRSessionMode) -> Rc<Promise> { // XXXManishearth this should select an XR device first let promise = Promise::new(&self.global()); let mut trusted = Some(TrustedPromise::new(promise.clone())); let global = self.global(); let window = 
global.as_window(); let (task_source, canceller) = window .task_manager() .dom_manipulation_task_source_with_canceller(); let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap(); ROUTER.add_route( receiver.to_opaque(), Box::new(move |message| { // router doesn't know this is only called once let trusted = if let Some(trusted) = trusted.take() { trusted } else { error!("supportsSession callback called twice!"); return; }; let message: Result<(), webxr_api::Error> = if let Ok(message) = message.to() { message } else { error!("supportsSession callback given incorrect payload"); return; }; if let Ok(()) = message { let _ = task_source.queue_with_canceller(trusted.resolve_task(true), &canceller); } else { let _ = task_source.queue_with_canceller(trusted.resolve_task(false), &canceller); }; }), ); window .webxr_registry() .supports_session(mode.into(), sender); promise } /// https://immersive-web.github.io/webxr/#dom-xr-requestsession #[allow(unsafe_code)] fn RequestSession( &self, mode: XRSessionMode, init: RootedTraceableBox<XRSessionInit>, comp: InRealm, ) -> Rc<Promise> { let global = self.global(); let window = global.as_window(); let promise = Promise::new_in_current_realm(&global, comp); if mode != XRSessionMode::Inline { if !ScriptThread::is_user_interacting() { if pref!(dom.webxr.unsafe_assume_user_intent) { warn!("The dom.webxr.unsafe-assume-user-intent preference assumes user intent to enter WebXR."); } else { promise.reject_error(Error::Security); return promise; } } if self.pending_or_active_session() { promise.reject_error(Error::InvalidState); return promise; } self.set_pending(); } let mut required_features = vec![]; let mut optional_features = vec![]; let cx = global.get_cx(); // We are supposed to include "viewer" and on immersive devices "local" // by default here, but this is handled directly in requestReferenceSpace() if let Some(ref r) = init.requiredFeatures { for feature in r { unsafe { if let 
Ok(ConversionResult::Success(s)) = String::from_jsval(*cx, feature.handle(), ()) { required_features.push(s) } else { warn!("Unable to convert required feature to string"); if mode != XRSessionMode::Inline { self.pending_immersive_session.set(false); } promise.reject_error(Error::NotSupported); return promise; } } } } if let Some(ref o) = init.optionalFeatures { for feature in o { unsafe { if let Ok(ConversionResult::Success(s)) = String::from_jsval(*cx, feature.handle(), ()) { optional_features.push(s) } else { warn!("Unable to convert optional feature to string"); } } } } let init = SessionInit { required_features, optional_features, first_person_observer_view: pref!(dom.webxr.first_person_observer_view), }; let mut trusted = Some(TrustedPromise::new(promise.clone())); let this = Trusted::new(self); let (task_source, canceller) = window .task_manager() .dom_manipulation_task_source_with_canceller(); let (sender, receiver) = ipc::channel(global.time_profiler_chan().clone()).unwrap(); let (frame_sender, frame_receiver) = ipc_crate::channel().unwrap(); let mut frame_receiver = Some(frame_receiver); ROUTER.add_route( receiver.to_opaque(), Box::new(move |message| { // router doesn't know this is only called once let trusted = trusted.take().unwrap(); let this = this.clone(); let frame_receiver = frame_receiver.take().unwrap(); let message: Result<Session, webxr_api::Error> = if let Ok(message) = message.to() { message } else { error!("requestSession callback given incorrect payload"); return; }; let _ = task_source.queue_with_canceller( task!(request_session: move || { this.root().session_obtained(message, trusted.root(), mode, frame_receiver); }), &canceller, ); }), ); window .webxr_registry() .request_session(mode.into(), init, sender, frame_sender); promise } // https://github.com/immersive-web/webxr-test-api/blob/master/explainer.md<|fim▁hole|> } } impl XRSystem { fn session_obtained( &self, response: Result<Session, XRError>, promise: Rc<Promise>, mode: 
XRSessionMode, frame_receiver: IpcReceiver<Frame>, ) { let session = match response { Ok(session) => session, Err(e) => { warn!("Error requesting XR session: {:?}", e); if mode != XRSessionMode::Inline { self.pending_immersive_session.set(false); } promise.reject_error(Error::NotSupported); return; }, }; let session = XRSession::new(&self.global(), session, mode, frame_receiver); if mode == XRSessionMode::Inline { self.active_inline_sessions .borrow_mut() .push(Dom::from_ref(&*session)); } else { self.set_active_immersive_session(&session); } promise.resolve_native(&session); // https://github.com/immersive-web/webxr/issues/961 // This must be called _after_ the promise is resolved session.setup_initial_inputs(); } // https://github.com/immersive-web/navigation/issues/10 pub fn dispatch_sessionavailable(&self) { let xr = Trusted::new(self); let global = self.global(); let window = global.as_window(); window .task_manager() .dom_manipulation_task_source() .queue( task!(fire_sessionavailable_event: move || { // The sessionavailable event indicates user intent to enter an XR session let xr = xr.root(); let interacting = ScriptThread::is_user_interacting(); ScriptThread::set_user_interacting(true); xr.upcast::<EventTarget>().fire_bubbling_event(atom!("sessionavailable")); ScriptThread::set_user_interacting(interacting); }), window.upcast(), ) .unwrap(); } }<|fim▁end|>
fn Test(&self) -> DomRoot<XRTest> { self.test.or_init(|| XRTest::new(&self.global()))
<|file_name|>ifgt_test.cpp<|end_file_name|><|fim▁begin|>#include "gtest/gtest.h" #include "ifgt.hpp" #include "test/support.hpp" namespace fgt { TEST(Ifgt, Reference) { auto source = load_ascii_test_matrix("X.txt"); auto target = load_ascii_test_matrix("Y.txt"); double bandwidth = 0.5; double epsilon = 1e-4; auto expected = direct(source, target, bandwidth); auto actual = ifgt(source, target, bandwidth, epsilon); ASSERT_EQ(expected.size(), actual.size()); EXPECT_LT((expected - actual).array().abs().maxCoeff() / actual.size(), epsilon); } TEST(Ifgt, ClassBased) { auto source = load_ascii_test_matrix("X.txt"); auto target = load_ascii_test_matrix("Y.txt"); double bandwidth = 0.5; double epsilon = 1e-4; auto expected = direct(source, target, bandwidth); auto actual = Ifgt(source, bandwidth, epsilon).compute(target); ASSERT_EQ(expected.size(), actual.size()); EXPECT_LT((expected - actual).array().abs().maxCoeff() / actual.size(), epsilon); } TEST(Ifgt, ChooseParameters) { IfgtParameters params = ifgt_choose_parameters(2, 0.3, 1e-6, 189, 200); EXPECT_EQ(13, params.nclusters); EXPECT_NEAR(1.1151, params.cutoff_radius, 1e-4); } TEST(Ifgt, ChooseTruncationNumber) { size_t truncation_number = ifgt_choose_truncation_number(2, 0.3, 1e-6, 0.1, 200); EXPECT_EQ(9, truncation_number);<|fim▁hole|>} TEST(Ifgt, HighBandwidth) { auto source = load_ascii_test_matrix("X.txt"); auto target = load_ascii_test_matrix("Y.txt"); target = target.array() + 2; double bandwidth = 3.5; double epsilon = 1e-4; auto expected = direct(source, target, bandwidth); auto actual = ifgt(source, target, bandwidth, epsilon); ASSERT_EQ(expected.size(), actual.size()); EXPECT_LT((expected - actual).array().abs().maxCoeff() / actual.size(), epsilon); } TEST(Ifgt, UTM) { auto source = load_ascii_test_matrix("utm.txt"); auto target = source; ASSERT_THROW(ifgt(source, target, 100, 1e-4), ifgt_no_clusters); } TEST(Ifgt, ManyDimensionsManyPoints) { Matrix source = Matrix::Random(10, 60); ASSERT_THROW(Ifgt(source, 
0.4, 1e-4), fgt_error); } }<|fim▁end|>
<|file_name|>Download.js<|end_file_name|><|fim▁begin|>var mtd = require('mt-downloader'); var fs = require('fs'); var util = require('util'); var EventEmitter = require('events').EventEmitter; var Download = function() { EventEmitter.call(this); this._reset(); this.url = ''; this.filePath = ''; this.options = {}; this.meta = {}; this._retryOptions = { _nbRetries: 0, maxRetries: 5, retryInterval: 5000 }; }; util.inherits(Download, EventEmitter); Download.prototype._reset = function(first_argument) { this.status = 0; // -3 = destroyed, -2 = stopped, -1 = error, 0 = not started, 1 = started (downloading), 2 = error, retrying, 3 = finished this.error = ''; this.stats = { time: { start: 0, end: 0 }, total: { size: 0, downloaded: 0, completed: 0 }, past: { downloaded: 0 }, present: { downloaded: 0, time: 0, speed: 0 }, future: { remaining: 0, eta: 0 }, threadStatus: { idle: 0, open: 0, closed: 0, failed: 0 } }; }; Download.prototype.setUrl = function(url) { this.url = url; return this; }; Download.prototype.setFilePath = function(filePath) { this.filePath = filePath; return this; }; Download.prototype.setOptions = function(options) { if(!options || options == {}) { return this.options = {}; } // The "options" object will be directly passed to mt-downloader, so we need to conform to his format //To set the total number of download threads this.options.count = options.threadsCount || options.count || 2; //HTTP method this.options.method = options.method || 'GET'; //HTTP port this.options.port = options.port || 80; //If no data is received the download times out. It is measured in seconds. this.options.timeout = options.timeout/1000 || 5; //Control the part of file that needs to be downloaded. 
this.options.range = options.range || '0-100'; // Support customized header fields this.options.headers = options.headers || {}; return this; }; Download.prototype.setRetryOptions = function(options) { this._retryOptions.maxRetries = options.maxRetries || 5; this._retryOptions.retryInterval = options.retryInterval || 2000; return this; }; Download.prototype.setMeta = function(meta) { this.meta = meta; return this; }; Download.prototype.setStatus = function(status) { this.status = status; return this; }; Download.prototype.setError = function(error) { this.error = error; return this; }; Download.prototype._computeDownloaded = function() { if(!this.meta.threads) { return 0; } var downloaded = 0; this.meta.threads.forEach(function(thread) { downloaded += thread.position - thread.start; }); return downloaded; }; // Should be called on start, set the start timestamp (in seconds) Download.prototype._computeStartTime = function() { this.stats.time.start = Math.floor(Date.now() / 1000); }; // Should be called on end, set the end timestamp (in seconds) Download.prototype._computeEndTime = function() { this.stats.time.end = Math.floor(Date.now() / 1000); }; // Should be called on start, count size already downloaded (eg. 
resumed download) Download.prototype._computePastDownloaded = function() { this.stats.past.downloaded = this._computeDownloaded(); }; // Should be called on start compute total size Download.prototype._computeTotalSize = function() { var threads = this.meta.threads; if(!threads) { return 0; } this.stats.total.size = threads[threads.length-1].end - threads[0].start; }; Download.prototype._computeStats = function() { this._computeTotalSize(); this._computeTotalDownloaded(); this._computePresentDownloaded(); this._computeTotalCompleted(); this._computeFutureRemaining(); // Only compute those stats when downloading if(this.status == 1) { this._computePresentTime(); this._computePresentSpeed(); this._computeFutureEta(); this._computeThreadStatus(); } }; Download.prototype._computePresentTime = function() { this.stats.present.time = Math.floor(Date.now() / 1000) - this.stats.time.start; }; Download.prototype._computeTotalDownloaded = function() { this.stats.total.downloaded = this._computeDownloaded(); }; Download.prototype._computePresentDownloaded = function() { this.stats.present.downloaded = this.stats.total.downloaded - this.stats.past.downloaded; }; Download.prototype._computeTotalCompleted = function() { this.stats.total.completed = Math.floor((this.stats.total.downloaded) * 1000 / this.stats.total.size) / 10; }; Download.prototype._computeFutureRemaining = function() { this.stats.future.remaining = this.stats.total.size - this.stats.total.downloaded; }; Download.prototype._computePresentSpeed = function() { this.stats.present.speed = this.stats.present.downloaded / this.stats.present.time; }; Download.prototype._computeFutureEta = function() { this.stats.future.eta = this.stats.future.remaining / this.stats.present.speed; }; Download.prototype._computeThreadStatus = function() { var self = this; <|fim▁hole|> failed: 0 }; this.meta.threads.forEach(function(thread) { self.stats.threadStatus[thread.connection]++; }); }; Download.prototype.getStats = function() { 
if(!this.meta.threads) { return this.stats; } this._computeStats(); return this.stats; }; Download.prototype._destroyThreads = function() { if(this.meta.threads) { this.meta.threads.forEach(function(i){ if(i.destroy) { i.destroy(); } }); } }; Download.prototype.stop = function() { this.setStatus(-2); this._destroyThreads(); this.emit('stopped', this); }; Download.prototype.destroy = function() { var self = this; this._destroyThreads(); this.setStatus(-3); var filePath = this.filePath; var tmpFilePath = filePath; if (!filePath.match(/\.mtd$/)) { tmpFilePath += '.mtd'; } else { filePath = filePath.replace(new RegExp('(.mtd)*$', 'g'), ''); } fs.unlink(filePath, function() { fs.unlink(tmpFilePath, function() { self.emit('destroyed', this); }); }); }; Download.prototype.start = function() { var self = this; self._reset(); self._retryOptions._nbRetries = 0; this.options.onStart = function(meta) { self.setStatus(1); self.setMeta(meta); self.setUrl(meta.url); self._computeStartTime(); self._computePastDownloaded(); self._computeTotalSize(); self.emit('start', self); }; this.options.onEnd = function(err, result) { // If stopped or destroyed, do nothing if(self.status == -2 || self.status == -3) { return; } // If we encountered an error and it's not an "Invalid file path" error, we try to resume download "maxRetries" times if(err && (''+err).indexOf('Invalid file path') == -1 && self._retryOptions._nbRetries < self._retryOptions.maxRetries) { self.setStatus(2); self._retryOptions._nbRetries++; setTimeout(function() { self.resume(); self.emit('retry', self); }, self._retryOptions.retryInterval); // "Invalid file path" or maxRetries reached, emit error } else if(err) { self._computeEndTime(); self.setError(err); self.setStatus(-1); self.emit('error', self); // No error, download ended successfully } else { self._computeEndTime(); self.setStatus(3); self.emit('end', self); } }; this._downloader = new mtd(this.filePath, this.url, this.options); this._downloader.start(); return 
this; }; Download.prototype.resume = function() { this._reset(); var filePath = this.filePath; if (!filePath.match(/\.mtd$/)) { filePath += '.mtd'; } this._downloader = new mtd(filePath, null, this.options); this._downloader.start(); return this; }; // For backward compatibility, will be removed in next releases Download.prototype.restart = util.deprecate(function() { return this.resume(); }, 'Download `restart()` is deprecated, please use `resume()` instead.'); module.exports = Download;<|fim▁end|>
this.stats.threadStatus = { idle: 0, open: 0, closed: 0,
<|file_name|>stix-text-array.component.ts<|end_file_name|><|fim▁begin|>import { Component, Input } from '@angular/core'; import { FormControl } from '@angular/forms'; @Component({ selector: 'stix-text-array', templateUrl: './stix-text-array.component.html' }) export class StixTextArrayComponent { @Input() public model: any; @Input() public propertyName: any; public addItemToArray(): void { if (!this.model.attributes[this.propertyName]) { this.model.attributes[this.propertyName] = []; } this.model.attributes[this.propertyName].unshift(''); } public removeItemFromArray(index: number): void { this.model.attributes[this.propertyName].splice(index, 1); } public trackByIndex(index: number, obj: any): any { return index; } public makePlaceholder(prop: string) { let retVal = prop.replace(/e?s$/, '');<|fim▁hole|> return word; } return g1.toUpperCase() + g2; }); return retVal; } }<|fim▁end|>
retVal = retVal.replace(/\b([a-z])(\w+)/g, (_, g1, g2) => { let word = g1.concat(g2); if (word === 'and' || word === 'or' || word === 'the') {
<|file_name|>app-routing.module.ts<|end_file_name|><|fim▁begin|>/** * Created by jardiml on 7/05/17. */ import {NgModule} from '@angular/core'; import {Routes, RouterModule, PreloadAllModules} from '@angular/router'; import {CallbackComponent} from './shared/callback/callback.component'; import { PageNotFoundComponent } from './shared/page-not-found/page-not-found.component'; import { LoginComponent } from './core/login/login.component'; import {AuthGuardService} from './shared/auth/auth-guard.service'; // canActivate: [AuthGuardService], const appRoutes: Routes = [ { path: '', redirectTo: '/login', pathMatch: 'full' }, { path: 'login', component: LoginComponent }, { path: 'students', canActivate: [AuthGuardService], loadChildren: './student/student.module#StudentModule' }, { path: 'callback', component: CallbackComponent },<|fim▁hole|> } ]; @NgModule({ imports: [RouterModule.forRoot(appRoutes, {preloadingStrategy: PreloadAllModules})], exports: [RouterModule] }) export class AppRoutingModule { }<|fim▁end|>
{ path: '**', component: PageNotFoundComponent
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django.forms import ModelForm from .models import DistributionRequests <|fim▁hole|>class DistributionRequestForm(ModelForm): class Meta: model = DistributionRequests<|fim▁end|>
<|file_name|>controller.js<|end_file_name|><|fim▁begin|>/** * @author: Alberto Cerqueira * @email: [email protected] */ jQuery.controller = function() { var controllerClass = function() { this.init = (function(){ $("#gravar").click(function(){ _self.gravar(); return false; }); }); this.gravar = (function() { $("#manterEntidade").ajaxSubmit({ url : systemURL, dataType : "json", success : (function(jsonReturn){ var consequence = jsonReturn.consequence; if (consequence == "ERRO") { alert(jsonReturn.message); } else if (consequence == "SUCESSO") { alert(jsonReturn.message + ": " + jsonReturn.dado.toString()); } else if(consequence == "MUITOS_ERROS"){ var mensagem = ['']; jQuery.each(jsonReturn.dado, function(i, dado) { mensagem.push(dado.localizedMessage + "\n"); }); alert(mensagem.join(''));<|fim▁hole|> //location.reload(); }), error : (function(XMLHttpRequest, textStatus, errorThrown){ alert(errorConexao); }) }); }); var _self = this; }; return new controllerClass(); };<|fim▁end|>
}
<|file_name|>Distribute Candies.py<|end_file_name|><|fim▁begin|>class Solution(object): def distributeCandies(self, candies): """ :type candies: List[int] :rtype: int """ result = 0 kind = list(set(candies)) if len(kind) > len(candies)/2: result = len(candies)/2 else: result = len(kind)<|fim▁hole|><|fim▁end|>
return result
<|file_name|>getDoc.js<|end_file_name|><|fim▁begin|>import { check } from "meteor/check"; import processDoc from "./processDoc"; /** * getDoc * fetch repo profile from github and store in RepoData collection * @param {Object} doc - mongo style selector for the doc * @returns {undefined} returns */ function getDoc(options) { check(options, Object); // get repo details const docRepo = ReDoc.Collections.Repos.findOne({ repo: options.repo }); // we need to have a repo<|fim▁hole|> console.log(`redoc/getDocSet: Failed to load repo data for ${options.repo}`); return false; } // TOC item for this doc const tocItem = ReDoc.Collections.TOC.findOne({ alias: options.alias, repo: options.repo }); processDoc({ branch: options.branch, repo: options.repo, alias: options.alias, docRepo, tocItem }); } export default getDoc; export { flushDocCache };<|fim▁end|>
if (!docRepo) {
<|file_name|>lexeme.rs<|end_file_name|><|fim▁begin|>/*** * Enum Lexeme * -> lexeme tokens to be extract from the source code */ #[allow(dead_code)] #[derive(Debug, Clone)] pub enum Lexeme {<|fim▁hole|> Plus, Minus, Multiply, Divide, Modulo, Equals, Comma, SemiColon, LBrace, RBrace, LParen, RParen, LBracket, RBracket, And, Or, Not, Identifier(String), StringLiteral(String), Number(String), }<|fim▁end|>
Let, Out, In, Stdout, Stdin, For, While, Do, If, ElseIf, Else, Continue, Break, Return, True, False, Fn, Greater, GreaterEqual, Less, LessEqual, IsEqual, IsNotEqual,
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>""" Copyright 2016 Jacob C. Wimberley. This file is part of Weathredds. Weathredds is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. Weathredds is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with Weathredds. If not, see <http://www.gnu.org/licenses/>. """ from django.conf.urls import url, include from django.contrib.auth import views as auth_views from . import views from .views import ChangeEvent, ChangeThread urlpatterns = [ url(r'^$', views.home, name='home'), url(r'weathredds/$', views.home, name='home'), url(r'home/$', views.home, name='_home'), url(r'accounts/', include('django.contrib.auth.urls')), url(r'login/$', auth_views.login, {'template_name': 'registration/login.html'}, name='login'), url(r'logout/$', auth_views.logout_then_login, name='logout'), #url(r'^discussions/(\d{8}_\d{4})/(\d{8}_\d{4})/$', views.discussionRange), #url(r'^discussions/$', views.allDiscussions), url(r'extendThread/(\d+)$', views.extendThread, name='extendThread'), url(r'newEvent/$', views.newEvent, name='newEvent'),<|fim▁hole|> url(r'newThread/$', views.newThread, name='newThread'), url(r'newThreadInEvent/(\d+)$', views.newThread, name='newThreadInEvent'), url(r'event/(\d+)$', views.singleEvent, name='singleEvent'), url(r'thread/(\d+)$', views.singleThread, name='singleThread'), url(r'changeEvent/(?P<pk>\d+)$', ChangeEvent.as_view(), name='changeEvent'), url(r'changeThread/(?P<pk>\d+)$', ChangeThread.as_view(), name='changeThread'), url(r'tag/([^,\\\']+)$', views.singleTag, name='singleTag'), url(r'find/$', 
views.find, name='find'), url(r'async/togglePin$', views.asyncTogglePin, name='togglePin'), url(r'async/toggleTag$', views.asyncToggleTag, name='toggleTag'), url(r'async/toggleFrozen$', views.asyncToggleFrozen, name='toggleFrozen'), url(r'async/threadsForPeriod$', views.asyncThreadsForPeriod, name='threadsForPeriod'), url(r'async/eventsAtTime$', views.asyncEventsAtTime, name='eventsAtTime'), url(r'async/associateEventsWithThread$', views.asyncAssociateEventsWithThread, name='associateEventsWithThread'), ]<|fim▁end|>