prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>proxy.py<|end_file_name|><|fim▁begin|>"""An HTTP proxy that supports IPv6 as well as the HTTP CONNECT method, among other things.""" # Standard libary imports import socket import thread import select __version__ = '0.1.0 Draft 1' BUFFER_LENGTH = 8192 VERSION = 'Python Proxy/{}'.format(__version__) HTTP_VERSION = 'HTTP/1.1' class ConnectionHandler(object): """Handles connections between the HTTP client and HTTP server.""" def __init__(self, connection, _, timeout): self.client = connection self.client_buffer = '' self.timeout = timeout self.target = None method, path, protocol = self.get_base_header() if method == 'CONNECT': self.method_connect(path) else: self.method_others(method, path, protocol) def get_base_header(self): """Return a tuple of (method, path, protocol) from the recieved message.""" while 1: self.client_buffer += self.client.recv(BUFFER_LENGTH) end = self.client_buffer.find('\n') if end != -1: break print '{}'.format(self.client_buffer[:end]) data = (self.client_buffer[:end+1]).split() self.client_buffer = self.client_buffer[end+1:] return data def method_connect(self, path): """Handle HTTP CONNECT messages.""" self._connect_target(path) self.client.send('{http_version} 200 Connection established\n' 'Proxy-agent: {version}\n\n'.format( http_version=HTTP_VERSION, version=VERSION))<|fim▁hole|> """Handle all non-HTTP CONNECT messages.""" path = path[7:] i = path.find('/') host = path[:i] path = path[i:] self._connect_target(host) self.target.send('{method} {path} {protocol}\n{client_buffer}'.format( method=method, path=path, protocol=protocol, client_buffer=self.client_buffer)) self.client_buffer = '' self._read_write() def _connect_target(self, host): """Create a connection to the HTTP server specified by *host*.""" i = host.find(':') if i != -1: port = int(host[i+1:]) host = host[:i] else: port = 80 (soc_family, _, _, _, address) = socket.getaddrinfo(host, port)[0] self.target = socket.socket(soc_family) self.target.connect(address) def _read_write(self): """Read data from client connection and forward to server connection.""" time_out_max = self.timeout/3 socs = [self.client, self.target] count = 0 while 1: count += 1 (recv, _, error) = select.select(socs, [], socs, 3) if error: break if recv: for in_ in recv: data = in_.recv(BUFFER_LENGTH) if in_ is self.client: out = self.target else: out = self.client if data: out.send(data) count = 0 if count == time_out_max: break self.client.close() self.target.close() def start_server(host='localhost', port=8080, ipv_6=False, timeout=60, handler=ConnectionHandler): """Start the HTTP proxy server.""" if ipv_6: soc_type = socket.AF_INET6 else: soc_type = socket.AF_INET soc = socket.socket(soc_type) soc.bind((host, port)) print 'Serving on {0}:{1}.'.format(host, port) soc.listen(0) while 1: thread.start_new_thread(handler, soc.accept()+(timeout,)) if __name__ == '__main__': start_server()<|fim▁end|>
self.client_buffer = '' self._read_write() def method_others(self, method, path, protocol):
<|file_name|>nodeUtils.ts<|end_file_name|><|fim▁begin|>/* MIT License Copyright (c) 2021 Looker Data Sciences, Inc. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ import * as fs from 'fs' import type { ExecSyncOptionsWithStringEncoding } from 'child_process' import { execSync } from 'child_process' import path from 'path' import { warn } from '@looker/sdk-codegen-utils' const utf8: BufferEncoding = 'utf-8' export const utf8Encoding = { encoding: utf8 } /** * Abstraction of reading a file so all refs go to one place * @param filePath name of file * @param encoding character encoding. defaults to utf-8 * @returns {string} */ export const readFileSync = ( filePath: string, encoding: BufferEncoding = utf8 ) => fs.readFileSync(filePath, { encoding: encoding }) export const writeFileSync = ( filePath: string, data: any, encoding: BufferEncoding = utf8 ) => fs.writeFileSync(filePath, data, { encoding: encoding }) export const isDirSync = (filePath: string) => { try { return fs.statSync(filePath).isDirectory() } catch (e: any) { if (e.code === 'ENOENT') { return false } else { throw e } } } const homeToRoost = '../../../' export const getRootPath = () => path.join(__dirname, homeToRoost) export const rootFile = (fileName = '') => path.join(getRootPath(), fileName) /** * Creates the directory if needed, converts content to JSON string, writes file * * @param fileName to write that may include a relative path * @param {object | string} content to convert to a JSON string * @returns name of file written */ export const createJsonFile = ( fileName: string, content: Record<string, unknown> | string ) => { const fullName = rootFile(fileName) const dir = path.dirname(fullName) const data = typeof content === 'string' ? content : JSON.stringify(content) if (!isDirSync(dir)) fs.mkdirSync(dir, { recursive: true }) fs.writeFileSync(fullName, data, utf8Encoding) return fullName } export const isFileSync = (filePath: string) => { try { return fs.statSync(filePath).isFile() } catch (e: any) { if (e.code === 'ENOENT') { return false } else { throw e } } } export const quit = (err?: Error | string) => { if (err) { if (typeof err === 'string') { const message = err err = new Error('Failure') err.message = message } console.error(`Error: ${err.name}, ${err.message}`) console.error(err.stack) process.exit(1) } else { process.exit(0) } return '' // spoof return type for TypeScript to not complain } export const fail = (name: string, message: string) => { const err = new Error(message) err.name = name return quit(err) } export const run = (<|fim▁hole|> warning = false ) => { // https://nodejs.org/api/child_process.html#child_process_child_process_execsync_command_options const options: ExecSyncOptionsWithStringEncoding = { encoding: 'utf8', maxBuffer: 1024 * 2048, timeout: 300 * 1000, windowsHide: true, } try { // const result = await spawnSync(command, args, options) command += ' ' + args.join(' ') return execSync(command, options) } catch (e: any) { if (warning) { warn(errMsg) return '' } else { return quit(errMsg || e) } } }<|fim▁end|>
command: string, args: string[], errMsg?: string,
<|file_name|>ha_myisam.cc<|end_file_name|><|fim▁begin|>/* Copyright (c) 2000, 2012, Oracle and/or its affiliates. All rights reserved. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; version 2 of the License. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA */ #define MYSQL_SERVER 1 #include "sql_priv.h" #include "probes_mysql.h" #include "key.h" // key_copy #include "sql_plugin.h" #include <m_ctype.h> #include <my_bit.h> #include <myisampack.h> #include "ha_myisam.h" #include <stdarg.h> #include "myisamdef.h" #include "rt_index.h" #include "sql_table.h" // tablename_to_filename #include "sql_class.h" // THD #include <algorithm> using std::min; using std::max; ulonglong myisam_recover_options; static ulong opt_myisam_block_size; /* Interface to mysqld, to check system tables supported by SE */ static bool myisam_is_supported_system_table(const char *db, const char *table_name, bool is_sql_layer_system_table); /* bits in myisam_recover_options */ const char *myisam_recover_names[] = { "DEFAULT", "BACKUP", "FORCE", "QUICK", "OFF", NullS}; TYPELIB myisam_recover_typelib= {array_elements(myisam_recover_names)-1,"", myisam_recover_names, NULL}; const char *myisam_stats_method_names[] = {"nulls_unequal", "nulls_equal", "nulls_ignored", NullS}; TYPELIB myisam_stats_method_typelib= { array_elements(myisam_stats_method_names) - 1, "", myisam_stats_method_names, NULL}; static MYSQL_SYSVAR_ULONG(block_size, opt_myisam_block_size, PLUGIN_VAR_NOSYSVAR | PLUGIN_VAR_RQCMDARG, "Block size to be used for MyISAM index pages", NULL, NULL, MI_KEY_BLOCK_LENGTH, MI_MIN_KEY_BLOCK_LENGTH, MI_MAX_KEY_BLOCK_LENGTH, MI_MIN_KEY_BLOCK_LENGTH); static MYSQL_SYSVAR_ULONG(data_pointer_size, myisam_data_pointer_size, PLUGIN_VAR_RQCMDARG, "Default pointer size to be used for MyISAM tables", NULL, NULL, 6, 2, 7, 1); #define MB (1024*1024) static MYSQL_SYSVAR_ULONGLONG(max_sort_file_size, myisam_max_temp_length, PLUGIN_VAR_RQCMDARG, "Don't use the fast sort index method to created " "index if the temporary file would get bigger than this", NULL, NULL, LONG_MAX/MB*MB, 0, MAX_FILE_SIZE, MB); static MYSQL_SYSVAR_SET(recover_options, myisam_recover_options, PLUGIN_VAR_OPCMDARG|PLUGIN_VAR_READONLY, "Syntax: myisam-recover-options[=option[,option...]], where option can be " "DEFAULT, BACKUP, FORCE, QUICK, or OFF", NULL, NULL, 0, &myisam_recover_typelib); static MYSQL_THDVAR_ULONG(repair_threads, PLUGIN_VAR_RQCMDARG, "If larger than 1, when repairing a MyISAM table all indexes will be " "created in parallel, with one thread per index. The value of 1 " "disables parallel repair", NULL, NULL, 1, 1, ULONG_MAX, 1); static MYSQL_THDVAR_ULONGLONG(sort_buffer_size, PLUGIN_VAR_RQCMDARG, "The buffer that is allocated when sorting the index when doing " "a REPAIR or when creating indexes with CREATE INDEX or ALTER TABLE", NULL, NULL, 8192 * 1024, (long) (MIN_SORT_BUFFER + MALLOC_OVERHEAD), SIZE_T_MAX, 1); static MYSQL_SYSVAR_BOOL(use_mmap, opt_myisam_use_mmap, PLUGIN_VAR_NOCMDARG, "Use memory mapping for reading and writing MyISAM tables", NULL, NULL, FALSE); static MYSQL_SYSVAR_ULONGLONG(mmap_size, myisam_mmap_size, PLUGIN_VAR_RQCMDARG|PLUGIN_VAR_READONLY, "Restricts the total memory " "used for memory mapping of MySQL tables", NULL, NULL, SIZE_T_MAX, MEMMAP_EXTRA_MARGIN, SIZE_T_MAX, 1); static MYSQL_THDVAR_ENUM(stats_method, PLUGIN_VAR_RQCMDARG, "Specifies how MyISAM index statistics collection code should " "treat NULLs. Possible values of name are NULLS_UNEQUAL (default " "behavior for 4.1 and later), NULLS_EQUAL (emulate 4.0 behavior), " "and NULLS_IGNORED", NULL, NULL, MI_STATS_METHOD_NULLS_NOT_EQUAL, &myisam_stats_method_typelib); #ifndef DBUG_OFF /** Causes the thread to wait in a spin lock for a query kill signal. This function is used by the test frame work to identify race conditions. The signal is caught and ignored and the thread is not killed. */ static void debug_wait_for_kill(const char *info) { DBUG_ENTER("debug_wait_for_kill"); const char *prev_info; THD *thd; thd= current_thd; prev_info= thd_proc_info(thd, info); while(!thd->killed) my_sleep(1000); DBUG_PRINT("info", ("Exit debug_wait_for_kill")); thd_proc_info(thd, prev_info); DBUG_VOID_RETURN; } #endif /***************************************************************************** ** MyISAM tables *****************************************************************************/ static handler *myisam_create_handler(handlerton *hton, TABLE_SHARE *table, MEM_ROOT *mem_root) { return new (mem_root) ha_myisam(hton, table); } // collect errors printed by mi_check routines static void mi_check_print_msg(MI_CHECK *param, const char* msg_type, const char *fmt, va_list args) { THD* thd = (THD*)param->thd; Protocol *protocol= thd->protocol; size_t length, msg_length; char msgbuf[MI_MAX_MSG_BUF]; char name[NAME_LEN*2+2]; msg_length= my_vsnprintf(msgbuf, sizeof(msgbuf), fmt, args); msgbuf[sizeof(msgbuf) - 1] = 0; // healthy paranoia DBUG_PRINT(msg_type,("message: %s",msgbuf)); if (!thd->vio_ok()) { sql_print_error("%s", msgbuf); return; } if (param->testflag & (T_CREATE_MISSING_KEYS | T_SAFE_REPAIR | T_AUTO_REPAIR)) { my_message(ER_NOT_KEYFILE,msgbuf,MYF(MY_WME)); return; } length=(uint) (strxmov(name, param->db_name,".",param->table_name,NullS) - name); /* TODO: switch from protocol to push_warning here. The main reason we didn't it yet is parallel repair. Due to following trace: mi_check_print_msg/push_warning/sql_alloc/my_pthread_getspecific_ptr. Also we likely need to lock mutex here (in both cases with protocol and push_warning). */ if (param->need_print_msg_lock) mysql_mutex_lock(&param->print_msg_mutex); protocol->prepare_for_resend(); protocol->store(name, length, system_charset_info); protocol->store(param->op_name, system_charset_info); protocol->store(msg_type, system_charset_info); protocol->store(msgbuf, msg_length, system_charset_info); if (protocol->write()) sql_print_error("Failed on my_net_write, writing to stderr instead: %s\n", msgbuf); if (param->need_print_msg_lock) mysql_mutex_unlock(&param->print_msg_mutex); return; } /* Convert TABLE object to MyISAM key and column definition SYNOPSIS table2myisam() table_arg in TABLE object. keydef_out out MyISAM key definition. recinfo_out out MyISAM column definition. records_out out Number of fields. DESCRIPTION This function will allocate and initialize MyISAM key and column definition for further use in mi_create or for a check for underlying table conformance in merge engine. The caller needs to free *recinfo_out after use. Since *recinfo_out and *keydef_out are allocated with a my_multi_malloc, *keydef_out is freed automatically when *recinfo_out is freed. RETURN VALUE 0 OK !0 error code */ int table2myisam(TABLE *table_arg, MI_KEYDEF **keydef_out, MI_COLUMNDEF **recinfo_out, uint *records_out) { uint i, j, recpos, minpos, fieldpos, temp_length, length; enum ha_base_keytype type= HA_KEYTYPE_BINARY; uchar *record; KEY *pos; MI_KEYDEF *keydef; MI_COLUMNDEF *recinfo, *recinfo_pos; HA_KEYSEG *keyseg; TABLE_SHARE *share= table_arg->s; uint options= share->db_options_in_use; DBUG_ENTER("table2myisam"); if (!(my_multi_malloc(MYF(MY_WME), recinfo_out, (share->fields * 2 + 2) * sizeof(MI_COLUMNDEF), keydef_out, share->keys * sizeof(MI_KEYDEF), &keyseg, (share->key_parts + share->keys) * sizeof(HA_KEYSEG), NullS))) DBUG_RETURN(HA_ERR_OUT_OF_MEM); /* purecov: inspected */ keydef= *keydef_out; recinfo= *recinfo_out; pos= table_arg->key_info; for (i= 0; i < share->keys; i++, pos++) { keydef[i].flag= ((uint16) pos->flags & (HA_NOSAME | HA_FULLTEXT | HA_SPATIAL)); keydef[i].key_alg= pos->algorithm == HA_KEY_ALG_UNDEF ? (pos->flags & HA_SPATIAL ? HA_KEY_ALG_RTREE : HA_KEY_ALG_BTREE) : pos->algorithm; keydef[i].block_length= pos->block_size; keydef[i].seg= keyseg; keydef[i].keysegs= pos->user_defined_key_parts; for (j= 0; j < pos->user_defined_key_parts; j++) { Field *field= pos->key_part[j].field; type= field->key_type(); keydef[i].seg[j].flag= pos->key_part[j].key_part_flag; if (options & HA_OPTION_PACK_KEYS || (pos->flags & (HA_PACK_KEY | HA_BINARY_PACK_KEY | HA_SPACE_PACK_USED))) { if (pos->key_part[j].length > 8 && (type == HA_KEYTYPE_TEXT || type == HA_KEYTYPE_NUM || (type == HA_KEYTYPE_BINARY && !field->zero_pack()))) { /* No blobs here */ if (j == 0) keydef[i].flag|= HA_PACK_KEY; if (!(field->flags & ZEROFILL_FLAG) && (field->type() == MYSQL_TYPE_STRING || field->type() == MYSQL_TYPE_VAR_STRING || ((int) (pos->key_part[j].length - field->decimals())) >= 4)) keydef[i].seg[j].flag|= HA_SPACE_PACK; } else if (j == 0 && (!(pos->flags & HA_NOSAME) || pos->key_length > 16)) keydef[i].flag|= HA_BINARY_PACK_KEY; } keydef[i].seg[j].type= (int) type; keydef[i].seg[j].start= pos->key_part[j].offset; keydef[i].seg[j].length= pos->key_part[j].length; keydef[i].seg[j].bit_start= keydef[i].seg[j].bit_end= keydef[i].seg[j].bit_length= 0; keydef[i].seg[j].bit_pos= 0; keydef[i].seg[j].language= field->charset_for_protocol()->number; if (field->real_maybe_null()) { keydef[i].seg[j].null_bit= field->null_bit; keydef[i].seg[j].null_pos= field->null_offset(); } else { keydef[i].seg[j].null_bit= 0; keydef[i].seg[j].null_pos= 0; } if (field->type() == MYSQL_TYPE_BLOB || field->type() == MYSQL_TYPE_GEOMETRY) { keydef[i].seg[j].flag|= HA_BLOB_PART; /* save number of bytes used to pack length */ keydef[i].seg[j].bit_start= (uint) (field->pack_length() - portable_sizeof_char_ptr); } else if (field->type() == MYSQL_TYPE_BIT) { keydef[i].seg[j].bit_length= ((Field_bit *) field)->bit_len; keydef[i].seg[j].bit_start= ((Field_bit *) field)->bit_ofs; keydef[i].seg[j].bit_pos= (uint) (((Field_bit *) field)->bit_ptr - (uchar*) table_arg->record[0]); } } keyseg+= pos->user_defined_key_parts; } if (table_arg->found_next_number_field) keydef[share->next_number_index].flag|= HA_AUTO_KEY; record= table_arg->record[0]; recpos= 0; recinfo_pos= recinfo; while (recpos < (uint) share->reclength) { Field **field, *found= 0; minpos= share->reclength; length= 0; for (field= table_arg->field; *field; field++) { if ((fieldpos= (*field)->offset(record)) >= recpos && fieldpos <= minpos) { /* skip null fields */ if (!(temp_length= (*field)->pack_length_in_rec())) continue; /* Skip null-fields */ if (! found || fieldpos < minpos || (fieldpos == minpos && temp_length < length)) { minpos= fieldpos; found= *field; length= temp_length; } } } DBUG_PRINT("loop", ("found: 0x%lx recpos: %d minpos: %d length: %d", (long) found, recpos, minpos, length)); if (recpos != minpos) { // Reserved space (Null bits?) memset(recinfo_pos, 0, sizeof(*recinfo_pos)); recinfo_pos->type= (int) FIELD_NORMAL; recinfo_pos++->length= (uint16) (minpos - recpos); } if (!found) break; if (found->flags & BLOB_FLAG) recinfo_pos->type= (int) FIELD_BLOB; else if (found->type() == MYSQL_TYPE_VARCHAR) recinfo_pos->type= FIELD_VARCHAR; else if (!(options & HA_OPTION_PACK_RECORD)) recinfo_pos->type= (int) FIELD_NORMAL; else if (found->zero_pack()) recinfo_pos->type= (int) FIELD_SKIP_ZERO; else recinfo_pos->type= (int) ((length <= 3 || (found->flags & ZEROFILL_FLAG)) ? FIELD_NORMAL : found->type() == MYSQL_TYPE_STRING || found->type() == MYSQL_TYPE_VAR_STRING ? FIELD_SKIP_ENDSPACE : FIELD_SKIP_PRESPACE); if (found->real_maybe_null()) { recinfo_pos->null_bit= found->null_bit; recinfo_pos->null_pos= found->null_offset(); } else { recinfo_pos->null_bit= 0; recinfo_pos->null_pos= 0; } (recinfo_pos++)->length= (uint16) length; recpos= minpos + length; DBUG_PRINT("loop", ("length: %d type: %d", recinfo_pos[-1].length,recinfo_pos[-1].type)); } *records_out= (uint) (recinfo_pos - recinfo); DBUG_RETURN(0); } /* Check for underlying table conformance SYNOPSIS check_definition() t1_keyinfo in First table key definition t1_recinfo in First table record definition t1_keys in Number of keys in first table t1_recs in Number of records in first table t2_keyinfo in Second table key definition t2_recinfo in Second table record definition t2_keys in Number of keys in second table t2_recs in Number of records in second table strict in Strict check switch table in handle to the table object DESCRIPTION This function compares two MyISAM definitions. By intention it was done to compare merge table definition against underlying table definition. It may also be used to compare dot-frm and MYI definitions of MyISAM table as well to compare different MyISAM table definitions. For merge table it is not required that number of keys in merge table must exactly match number of keys in underlying table. When calling this function for underlying table conformance check, 'strict' flag must be set to false, and converted merge definition must be passed as t1_*. Otherwise 'strict' flag must be set to 1 and it is not required to pass converted dot-frm definition as t1_*. For compatibility reasons we relax some checks, specifically: - 4.0 (and earlier versions) always set key_alg to 0. - 4.0 (and earlier versions) have the same language for all keysegs. RETURN VALUE 0 - Equal definitions. 1 - Different definitions. TODO - compare FULLTEXT keys; - compare SPATIAL keys; - compare FIELD_SKIP_ZERO which is converted to FIELD_NORMAL correctly (should be corretly detected in table2myisam). */ int check_definition(MI_KEYDEF *t1_keyinfo, MI_COLUMNDEF *t1_recinfo, uint t1_keys, uint t1_recs, MI_KEYDEF *t2_keyinfo, MI_COLUMNDEF *t2_recinfo, uint t2_keys, uint t2_recs, bool strict, TABLE *table_arg) { uint i, j; DBUG_ENTER("check_definition"); my_bool mysql_40_compat= table_arg && table_arg->s->frm_version < FRM_VER_TRUE_VARCHAR; if ((strict ? t1_keys != t2_keys : t1_keys > t2_keys)) { DBUG_PRINT("error", ("Number of keys differs: t1_keys=%u, t2_keys=%u", t1_keys, t2_keys)); DBUG_RETURN(1); } if (t1_recs != t2_recs) { DBUG_PRINT("error", ("Number of recs differs: t1_recs=%u, t2_recs=%u", t1_recs, t2_recs)); DBUG_RETURN(1); } for (i= 0; i < t1_keys; i++) { HA_KEYSEG *t1_keysegs= t1_keyinfo[i].seg; HA_KEYSEG *t2_keysegs= t2_keyinfo[i].seg; if (t1_keyinfo[i].flag & HA_FULLTEXT && t2_keyinfo[i].flag & HA_FULLTEXT) continue; else if (t1_keyinfo[i].flag & HA_FULLTEXT || t2_keyinfo[i].flag & HA_FULLTEXT) { DBUG_PRINT("error", ("Key %d has different definition", i)); DBUG_PRINT("error", ("t1_fulltext= %d, t2_fulltext=%d", test(t1_keyinfo[i].flag & HA_FULLTEXT), test(t2_keyinfo[i].flag & HA_FULLTEXT))); DBUG_RETURN(1); } if (t1_keyinfo[i].flag & HA_SPATIAL && t2_keyinfo[i].flag & HA_SPATIAL) continue; else if (t1_keyinfo[i].flag & HA_SPATIAL || t2_keyinfo[i].flag & HA_SPATIAL) { DBUG_PRINT("error", ("Key %d has different definition", i)); DBUG_PRINT("error", ("t1_spatial= %d, t2_spatial=%d", test(t1_keyinfo[i].flag & HA_SPATIAL), test(t2_keyinfo[i].flag & HA_SPATIAL))); DBUG_RETURN(1); } if ((!mysql_40_compat && t1_keyinfo[i].key_alg != t2_keyinfo[i].key_alg) || t1_keyinfo[i].keysegs != t2_keyinfo[i].keysegs) { DBUG_PRINT("error", ("Key %d has different definition", i)); DBUG_PRINT("error", ("t1_keysegs=%d, t1_key_alg=%d", t1_keyinfo[i].keysegs, t1_keyinfo[i].key_alg)); DBUG_PRINT("error", ("t2_keysegs=%d, t2_key_alg=%d", t2_keyinfo[i].keysegs, t2_keyinfo[i].key_alg)); DBUG_RETURN(1); } for (j= t1_keyinfo[i].keysegs; j--;) { uint8 t1_keysegs_j__type= t1_keysegs[j].type; /* Table migration from 4.1 to 5.1. In 5.1 a *TEXT key part is always HA_KEYTYPE_VARTEXT2. In 4.1 we had only the equivalent of HA_KEYTYPE_VARTEXT1. Since we treat both the same on MyISAM level, we can ignore a mismatch between these types. */ if ((t1_keysegs[j].flag & HA_BLOB_PART) && (t2_keysegs[j].flag & HA_BLOB_PART)) { if ((t1_keysegs_j__type == HA_KEYTYPE_VARTEXT2) && (t2_keysegs[j].type == HA_KEYTYPE_VARTEXT1)) t1_keysegs_j__type= HA_KEYTYPE_VARTEXT1; /* purecov: tested */ else if ((t1_keysegs_j__type == HA_KEYTYPE_VARBINARY2) && (t2_keysegs[j].type == HA_KEYTYPE_VARBINARY1)) t1_keysegs_j__type= HA_KEYTYPE_VARBINARY1; /* purecov: inspected */ } if ((!mysql_40_compat && t1_keysegs[j].language != t2_keysegs[j].language) || t1_keysegs_j__type != t2_keysegs[j].type || t1_keysegs[j].null_bit != t2_keysegs[j].null_bit || t1_keysegs[j].length != t2_keysegs[j].length || t1_keysegs[j].start != t2_keysegs[j].start) { DBUG_PRINT("error", ("Key segment %d (key %d) has different " "definition", j, i)); DBUG_PRINT("error", ("t1_type=%d, t1_language=%d, t1_null_bit=%d, " "t1_length=%d", t1_keysegs[j].type, t1_keysegs[j].language, t1_keysegs[j].null_bit, t1_keysegs[j].length)); DBUG_PRINT("error", ("t2_type=%d, t2_language=%d, t2_null_bit=%d, " "t2_length=%d", t2_keysegs[j].type, t2_keysegs[j].language, t2_keysegs[j].null_bit, t2_keysegs[j].length)); DBUG_RETURN(1); } } } for (i= 0; i < t1_recs; i++) { MI_COLUMNDEF *t1_rec= &t1_recinfo[i]; MI_COLUMNDEF *t2_rec= &t2_recinfo[i]; /* FIELD_SKIP_ZERO can be changed to FIELD_NORMAL in mi_create, see NOTE1 in mi_create.c */ if ((t1_rec->type != t2_rec->type && !(t1_rec->type == (int) FIELD_SKIP_ZERO && t1_rec->length == 1 && t2_rec->type == (int) FIELD_NORMAL)) || t1_rec->length != t2_rec->length || t1_rec->null_bit != t2_rec->null_bit) { DBUG_PRINT("error", ("Field %d has different definition", i)); DBUG_PRINT("error", ("t1_type=%d, t1_length=%d, t1_null_bit=%d", t1_rec->type, t1_rec->length, t1_rec->null_bit)); DBUG_PRINT("error", ("t2_type=%d, t2_length=%d, t2_null_bit=%d", t2_rec->type, t2_rec->length, t2_rec->null_bit)); DBUG_RETURN(1); } } DBUG_RETURN(0); } extern "C" { volatile int *killed_ptr(MI_CHECK *param) { /* In theory Unsafe conversion, but should be ok for now */ return (int*) &(((THD *)(param->thd))->killed); } void mi_check_print_error(MI_CHECK *param, const char *fmt,...) { param->error_printed|=1; param->out_flag|= O_DATA_LOST; va_list args; va_start(args, fmt); mi_check_print_msg(param, "error", fmt, args); va_end(args); } void mi_check_print_info(MI_CHECK *param, const char *fmt,...) { va_list args; va_start(args, fmt); mi_check_print_msg(param, "info", fmt, args); va_end(args); } void mi_check_print_warning(MI_CHECK *param, const char *fmt,...) { param->warning_printed=1; param->out_flag|= O_DATA_LOST; va_list args; va_start(args, fmt); mi_check_print_msg(param, "warning", fmt, args); va_end(args); } /** Report list of threads (and queries) accessing a table, thread_id of a thread that detected corruption, ource file name and line number where this corruption was detected, optional extra information (string). This function is intended to be used when table corruption is detected. @param[in] file MI_INFO object. @param[in] message Optional error message. @param[in] sfile Name of source file. @param[in] sline Line number in source file. @return void */ void _mi_report_crashed(MI_INFO *file, const char *message, const char *sfile, uint sline) { THD *cur_thd; LIST *element; char buf[1024]; mysql_mutex_lock(&file->s->intern_lock); if ((cur_thd= (THD*) file->in_use.data)) sql_print_error("Got an error from thread_id=%lu, %s:%d", cur_thd->thread_id, sfile, sline); else sql_print_error("Got an error from unknown thread, %s:%d", sfile, sline); if (message) sql_print_error("%s", message);<|fim▁hole|> sql_print_error("%s", thd ? thd_security_context(thd, buf, sizeof(buf), 0) : "Unknown thread accessing table"); } mysql_mutex_unlock(&file->s->intern_lock); } } ha_myisam::ha_myisam(handlerton *hton, TABLE_SHARE *table_arg) :handler(hton, table_arg), file(0), int_table_flags(HA_NULL_IN_KEY | HA_CAN_FULLTEXT | HA_CAN_SQL_HANDLER | HA_BINLOG_ROW_CAPABLE | HA_BINLOG_STMT_CAPABLE | HA_DUPLICATE_POS | HA_CAN_INDEX_BLOBS | HA_AUTO_PART_KEY | HA_FILE_BASED | HA_CAN_GEOMETRY | HA_NO_TRANSACTIONS | HA_CAN_INSERT_DELAYED | HA_CAN_BIT_FIELD | HA_CAN_RTREEKEYS | HA_HAS_RECORDS | HA_STATS_RECORDS_IS_EXACT | HA_CAN_REPAIR), can_enable_indexes(1) {} handler *ha_myisam::clone(const char *name, MEM_ROOT *mem_root) { ha_myisam *new_handler= static_cast <ha_myisam *>(handler::clone(name, mem_root)); if (new_handler) new_handler->file->state= file->state; return new_handler; } static const char *ha_myisam_exts[] = { ".MYI", ".MYD", NullS }; const char **ha_myisam::bas_ext() const { return ha_myisam_exts; } /** @brief Check if the given db.tablename is a system table for this SE. @param db Database name to check. @param table_name table name to check. @param is_sql_layer_system_table if the supplied db.table_name is a SQL layer system table. @note Currently, only MYISAM engine supports all the SQL layer system tables, and hence it returns true, when is_sql_layer_system_table is set. @note In case there is a need to define MYISAM specific system database, then please see reference implementation in ha_example.cc. @return @retval TRUE Given db.table_name is supported system table. @retval FALSE Given db.table_name is not a supported system table. */ static bool myisam_is_supported_system_table(const char *db, const char *table_name, bool is_sql_layer_system_table) { // Does MYISAM support "ALL" SQL layer system tables ? if (is_sql_layer_system_table) return true; /* Currently MYISAM does not support any other SE specific system tables. If in future it does, please see ha_example.cc for reference implementation */ return false; } const char *ha_myisam::index_type(uint key_number) { return ((table->key_info[key_number].flags & HA_FULLTEXT) ? "FULLTEXT" : (table->key_info[key_number].flags & HA_SPATIAL) ? "SPATIAL" : (table->key_info[key_number].algorithm == HA_KEY_ALG_RTREE) ? "RTREE" : "BTREE"); } /* Name is here without an extension */ int ha_myisam::open(const char *name, int mode, uint test_if_locked) { MI_KEYDEF *keyinfo; MI_COLUMNDEF *recinfo= 0; uint recs; uint i; /* If the user wants to have memory mapped data files, add an open_flag. Do not memory map temporary tables because they are expected to be inserted and thus extended a lot. Memory mapping is efficient for files that keep their size, but very inefficient for growing files. Using an open_flag instead of calling mi_extra(... HA_EXTRA_MMAP ...) after mi_open() has the advantage that the mapping is not repeated for every open, but just done on the initial open, when the MyISAM share is created. Everytime the server requires to open a new instance of a table it calls this method. We will always supply HA_OPEN_MMAP for a permanent table. However, the MyISAM storage engine will ignore this flag if this is a secondary open of a table that is in use by other threads already (if the MyISAM share exists already). */ if (!(test_if_locked & HA_OPEN_TMP_TABLE) && opt_myisam_use_mmap) test_if_locked|= HA_OPEN_MMAP; if (!(file=mi_open(name, mode, test_if_locked | HA_OPEN_FROM_SQL_LAYER))) return (my_errno ? my_errno : -1); if (!table->s->tmp_table) /* No need to perform a check for tmp table */ { if ((my_errno= table2myisam(table, &keyinfo, &recinfo, &recs))) { /* purecov: begin inspected */ DBUG_PRINT("error", ("Failed to convert TABLE object to MyISAM " "key and column definition")); goto err; /* purecov: end */ } if (check_definition(keyinfo, recinfo, table->s->keys, recs, file->s->keyinfo, file->s->rec, file->s->base.keys, file->s->base.fields, true, table)) { /* purecov: begin inspected */ my_errno= HA_ERR_CRASHED; goto err; /* purecov: end */ } } if (test_if_locked & (HA_OPEN_IGNORE_IF_LOCKED | HA_OPEN_TMP_TABLE)) (void) mi_extra(file, HA_EXTRA_NO_WAIT_LOCK, 0); info(HA_STATUS_NO_LOCK | HA_STATUS_VARIABLE | HA_STATUS_CONST); if (!(test_if_locked & HA_OPEN_WAIT_IF_LOCKED)) (void) mi_extra(file, HA_EXTRA_WAIT_LOCK, 0); if (!table->s->db_record_offset) int_table_flags|=HA_REC_NOT_IN_SEQ; if (file->s->options & (HA_OPTION_CHECKSUM | HA_OPTION_COMPRESS_RECORD)) int_table_flags|=HA_HAS_CHECKSUM; for (i= 0; i < table->s->keys; i++) { plugin_ref parser= table->key_info[i].parser; if (table->key_info[i].flags & HA_USES_PARSER) file->s->keyinfo[i].parser= (struct st_mysql_ftparser *)plugin_decl(parser)->info; table->key_info[i].block_size= file->s->keyinfo[i].block_length; } my_errno= 0; goto end; err: this->close(); end: /* Both recinfo and keydef are allocated by my_multi_malloc(), thus only recinfo must be freed. */ if (recinfo) my_free(recinfo); return my_errno; } int ha_myisam::close(void) { MI_INFO *tmp=file; file=0; return mi_close(tmp); } int ha_myisam::write_row(uchar *buf) { ha_statistic_increment(&SSV::ha_write_count); /* If we have an auto_increment column and we are writing a changed row or a new row, then update the auto_increment value in the record. */ if (table->next_number_field && buf == table->record[0]) { int error; if ((error= update_auto_increment())) return error; } return mi_write(file,buf); } int ha_myisam::check(THD* thd, HA_CHECK_OPT* check_opt) { if (!file) return HA_ADMIN_INTERNAL_ERROR; int error; MI_CHECK param; MYISAM_SHARE* share = file->s; const char *old_proc_info=thd->proc_info; thd_proc_info(thd, "Checking table"); myisamchk_init(&param); param.thd = thd; param.op_name = "check"; param.db_name= table->s->db.str; param.table_name= table->alias; param.testflag = check_opt->flags | T_CHECK | T_SILENT; param.stats_method= (enum_mi_stats_method)THDVAR(thd, stats_method); if (!(table->db_stat & HA_READ_ONLY)) param.testflag|= T_STATISTICS; param.using_global_keycache = 1; if (!mi_is_crashed(file) && (((param.testflag & T_CHECK_ONLY_CHANGED) && !(share->state.changed & (STATE_CHANGED | STATE_CRASHED | STATE_CRASHED_ON_REPAIR)) && share->state.open_count == 0) || ((param.testflag & T_FAST) && (share->state.open_count == (uint) (share->global_changed ? 1 : 0))))) return HA_ADMIN_ALREADY_DONE; error = chk_status(&param, file); // Not fatal error = chk_size(&param, file); if (!error) error |= chk_del(&param, file, param.testflag); if (!error) error = chk_key(&param, file); if (!error) { if ((!(param.testflag & T_QUICK) && ((share->options & (HA_OPTION_PACK_RECORD | HA_OPTION_COMPRESS_RECORD)) || (param.testflag & (T_EXTEND | T_MEDIUM)))) || mi_is_crashed(file)) { uint old_testflag=param.testflag; param.testflag|=T_MEDIUM; if (!(error= init_io_cache(&param.read_cache, file->dfile, my_default_record_cache_size, READ_CACHE, share->pack.header_length, 1, MYF(MY_WME)))) { error= chk_data_link(&param, file, param.testflag & T_EXTEND); end_io_cache(&(param.read_cache)); } param.testflag= old_testflag; } } if (!error) { if ((share->state.changed & (STATE_CHANGED | STATE_CRASHED_ON_REPAIR | STATE_CRASHED | STATE_NOT_ANALYZED)) || (param.testflag & T_STATISTICS) || mi_is_crashed(file)) { file->update|=HA_STATE_CHANGED | HA_STATE_ROW_CHANGED; mysql_mutex_lock(&share->intern_lock); share->state.changed&= ~(STATE_CHANGED | STATE_CRASHED | STATE_CRASHED_ON_REPAIR); if (!(table->db_stat & HA_READ_ONLY)) error=update_state_info(&param,file,UPDATE_TIME | UPDATE_OPEN_COUNT | UPDATE_STAT); mysql_mutex_unlock(&share->intern_lock); info(HA_STATUS_NO_LOCK | HA_STATUS_TIME | HA_STATUS_VARIABLE | HA_STATUS_CONST); } } else if (!mi_is_crashed(file) && !thd->killed) { mi_mark_crashed(file); file->update |= HA_STATE_CHANGED | HA_STATE_ROW_CHANGED; } thd_proc_info(thd, old_proc_info); return error ? HA_ADMIN_CORRUPT : HA_ADMIN_OK; } /* analyze the key distribution in the table As the table may be only locked for read, we have to take into account that two threads may do an analyze at the same time! */ int ha_myisam::analyze(THD *thd, HA_CHECK_OPT* check_opt) { int error=0; MI_CHECK param; MYISAM_SHARE* share = file->s; myisamchk_init(&param); param.thd = thd; param.op_name= "analyze"; param.db_name= table->s->db.str; param.table_name= table->alias; param.testflag= (T_FAST | T_CHECK | T_SILENT | T_STATISTICS | T_DONT_CHECK_CHECKSUM); param.using_global_keycache = 1; param.stats_method= (enum_mi_stats_method)THDVAR(thd, stats_method); if (!(share->state.changed & STATE_NOT_ANALYZED)) return HA_ADMIN_ALREADY_DONE; error = chk_key(&param, file); if (!error) { mysql_mutex_lock(&share->intern_lock); error=update_state_info(&param,file,UPDATE_STAT); mysql_mutex_unlock(&share->intern_lock); } else if (!mi_is_crashed(file) && !thd->killed) mi_mark_crashed(file); return error ? HA_ADMIN_CORRUPT : HA_ADMIN_OK; } int ha_myisam::repair(THD* thd, HA_CHECK_OPT *check_opt) { int error; MI_CHECK param; ha_rows start_records; if (!file) return HA_ADMIN_INTERNAL_ERROR; myisamchk_init(&param); param.thd = thd; param.op_name= "repair"; param.testflag= ((check_opt->flags & ~(T_EXTEND)) | T_SILENT | T_FORCE_CREATE | T_CALC_CHECKSUM | (check_opt->flags & T_EXTEND ? T_REP : T_REP_BY_SORT)); param.sort_buffer_length= THDVAR(thd, sort_buffer_size); start_records=file->state->records; while ((error=repair(thd,param,0)) && param.retry_repair) { param.retry_repair=0; if (test_all_bits(param.testflag, (uint) (T_RETRY_WITHOUT_QUICK | T_QUICK))) { param.testflag&= ~T_RETRY_WITHOUT_QUICK; sql_print_information("Retrying repair of: '%s' without quick", table->s->path.str); continue; } param.testflag&= ~T_QUICK; if ((param.testflag & T_REP_BY_SORT)) { param.testflag= (param.testflag & ~T_REP_BY_SORT) | T_REP; sql_print_information("Retrying repair of: '%s' with keycache", table->s->path.str); continue; } break; } if (!error && start_records != file->state->records && !(check_opt->flags & T_VERY_SILENT)) { char llbuff[22],llbuff2[22]; sql_print_information("Found %s of %s rows when repairing '%s'", llstr(file->state->records, llbuff), llstr(start_records, llbuff2), table->s->path.str); } return error; } int ha_myisam::optimize(THD* thd, HA_CHECK_OPT *check_opt) { int error; if (!file) return HA_ADMIN_INTERNAL_ERROR; MI_CHECK param; myisamchk_init(&param); param.thd = thd; param.op_name= "optimize"; param.testflag= (check_opt->flags | T_SILENT | T_FORCE_CREATE | T_REP_BY_SORT | T_STATISTICS | T_SORT_INDEX); param.sort_buffer_length= THDVAR(thd, sort_buffer_size); if ((error= repair(thd,param,1)) && param.retry_repair) { sql_print_warning("Warning: Optimize table got errno %d on %s.%s, retrying", my_errno, param.db_name, param.table_name); param.testflag&= ~T_REP_BY_SORT; error= repair(thd,param,1); } return error; } int ha_myisam::repair(THD *thd, MI_CHECK &param, bool do_optimize) { int error=0; uint local_testflag=param.testflag; bool optimize_done= !do_optimize, statistics_done=0; bool has_old_locks= thd->locked_tables_mode || file->lock_type != F_UNLCK; const char *old_proc_info=thd->proc_info; char fixed_name[FN_REFLEN]; MYISAM_SHARE* share = file->s; ha_rows rows= file->state->records; DBUG_ENTER("ha_myisam::repair"); param.db_name= table->s->db.str; param.table_name= table->alias; param.tmpfile_createflag = O_RDWR | O_TRUNC; param.using_global_keycache = 1; param.thd= thd; param.tmpdir= &mysql_tmpdir_list; param.out_flag= 0; strmov(fixed_name,file->filename); // Release latches since this can take a long time ha_release_temporary_latches(thd); // Don't lock tables if we have used LOCK TABLE or already locked. if (!has_old_locks && mi_lock_database(file, table->s->tmp_table ? F_EXTRA_LCK : F_WRLCK)) { char errbuf[MYSYS_STRERROR_SIZE]; mi_check_print_error(&param, ER(ER_CANT_LOCK), my_errno, my_strerror(errbuf, sizeof(errbuf), my_errno)); DBUG_RETURN(HA_ADMIN_FAILED); } if (!do_optimize || ((file->state->del || share->state.split != file->state->records) && (!(param.testflag & T_QUICK) || !(share->state.changed & STATE_NOT_OPTIMIZED_KEYS)))) { ulonglong key_map= ((local_testflag & T_CREATE_MISSING_KEYS) ? mi_get_mask_all_keys_active(share->base.keys) : share->state.key_map); uint testflag=param.testflag; #ifdef HAVE_MMAP bool remap= test(share->file_map); /* mi_repair*() functions family use file I/O even if memory mapping is available. Since mixing mmap I/O and file I/O may cause various artifacts, memory mapping must be disabled. */ if (remap) mi_munmap_file(file); #endif if (mi_test_if_sort_rep(file,file->state->records,key_map,0) && (local_testflag & T_REP_BY_SORT)) { local_testflag|= T_STATISTICS; param.testflag|= T_STATISTICS; // We get this for free statistics_done=1; if (THDVAR(thd, repair_threads)>1) { char buf[40]; /* TODO: respect myisam_repair_threads variable */ my_snprintf(buf, 40, "Repair with %d threads", my_count_bits(key_map)); thd_proc_info(thd, buf); error = mi_repair_parallel(&param, file, fixed_name, param.testflag & T_QUICK); thd_proc_info(thd, "Repair done"); // to reset proc_info, as // it was pointing to local buffer } else { thd_proc_info(thd, "Repair by sorting"); error = mi_repair_by_sort(&param, file, fixed_name, param.testflag & T_QUICK); } } else { thd_proc_info(thd, "Repair with keycache"); param.testflag &= ~T_REP_BY_SORT; error= mi_repair(&param, file, fixed_name, param.testflag & T_QUICK); } #ifdef HAVE_MMAP if (remap) mi_dynmap_file(file, file->state->data_file_length); #endif param.testflag=testflag; optimize_done=1; } if (!error) { if ((local_testflag & T_SORT_INDEX) && (share->state.changed & STATE_NOT_SORTED_PAGES)) { optimize_done=1; thd_proc_info(thd, "Sorting index"); error=mi_sort_index(&param,file,fixed_name); } if (!statistics_done && (local_testflag & T_STATISTICS)) { if (share->state.changed & STATE_NOT_ANALYZED) { optimize_done=1; thd_proc_info(thd, "Analyzing"); error = chk_key(&param, file); } else local_testflag&= ~T_STATISTICS; // Don't update statistics } } thd_proc_info(thd, "Saving state"); if (!error) { if ((share->state.changed & STATE_CHANGED) || mi_is_crashed(file)) { share->state.changed&= ~(STATE_CHANGED | STATE_CRASHED | STATE_CRASHED_ON_REPAIR); file->update|=HA_STATE_CHANGED | HA_STATE_ROW_CHANGED; } /* the following 'if', thought conceptually wrong, is a useful optimization nevertheless. */ if (file->state != &file->s->state.state) file->s->state.state = *file->state; if (file->s->base.auto_key) update_auto_increment_key(&param, file, 1); if (optimize_done) error = update_state_info(&param, file, UPDATE_TIME | UPDATE_OPEN_COUNT | (local_testflag & T_STATISTICS ? UPDATE_STAT : 0)); info(HA_STATUS_NO_LOCK | HA_STATUS_TIME | HA_STATUS_VARIABLE | HA_STATUS_CONST); if (rows != file->state->records && ! (param.testflag & T_VERY_SILENT)) { char llbuff[22],llbuff2[22]; mi_check_print_warning(&param,"Number of rows changed from %s to %s", llstr(rows,llbuff), llstr(file->state->records,llbuff2)); } } else { mi_mark_crashed_on_repair(file); file->update |= HA_STATE_CHANGED | HA_STATE_ROW_CHANGED; update_state_info(&param, file, 0); } thd_proc_info(thd, old_proc_info); if (!has_old_locks) mi_lock_database(file,F_UNLCK); DBUG_RETURN(error ? HA_ADMIN_FAILED : !optimize_done ? HA_ADMIN_ALREADY_DONE : HA_ADMIN_OK); } /* Assign table indexes to a specific key cache. */ int ha_myisam::assign_to_keycache(THD* thd, HA_CHECK_OPT *check_opt) { KEY_CACHE *new_key_cache= check_opt->key_cache; const char *errmsg= 0; int error= HA_ADMIN_OK; ulonglong map; TABLE_LIST *table_list= table->pos_in_table_list; DBUG_ENTER("ha_myisam::assign_to_keycache"); table->keys_in_use_for_query.clear_all(); if (table_list->process_index_hints(table)) DBUG_RETURN(HA_ADMIN_FAILED); map= ~(ulonglong) 0; if (!table->keys_in_use_for_query.is_clear_all()) /* use all keys if there's no list specified by the user through hints */ map= table->keys_in_use_for_query.to_ulonglong(); if ((error= mi_assign_to_key_cache(file, map, new_key_cache))) { char buf[STRING_BUFFER_USUAL_SIZE]; my_snprintf(buf, sizeof(buf), "Failed to flush to index file (errno: %d)", error); errmsg= buf; error= HA_ADMIN_CORRUPT; } if (error != HA_ADMIN_OK) { /* Send error to user */ MI_CHECK param; myisamchk_init(&param); param.thd= thd; param.op_name= "assign_to_keycache"; param.db_name= table->s->db.str; param.table_name= table->s->table_name.str; param.testflag= 0; mi_check_print_error(&param, errmsg); } DBUG_RETURN(error); } /* Preload pages of the index file for a table into the key cache. */ int ha_myisam::preload_keys(THD* thd, HA_CHECK_OPT *check_opt) { int error; const char *errmsg; ulonglong map; TABLE_LIST *table_list= table->pos_in_table_list; my_bool ignore_leaves= table_list->ignore_leaves; char buf[MYSQL_ERRMSG_SIZE]; DBUG_ENTER("ha_myisam::preload_keys"); table->keys_in_use_for_query.clear_all(); if (table_list->process_index_hints(table)) DBUG_RETURN(HA_ADMIN_FAILED); map= ~(ulonglong) 0; /* Check validity of the index references */ if (!table->keys_in_use_for_query.is_clear_all()) /* use all keys if there's no list specified by the user through hints */ map= table->keys_in_use_for_query.to_ulonglong(); mi_extra(file, HA_EXTRA_PRELOAD_BUFFER_SIZE, (void *) &thd->variables.preload_buff_size); if ((error= mi_preload(file, map, ignore_leaves))) { switch (error) { case HA_ERR_NON_UNIQUE_BLOCK_SIZE: errmsg= "Indexes use different block sizes"; break; case HA_ERR_OUT_OF_MEM: errmsg= "Failed to allocate buffer"; break; default: my_snprintf(buf, sizeof(buf), "Failed to read from index file (errno: %d)", my_errno); errmsg= buf; } error= HA_ADMIN_FAILED; goto err; } DBUG_RETURN(HA_ADMIN_OK); err: { MI_CHECK param; myisamchk_init(&param); param.thd= thd; param.op_name= "preload_keys"; param.db_name= table->s->db.str; param.table_name= table->s->table_name.str; param.testflag= 0; mi_check_print_error(&param, errmsg); DBUG_RETURN(error); } } /* Disable indexes, making it persistent if requested. SYNOPSIS disable_indexes() mode mode of operation: HA_KEY_SWITCH_NONUNIQ disable all non-unique keys HA_KEY_SWITCH_ALL disable all keys HA_KEY_SWITCH_NONUNIQ_SAVE dis. non-uni. and make persistent HA_KEY_SWITCH_ALL_SAVE dis. all keys and make persistent IMPLEMENTATION HA_KEY_SWITCH_NONUNIQ is not implemented. HA_KEY_SWITCH_ALL_SAVE is not implemented. RETURN 0 ok HA_ERR_WRONG_COMMAND mode not implemented. */ int ha_myisam::disable_indexes(uint mode) { int error; if (mode == HA_KEY_SWITCH_ALL) { /* call a storage engine function to switch the key map */ error= mi_disable_indexes(file); } else if (mode == HA_KEY_SWITCH_NONUNIQ_SAVE) { mi_extra(file, HA_EXTRA_NO_KEYS, 0); info(HA_STATUS_CONST); // Read new key info error= 0; } else { /* mode not implemented */ error= HA_ERR_WRONG_COMMAND; } return error; } /* Enable indexes, making it persistent if requested. SYNOPSIS enable_indexes() mode mode of operation: HA_KEY_SWITCH_NONUNIQ enable all non-unique keys HA_KEY_SWITCH_ALL enable all keys HA_KEY_SWITCH_NONUNIQ_SAVE en. non-uni. and make persistent HA_KEY_SWITCH_ALL_SAVE en. all keys and make persistent DESCRIPTION Enable indexes, which might have been disabled by disable_index() before. The modes without _SAVE work only if both data and indexes are empty, since the MyISAM repair would enable them persistently. To be sure in these cases, call handler::delete_all_rows() before. IMPLEMENTATION HA_KEY_SWITCH_NONUNIQ is not implemented. HA_KEY_SWITCH_ALL_SAVE is not implemented. RETURN 0 ok !=0 Error, among others: HA_ERR_CRASHED data or index is non-empty. Delete all rows and retry. HA_ERR_WRONG_COMMAND mode not implemented. */ int ha_myisam::enable_indexes(uint mode) { int error; DBUG_EXECUTE_IF("wait_in_enable_indexes", debug_wait_for_kill("wait_in_enable_indexes"); ); if (mi_is_all_keys_active(file->s->state.key_map, file->s->base.keys)) { /* All indexes are enabled already. */ return 0; } if (mode == HA_KEY_SWITCH_ALL) { error= mi_enable_indexes(file); /* Do not try to repair on error, as this could make the enabled state persistent, but mode==HA_KEY_SWITCH_ALL forbids it. */ } else if (mode == HA_KEY_SWITCH_NONUNIQ_SAVE) { THD *thd=current_thd; MI_CHECK param; const char *save_proc_info=thd->proc_info; thd_proc_info(thd, "Creating index"); myisamchk_init(&param); param.op_name= "recreating_index"; param.testflag= (T_SILENT | T_REP_BY_SORT | T_QUICK | T_CREATE_MISSING_KEYS); param.myf_rw&= ~MY_WAIT_IF_FULL; param.sort_buffer_length= THDVAR(thd, sort_buffer_size); param.stats_method= (enum_mi_stats_method)THDVAR(thd, stats_method); param.tmpdir=&mysql_tmpdir_list; if ((error= (repair(thd,param,0) != HA_ADMIN_OK)) && param.retry_repair) { sql_print_warning("Warning: Enabling keys got errno %d on %s.%s, retrying", my_errno, param.db_name, param.table_name); /* Repairing by sort failed. Now try standard repair method. Still we want to fix only index file. If data file corruption was detected (T_RETRY_WITHOUT_QUICK), we shouldn't do much here. Let implicit repair do this job. */ if (!(param.testflag & T_RETRY_WITHOUT_QUICK)) { param.testflag&= ~T_REP_BY_SORT; error= (repair(thd,param,0) != HA_ADMIN_OK); } /* If the standard repair succeeded, clear all error messages which might have been set by the first repair. They can still be seen with SHOW WARNINGS then. */ if (! error) thd->clear_error(); } info(HA_STATUS_CONST); thd_proc_info(thd, save_proc_info); } else { /* mode not implemented */ error= HA_ERR_WRONG_COMMAND; } return error; } /* Test if indexes are disabled. SYNOPSIS indexes_are_disabled() no parameters RETURN 0 indexes are not disabled 1 all indexes are disabled [2 non-unique indexes are disabled - NOT YET IMPLEMENTED] */ int ha_myisam::indexes_are_disabled(void) { return mi_indexes_are_disabled(file); } /* prepare for a many-rows insert operation e.g. - disable indexes (if they can be recreated fast) or activate special bulk-insert optimizations SYNOPSIS start_bulk_insert(rows) rows Rows to be inserted 0 if we don't know NOTICE Do not forget to call end_bulk_insert() later! */ void ha_myisam::start_bulk_insert(ha_rows rows) { DBUG_ENTER("ha_myisam::start_bulk_insert"); THD *thd= current_thd; ulong size= min(thd->variables.read_buff_size, (ulong) (table->s->avg_row_length*rows)); DBUG_PRINT("info",("start_bulk_insert: rows %lu size %lu", (ulong) rows, size)); /* don't enable row cache if too few rows */ if (! rows || (rows > MI_MIN_ROWS_TO_USE_WRITE_CACHE)) mi_extra(file, HA_EXTRA_WRITE_CACHE, (void*) &size); can_enable_indexes= mi_is_all_keys_active(file->s->state.key_map, file->s->base.keys); /* Only disable old index if the table was empty and we are inserting a lot of rows. Note that in end_bulk_insert() we may truncate the table if enable_indexes() failed, thus it's essential that indexes are disabled ONLY for an empty table. */ if (file->state->records == 0 && can_enable_indexes && (!rows || rows >= MI_MIN_ROWS_TO_DISABLE_INDEXES)) mi_disable_non_unique_index(file,rows); else if (!file->bulk_insert && (!rows || rows >= MI_MIN_ROWS_TO_USE_BULK_INSERT)) { mi_init_bulk_insert(file, thd->variables.bulk_insert_buff_size, rows); } DBUG_VOID_RETURN; } /* end special bulk-insert optimizations, which have been activated by start_bulk_insert(). SYNOPSIS end_bulk_insert() no arguments RETURN 0 OK != 0 Error */ int ha_myisam::end_bulk_insert() { mi_end_bulk_insert(file); int err=mi_extra(file, HA_EXTRA_NO_CACHE, 0); if (!err) { if (can_enable_indexes) { /* Truncate the table when enable index operation is killed. After truncating the table we don't need to enable the indexes, because the last repair operation is aborted after setting the indexes as active and trying to recreate them. */ if (((err= enable_indexes(HA_KEY_SWITCH_NONUNIQ_SAVE)) != 0) && current_thd->killed) { delete_all_rows(); /* not crashed, despite being killed during repair */ file->s->state.changed&= ~(STATE_CRASHED|STATE_CRASHED_ON_REPAIR); } } } return err; } bool ha_myisam::check_and_repair(THD *thd) { int error=0; int marked_crashed; HA_CHECK_OPT check_opt; DBUG_ENTER("ha_myisam::check_and_repair"); check_opt.init(); check_opt.flags= T_MEDIUM | T_AUTO_REPAIR; // Don't use quick if deleted rows if (!file->state->del && (myisam_recover_options & HA_RECOVER_QUICK)) check_opt.flags|=T_QUICK; sql_print_warning("Checking table: '%s'",table->s->path.str); const CSET_STRING query_backup= thd->query_string; thd->set_query(table->s->table_name.str, (uint) table->s->table_name.length, system_charset_info); if ((marked_crashed= mi_is_crashed(file)) || check(thd, &check_opt)) { sql_print_warning("Recovering table: '%s'",table->s->path.str); check_opt.flags= ((myisam_recover_options & HA_RECOVER_BACKUP ? T_BACKUP_DATA : 0) | (marked_crashed ? 0 : T_QUICK) | (myisam_recover_options & HA_RECOVER_FORCE ? 0 : T_SAFE_REPAIR) | T_AUTO_REPAIR); if (repair(thd, &check_opt)) error=1; } thd->set_query(query_backup); DBUG_RETURN(error); } bool ha_myisam::is_crashed() const { return (file->s->state.changed & STATE_CRASHED || (my_disable_locking && file->s->state.open_count)); } int ha_myisam::update_row(const uchar *old_data, uchar *new_data) { ha_statistic_increment(&SSV::ha_update_count); return mi_update(file,old_data,new_data); } int ha_myisam::delete_row(const uchar *buf) { ha_statistic_increment(&SSV::ha_delete_count); return mi_delete(file,buf); } C_MODE_START ICP_RESULT index_cond_func_myisam(void *arg) { ha_myisam *h= (ha_myisam*)arg; if (h->end_range && h->compare_key_icp(h->end_range) > 0) return ICP_OUT_OF_RANGE; /* caller should return HA_ERR_END_OF_FILE already */ return (ICP_RESULT) test(h->pushed_idx_cond->val_int()); } C_MODE_END int ha_myisam::index_init(uint idx, bool sorted) { active_index=idx; if (pushed_idx_cond_keyno == idx) mi_set_index_cond_func(file, index_cond_func_myisam, this); return 0; } int ha_myisam::index_end() { active_index=MAX_KEY; //pushed_idx_cond_keyno= MAX_KEY; mi_set_index_cond_func(file, NULL, 0); in_range_check_pushed_down= FALSE; ds_mrr.dsmrr_close(); return 0; } int ha_myisam::rnd_end() { ds_mrr.dsmrr_close(); return 0; } int ha_myisam::index_read_map(uchar *buf, const uchar *key, key_part_map keypart_map, enum ha_rkey_function find_flag) { MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str); DBUG_ASSERT(inited==INDEX); ha_statistic_increment(&SSV::ha_read_key_count); int error=mi_rkey(file, buf, active_index, key, keypart_map, find_flag); table->status=error ? STATUS_NOT_FOUND: 0; MYSQL_INDEX_READ_ROW_DONE(error); return error; } int ha_myisam::index_read_idx_map(uchar *buf, uint index, const uchar *key, key_part_map keypart_map, enum ha_rkey_function find_flag) { DBUG_ASSERT(pushed_idx_cond == NULL); DBUG_ASSERT(pushed_idx_cond_keyno == MAX_KEY); MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str); ha_statistic_increment(&SSV::ha_read_key_count); int error=mi_rkey(file, buf, index, key, keypart_map, find_flag); table->status=error ? STATUS_NOT_FOUND: 0; MYSQL_INDEX_READ_ROW_DONE(error); return error; } int ha_myisam::index_read_last_map(uchar *buf, const uchar *key, key_part_map keypart_map) { MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str); DBUG_ENTER("ha_myisam::index_read_last"); DBUG_ASSERT(inited==INDEX); ha_statistic_increment(&SSV::ha_read_key_count); int error=mi_rkey(file, buf, active_index, key, keypart_map, HA_READ_PREFIX_LAST); table->status=error ? STATUS_NOT_FOUND: 0; MYSQL_INDEX_READ_ROW_DONE(error); DBUG_RETURN(error); } int ha_myisam::index_next(uchar *buf) { MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str); DBUG_ASSERT(inited==INDEX); ha_statistic_increment(&SSV::ha_read_next_count); int error=mi_rnext(file,buf,active_index); table->status=error ? STATUS_NOT_FOUND: 0; MYSQL_INDEX_READ_ROW_DONE(error); return error; } int ha_myisam::index_prev(uchar *buf) { MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str); DBUG_ASSERT(inited==INDEX); ha_statistic_increment(&SSV::ha_read_prev_count); int error=mi_rprev(file,buf, active_index); table->status=error ? STATUS_NOT_FOUND: 0; MYSQL_INDEX_READ_ROW_DONE(error); return error; } int ha_myisam::index_first(uchar *buf) { MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str); DBUG_ASSERT(inited==INDEX); ha_statistic_increment(&SSV::ha_read_first_count); int error=mi_rfirst(file, buf, active_index); table->status=error ? STATUS_NOT_FOUND: 0; MYSQL_INDEX_READ_ROW_DONE(error); return error; } int ha_myisam::index_last(uchar *buf) { MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str); DBUG_ASSERT(inited==INDEX); ha_statistic_increment(&SSV::ha_read_last_count); int error=mi_rlast(file, buf, active_index); table->status=error ? STATUS_NOT_FOUND: 0; MYSQL_INDEX_READ_ROW_DONE(error); return error; } int ha_myisam::index_next_same(uchar *buf, const uchar *key __attribute__((unused)), uint length __attribute__((unused))) { int error; DBUG_ASSERT(inited==INDEX); MYSQL_INDEX_READ_ROW_START(table_share->db.str, table_share->table_name.str); ha_statistic_increment(&SSV::ha_read_next_count); do { error= mi_rnext_same(file,buf); } while (error == HA_ERR_RECORD_DELETED); table->status=error ? STATUS_NOT_FOUND: 0; MYSQL_INDEX_READ_ROW_DONE(error); return error; } int ha_myisam::rnd_init(bool scan) { if (scan) return mi_scan_init(file); return mi_reset(file); // Free buffers } int ha_myisam::rnd_next(uchar *buf) { MYSQL_READ_ROW_START(table_share->db.str, table_share->table_name.str, TRUE); ha_statistic_increment(&SSV::ha_read_rnd_next_count); int error=mi_scan(file, buf); table->status=error ? STATUS_NOT_FOUND: 0; MYSQL_READ_ROW_DONE(error); return error; } int ha_myisam::restart_rnd_next(uchar *buf, uchar *pos) { return rnd_pos(buf,pos); } int ha_myisam::rnd_pos(uchar *buf, uchar *pos) { MYSQL_READ_ROW_START(table_share->db.str, table_share->table_name.str, FALSE); ha_statistic_increment(&SSV::ha_read_rnd_count); int error=mi_rrnd(file, buf, my_get_ptr(pos,ref_length)); table->status=error ? STATUS_NOT_FOUND: 0; MYSQL_READ_ROW_DONE(error); return error; } void ha_myisam::position(const uchar *record) { my_off_t row_position= mi_position(file); my_store_ptr(ref, ref_length, row_position); } int ha_myisam::info(uint flag) { MI_ISAMINFO misam_info; char name_buff[FN_REFLEN]; (void) mi_status(file,&misam_info,flag); if (flag & HA_STATUS_VARIABLE) { stats.records= misam_info.records; stats.deleted= misam_info.deleted; stats.data_file_length= misam_info.data_file_length; stats.index_file_length= misam_info.index_file_length; stats.delete_length= misam_info.delete_length; stats.check_time= (ulong) misam_info.check_time; stats.mean_rec_length= misam_info.mean_reclength; } if (flag & HA_STATUS_CONST) { TABLE_SHARE *share= table->s; stats.max_data_file_length= misam_info.max_data_file_length; stats.max_index_file_length= misam_info.max_index_file_length; stats.create_time= misam_info.create_time; /* We want the value of stats.mrr_length_per_rec to be platform independent. The size of the chunk at the end of the join buffer used for MRR needs is calculated now basing on the values passed in the stats structure. The remaining part of the join buffer is used for records. A different number of records in the buffer results in a different number of buffer refills and in a different order of records in the result set. */ stats.mrr_length_per_rec= misam_info.reflength + 8; // 8=max(sizeof(void *)) ref_length= misam_info.reflength; share->db_options_in_use= misam_info.options; stats.block_size= myisam_block_size; /* record block size */ /* Update share. lock_shared_ha_data is slighly abused here, since there is no other way of locking the TABLE_SHARE. */ lock_shared_ha_data(); share->keys_in_use.set_prefix(share->keys); share->keys_in_use.intersect_extended(misam_info.key_map); share->keys_for_keyread.intersect(share->keys_in_use); share->db_record_offset= misam_info.record_offset; unlock_shared_ha_data(); if (share->key_parts) memcpy((char*) table->key_info[0].rec_per_key, (char*) misam_info.rec_per_key, sizeof(table->key_info[0].rec_per_key[0])*share->key_parts); /* Set data_file_name and index_file_name to point at the symlink value if table is symlinked (Ie; Real name is not same as generated name) */ data_file_name= index_file_name= 0; fn_format(name_buff, file->filename, "", MI_NAME_DEXT, MY_APPEND_EXT | MY_UNPACK_FILENAME); if (strcmp(name_buff, misam_info.data_file_name)) data_file_name=misam_info.data_file_name; fn_format(name_buff, file->filename, "", MI_NAME_IEXT, MY_APPEND_EXT | MY_UNPACK_FILENAME); if (strcmp(name_buff, misam_info.index_file_name)) index_file_name=misam_info.index_file_name; } if (flag & HA_STATUS_ERRKEY) { errkey = misam_info.errkey; my_store_ptr(dup_ref, ref_length, misam_info.dupp_key_pos); } if (flag & HA_STATUS_TIME) stats.update_time = (ulong) misam_info.update_time; if (flag & HA_STATUS_AUTO) stats.auto_increment_value= misam_info.auto_increment; return 0; } int ha_myisam::extra(enum ha_extra_function operation) { if (operation == HA_EXTRA_MMAP && !opt_myisam_use_mmap) return 0; return mi_extra(file, operation, 0); } int ha_myisam::reset(void) { /* Reset MyISAM specific part for index condition pushdown */ DBUG_ASSERT(pushed_idx_cond == NULL); DBUG_ASSERT(pushed_idx_cond_keyno == MAX_KEY); mi_set_index_cond_func(file, NULL, 0); ds_mrr.reset(); return mi_reset(file); } /* To be used with WRITE_CACHE and EXTRA_CACHE */ int ha_myisam::extra_opt(enum ha_extra_function operation, ulong cache_size) { return mi_extra(file, operation, (void*) &cache_size); } int ha_myisam::delete_all_rows() { return mi_delete_all_rows(file); } /* Intended to support partitioning. Allows a particular partition to be truncated. */ int ha_myisam::truncate() { int error= delete_all_rows(); return error ? error : reset_auto_increment(0); } int ha_myisam::reset_auto_increment(ulonglong value) { file->s->state.auto_increment= value; return 0; } int ha_myisam::delete_table(const char *name) { return mi_delete_table(name); } int ha_myisam::external_lock(THD *thd, int lock_type) { file->in_use.data= thd; return mi_lock_database(file, !table->s->tmp_table ? lock_type : ((lock_type == F_UNLCK) ? F_UNLCK : F_EXTRA_LCK)); } THR_LOCK_DATA **ha_myisam::store_lock(THD *thd, THR_LOCK_DATA **to, enum thr_lock_type lock_type) { if (lock_type != TL_IGNORE && file->lock.type == TL_UNLOCK) file->lock.type=lock_type; *to++= &file->lock; return to; } void ha_myisam::update_create_info(HA_CREATE_INFO *create_info) { ha_myisam::info(HA_STATUS_AUTO | HA_STATUS_CONST); if (!(create_info->used_fields & HA_CREATE_USED_AUTO)) { create_info->auto_increment_value= stats.auto_increment_value; } create_info->data_file_name=data_file_name; create_info->index_file_name=index_file_name; } int ha_myisam::create(const char *name, register TABLE *table_arg, HA_CREATE_INFO *ha_create_info) { int error; uint create_flags= 0, records, i; char buff[FN_REFLEN]; MI_KEYDEF *keydef; MI_COLUMNDEF *recinfo; MI_CREATE_INFO create_info; TABLE_SHARE *share= table_arg->s; uint options= share->db_options_in_use; DBUG_ENTER("ha_myisam::create"); for (i= 0; i < share->keys; i++) { if (table_arg->key_info[i].flags & HA_USES_PARSER) { create_flags|= HA_CREATE_RELIES_ON_SQL_LAYER; break; } } if ((error= table2myisam(table_arg, &keydef, &recinfo, &records))) DBUG_RETURN(error); /* purecov: inspected */ memset(&create_info, 0, sizeof(create_info)); create_info.max_rows= share->max_rows; create_info.reloc_rows= share->min_rows; create_info.with_auto_increment= share->next_number_key_offset == 0; create_info.auto_increment= (ha_create_info->auto_increment_value ? ha_create_info->auto_increment_value -1 : (ulonglong) 0); create_info.data_file_length= ((ulonglong) share->max_rows * share->avg_row_length); create_info.language= share->table_charset->number; #ifdef HAVE_READLINK if (my_use_symdir) { create_info.data_file_name= ha_create_info->data_file_name; create_info.index_file_name= ha_create_info->index_file_name; } else #endif /* HAVE_READLINK */ { if (ha_create_info->data_file_name) push_warning_printf(table_arg->in_use, Sql_condition::WARN_LEVEL_WARN, WARN_OPTION_IGNORED, ER(WARN_OPTION_IGNORED), "DATA DIRECTORY"); if (ha_create_info->index_file_name) push_warning_printf(table_arg->in_use, Sql_condition::WARN_LEVEL_WARN, WARN_OPTION_IGNORED, ER(WARN_OPTION_IGNORED), "INDEX DIRECTORY"); } if (ha_create_info->options & HA_LEX_CREATE_TMP_TABLE) create_flags|= HA_CREATE_TMP_TABLE; if (ha_create_info->options & HA_CREATE_KEEP_FILES) create_flags|= HA_CREATE_KEEP_FILES; if (options & HA_OPTION_PACK_RECORD) create_flags|= HA_PACK_RECORD; if (options & HA_OPTION_CHECKSUM) create_flags|= HA_CREATE_CHECKSUM; if (options & HA_OPTION_DELAY_KEY_WRITE) create_flags|= HA_CREATE_DELAY_KEY_WRITE; /* TODO: Check that the following fn_format is really needed */ error= mi_create(fn_format(buff, name, "", "", MY_UNPACK_FILENAME|MY_APPEND_EXT), share->keys, keydef, records, recinfo, 0, (MI_UNIQUEDEF*) 0, &create_info, create_flags); my_free(recinfo); DBUG_RETURN(error); } int ha_myisam::rename_table(const char * from, const char * to) { return mi_rename(from,to); } void ha_myisam::get_auto_increment(ulonglong offset, ulonglong increment, ulonglong nb_desired_values, ulonglong *first_value, ulonglong *nb_reserved_values) { ulonglong nr; int error; uchar key[MI_MAX_KEY_LENGTH]; if (!table->s->next_number_key_offset) { // Autoincrement at key-start ha_myisam::info(HA_STATUS_AUTO); *first_value= stats.auto_increment_value; /* MyISAM has only table-level lock, so reserves to +inf */ *nb_reserved_values= ULONGLONG_MAX; return; } /* it's safe to call the following if bulk_insert isn't on */ mi_flush_bulk_insert(file, table->s->next_number_index); (void) extra(HA_EXTRA_KEYREAD); key_copy(key, table->record[0], table->key_info + table->s->next_number_index, table->s->next_number_key_offset); error= mi_rkey(file, table->record[1], (int) table->s->next_number_index, key, make_prev_keypart_map(table->s->next_number_keypart), HA_READ_PREFIX_LAST); if (error) nr= 1; else { /* Get data from record[1] */ nr= ((ulonglong) table->next_number_field-> val_int_offset(table->s->rec_buff_length)+1); } extra(HA_EXTRA_NO_KEYREAD); *first_value= nr; /* MySQL needs to call us for next row: assume we are inserting ("a",null) here, we return 3, and next this statement will want to insert ("b",null): there is no reason why ("b",3+1) would be the good row to insert: maybe it already exists, maybe 3+1 is too large... */ *nb_reserved_values= 1; } /* Find out how many rows there is in the given range SYNOPSIS records_in_range() inx Index to use min_key Start of range. Null pointer if from first key max_key End of range. Null pointer if to last key NOTES min_key.flag can have one of the following values: HA_READ_KEY_EXACT Include the key in the range HA_READ_AFTER_KEY Don't include key in range max_key.flag can have one of the following values: HA_READ_BEFORE_KEY Don't include key in range HA_READ_AFTER_KEY Include all 'end_key' values in the range RETURN HA_POS_ERROR Something is wrong with the index tree. 0 There is no matching keys in the given range number > 0 There is approximately 'number' matching rows in the range. */ ha_rows ha_myisam::records_in_range(uint inx, key_range *min_key, key_range *max_key) { return (ha_rows) mi_records_in_range(file, (int) inx, min_key, max_key); } int ha_myisam::ft_read(uchar *buf) { int error; if (!ft_handler) return -1; thread_safe_increment(table->in_use->status_var.ha_read_next_count, &LOCK_status); // why ? error=ft_handler->please->read_next(ft_handler,(char*) buf); table->status=error ? STATUS_NOT_FOUND: 0; return error; } uint ha_myisam::checksum() const { return (uint)file->state->checksum; } bool ha_myisam::check_if_incompatible_data(HA_CREATE_INFO *info, uint table_changes) { uint options= table->s->db_options_in_use; if (info->auto_increment_value != stats.auto_increment_value || info->data_file_name != data_file_name || info->index_file_name != index_file_name || table_changes == IS_EQUAL_NO || table_changes & IS_EQUAL_PACK_LENGTH) // Not implemented yet return COMPATIBLE_DATA_NO; if ((options & (HA_OPTION_PACK_RECORD | HA_OPTION_CHECKSUM | HA_OPTION_DELAY_KEY_WRITE)) != (info->table_options & (HA_OPTION_PACK_RECORD | HA_OPTION_CHECKSUM | HA_OPTION_DELAY_KEY_WRITE))) return COMPATIBLE_DATA_NO; return COMPATIBLE_DATA_YES; } extern int mi_panic(enum ha_panic_function flag); int myisam_panic(handlerton *hton, ha_panic_function flag) { return mi_panic(flag); } static int myisam_init(void *p) { handlerton *myisam_hton; #ifdef HAVE_PSI_INTERFACE init_myisam_psi_keys(); #endif /* Set global variables based on startup options */ if (myisam_recover_options) ha_open_options|=HA_OPEN_ABORT_IF_CRASHED; else myisam_recover_options= HA_RECOVER_OFF; myisam_block_size=(uint) 1 << my_bit_log2(opt_myisam_block_size); myisam_hton= (handlerton *)p; myisam_hton->state= SHOW_OPTION_YES; myisam_hton->db_type= DB_TYPE_MYISAM; myisam_hton->create= myisam_create_handler; myisam_hton->panic= myisam_panic; myisam_hton->flags= HTON_CAN_RECREATE | HTON_SUPPORT_LOG_TABLES; myisam_hton->is_supported_system_table= myisam_is_supported_system_table; return 0; } /**************************************************************************** * MyISAM MRR implementation: use DS-MRR ***************************************************************************/ int ha_myisam::multi_range_read_init(RANGE_SEQ_IF *seq, void *seq_init_param, uint n_ranges, uint mode, HANDLER_BUFFER *buf) { return ds_mrr.dsmrr_init(this, seq, seq_init_param, n_ranges, mode, buf); } int ha_myisam::multi_range_read_next(char **range_info) { return ds_mrr.dsmrr_next(range_info); } ha_rows ha_myisam::multi_range_read_info_const(uint keyno, RANGE_SEQ_IF *seq, void *seq_init_param, uint n_ranges, uint *bufsz, uint *flags, Cost_estimate *cost) { /* This call is here because there is no location where this->table would already be known. TODO: consider moving it into some per-query initialization call. */ ds_mrr.init(this, table); return ds_mrr.dsmrr_info_const(keyno, seq, seq_init_param, n_ranges, bufsz, flags, cost); } ha_rows ha_myisam::multi_range_read_info(uint keyno, uint n_ranges, uint keys, uint *bufsz, uint *flags, Cost_estimate *cost) { ds_mrr.init(this, table); return ds_mrr.dsmrr_info(keyno, n_ranges, keys, bufsz, flags, cost); } /* MyISAM MRR implementation ends */ /* Index condition pushdown implementation*/ Item *ha_myisam::idx_cond_push(uint keyno_arg, Item* idx_cond_arg) { /* Check if the key contains a blob field. If it does then MyISAM should not accept the pushed index condition since MyISAM will not read the blob field from the index entry during evaluation of the pushed index condition and the BLOB field might be part of the range evaluation done by the ICP code. */ const KEY *key= &table_share->key_info[keyno_arg]; for (uint k= 0; k < key->user_defined_key_parts; ++k) { const KEY_PART_INFO *key_part= &key->key_part[k]; if (key_part->key_part_flag & HA_BLOB_PART) { /* Let the server handle the index condition */ return idx_cond_arg; } } pushed_idx_cond_keyno= keyno_arg; pushed_idx_cond= idx_cond_arg; in_range_check_pushed_down= TRUE; if (active_index == pushed_idx_cond_keyno) mi_set_index_cond_func(file, index_cond_func_myisam, this); return NULL; } static struct st_mysql_sys_var* myisam_sysvars[]= { MYSQL_SYSVAR(block_size), MYSQL_SYSVAR(data_pointer_size), MYSQL_SYSVAR(max_sort_file_size), MYSQL_SYSVAR(recover_options), MYSQL_SYSVAR(repair_threads), MYSQL_SYSVAR(sort_buffer_size), MYSQL_SYSVAR(use_mmap), MYSQL_SYSVAR(mmap_size), MYSQL_SYSVAR(stats_method), 0 }; struct st_mysql_storage_engine myisam_storage_engine= { MYSQL_HANDLERTON_INTERFACE_VERSION }; mysql_declare_plugin(myisam) { MYSQL_STORAGE_ENGINE_PLUGIN, &myisam_storage_engine, "MyISAM", "MySQL AB", "MyISAM storage engine", PLUGIN_LICENSE_GPL, myisam_init, /* Plugin Init */ NULL, /* Plugin Deinit */ 0x0100, /* 1.0 */ NULL, /* status variables */ myisam_sysvars, /* system variables */ NULL, 0, } mysql_declare_plugin_end; #ifdef HAVE_QUERY_CACHE /** @brief Register a named table with a call back function to the query cache. @param thd The thread handle @param table_key A pointer to the table name in the table cache @param key_length The length of the table name @param[out] engine_callback The pointer to the storage engine call back function, currently 0 @param[out] engine_data Engine data will be set to 0. @note Despite the name of this function, it is used to check each statement before it is cached and not to register a table or callback function. @see handler::register_query_cache_table @return The error code. The engine_data and engine_callback will be set to 0. @retval TRUE Success @retval FALSE An error occured */ my_bool ha_myisam::register_query_cache_table(THD *thd, char *table_name, uint table_name_len, qc_engine_callback *engine_callback, ulonglong *engine_data) { DBUG_ENTER("ha_myisam::register_query_cache_table"); /* No call back function is needed to determine if a cached statement is valid or not. */ *engine_callback= 0; /* No engine data is needed. */ *engine_data= 0; if (file->s->concurrent_insert) { /* If a concurrent INSERT has happened just before the currently processed SELECT statement, the total size of the table is unknown. To determine if the table size is known, the current thread's snap shot of the table size with the actual table size are compared. If the table size is unknown the SELECT statement can't be cached. When concurrent inserts are disabled at table open, mi_open() does not assign a get_status() function. In this case the local ("current") status is never updated. We would wrongly think that we cannot cache the statement. */ ulonglong actual_data_file_length; ulonglong current_data_file_length; /* POSIX visibility rules specify that "2. Whatever memory values a thread can see when it unlocks a mutex <...> can also be seen by any thread that later locks the same mutex". In this particular case, concurrent insert thread had modified the data_file_length in MYISAM_SHARE before it has unlocked (or even locked) structure_guard_mutex. So, here we're guaranteed to see at least that value after we've locked the same mutex. We can see a later value (modified by some other thread) though, but it's ok, as we only want to know if the variable was changed, the actual new value doesn't matter */ actual_data_file_length= file->s->state.state.data_file_length; current_data_file_length= file->save_state.data_file_length; if (current_data_file_length != actual_data_file_length) { /* Don't cache current statement. */ DBUG_RETURN(FALSE); } } /* This query execution might have started after the query cache was flushed by a concurrent INSERT. In this case, don't cache this statement as the data file length difference might not be visible yet if the tables haven't been unlocked by the concurrent insert thread. */ if (file->state->uncacheable) DBUG_RETURN(FALSE); /* It is ok to try to cache current statement. */ DBUG_RETURN(TRUE); } #endif<|fim▁end|>
for (element= file->s->in_use; element; element= list_rest(element)) { THD *thd= (THD*) element->data;
<|file_name|>urls.py<|end_file_name|><|fim▁begin|><|fim▁hole|># You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from django.conf.urls.defaults import * import re urlpatterns = patterns(re.sub(r'[^.]*$', "views", __name__), (r'^$', 'index'), (r'^(?P<admin>admin)/(?P<user>.*?)/$', 'index'), (r'^((?P<event_key>.*?)/)?edit/$', 'edit'), (r'^(?P<ref_key>.*?)/((?P<event_key>.*?)/)?edit/event/$', 'editPureEvent'), #(r'^(?P<location_key>\w+)/update/$', 'update'), # Uncomment this for admin: # (r'^admin/', include('django.contrib.admin.urls')), )<|fim▁end|>
# Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License.
<|file_name|>oc_secret.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # pylint: disable=too-many-lines # ___ ___ _ _ ___ ___ _ _____ ___ ___ # / __| __| \| | __| _ \ /_\_ _| __| \ # | (_ | _|| .` | _|| / / _ \| | | _|| |) | # \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____ # | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _| # | |) | (_) | | .` | (_) || | | _|| |) | | | | # |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_| ''' OpenShiftCLI class that wraps the oc commands in a subprocess ''' # pylint: disable=too-many-lines import atexit import json import os import re import shutil import subprocess import ruamel.yaml as yaml #import yaml # ## This is here because of a bug that causes yaml ## to incorrectly handle timezone info on timestamps #def timestamp_constructor(_, node): # '''return timestamps as strings''' # return str(node.value) #yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor) class OpenShiftCLIError(Exception): '''Exception class for openshiftcli''' pass # pylint: disable=too-few-public-methods class OpenShiftCLI(object): ''' Class to wrap the command line tools ''' def __init__(self, namespace, kubeconfig='/etc/origin/master/admin.kubeconfig', verbose=False, all_namespaces=False): ''' Constructor for OpenshiftCLI ''' self.namespace = namespace self.verbose = verbose self.kubeconfig = kubeconfig self.all_namespaces = all_namespaces # Pylint allows only 5 arguments to be passed. # pylint: disable=too-many-arguments def _replace_content(self, resource, rname, content, force=False, sep='.'): ''' replace the current object with the content ''' res = self._get(resource, rname) if not res['results']: return res fname = '/tmp/%s' % rname yed = Yedit(fname, res['results'][0], separator=sep) changes = [] for key, value in content.items(): changes.append(yed.put(key, value)) if any([change[0] for change in changes]): yed.write() atexit.register(Utils.cleanup, [fname]) return self._replace(fname, force) return {'returncode': 0, 'updated': False} def _replace(self, fname, force=False): '''return all pods ''' cmd = ['-n', self.namespace, 'replace', '-f', fname] if force: cmd.append('--force') return self.openshift_cmd(cmd) def _create_from_content(self, rname, content): '''return all pods ''' fname = '/tmp/%s' % rname yed = Yedit(fname, content=content) yed.write() atexit.register(Utils.cleanup, [fname]) return self._create(fname) def _create(self, fname): '''return all pods ''' return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace]) def _delete(self, resource, rname, selector=None): '''return all pods ''' cmd = ['delete', resource, rname, '-n', self.namespace] if selector: cmd.append('--selector=%s' % selector) return self.openshift_cmd(cmd) def _process(self, template_name, create=False, params=None, template_data=None): '''return all pods ''' cmd = ['process', '-n', self.namespace] if template_data: cmd.extend(['-f', '-']) else: cmd.append(template_name) if params: param_str = ["%s=%s" % (key, value) for key, value in params.items()] cmd.append('-v') cmd.extend(param_str) results = self.openshift_cmd(cmd, output=True, input_data=template_data) if results['returncode'] != 0 or not create: return results fname = '/tmp/%s' % template_name yed = Yedit(fname, results['results']) yed.write() atexit.register(Utils.cleanup, [fname]) return self.openshift_cmd(['-n', self.namespace, 'create', '-f', fname]) def _get(self, resource, rname=None, selector=None): '''return a resource by name ''' cmd = ['get', resource] if selector: cmd.append('--selector=%s' % selector) if self.all_namespaces: cmd.extend(['--all-namespaces']) elif self.namespace: cmd.extend(['-n', self.namespace]) cmd.extend(['-o', 'json']) if rname: cmd.append(rname) rval = self.openshift_cmd(cmd, output=True) # Ensure results are retuned in an array if rval.has_key('items'): rval['results'] = rval['items'] elif not isinstance(rval['results'], list): rval['results'] = [rval['results']] return rval def _schedulable(self, node=None, selector=None, schedulable=True): ''' perform oadm manage-node scheduable ''' cmd = ['manage-node'] if node: cmd.extend(node)<|fim▁hole|> return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') def _list_pods(self, node=None, selector=None, pod_selector=None): ''' perform oadm manage-node evacuate ''' cmd = ['manage-node'] if node: cmd.extend(node) else: cmd.append('--selector=%s' % selector) if pod_selector: cmd.append('--pod-selector=%s' % pod_selector) cmd.extend(['--list-pods', '-o', 'json']) return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') #pylint: disable=too-many-arguments def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False): ''' perform oadm manage-node evacuate ''' cmd = ['manage-node'] if node: cmd.extend(node) else: cmd.append('--selector=%s' % selector) if dry_run: cmd.append('--dry-run') if pod_selector: cmd.append('--pod-selector=%s' % pod_selector) if grace_period: cmd.append('--grace-period=%s' % int(grace_period)) if force: cmd.append('--force') cmd.append('--evacuate') return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw') def _import_image(self, url=None, name=None, tag=None): ''' perform image import ''' cmd = ['import-image'] image = '{0}'.format(name) if tag: image += ':{0}'.format(tag) cmd.append(image) if url: cmd.append('--from={0}/{1}'.format(url, image)) cmd.append('-n{0}'.format(self.namespace)) cmd.append('--confirm') return self.openshift_cmd(cmd) #pylint: disable=too-many-arguments def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None): '''Base command for oc ''' cmds = [] if oadm: cmds = ['/usr/bin/oc', 'adm'] else: cmds = ['/usr/bin/oc'] cmds.extend(cmd) rval = {} results = '' err = None if self.verbose: print ' '.join(cmds) proc = subprocess.Popen(cmds, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env={'KUBECONFIG': self.kubeconfig}) stdout, stderr = proc.communicate(input_data) rval = {"returncode": proc.returncode, "results": results, "cmd": ' '.join(cmds), } if proc.returncode == 0: if output: if output_type == 'json': try: rval['results'] = json.loads(stdout) except ValueError as err: if "No JSON object could be decoded" in err.message: err = err.message elif output_type == 'raw': rval['results'] = stdout if self.verbose: print stdout print stderr if err: rval.update({"err": err, "stderr": stderr, "stdout": stdout, "cmd": cmds }) else: rval.update({"stderr": stderr, "stdout": stdout, "results": {}, }) return rval class Utils(object): ''' utilities for openshiftcli modules ''' @staticmethod def create_file(rname, data, ftype='yaml'): ''' create a file in tmp with name and contents''' path = os.path.join('/tmp', rname) with open(path, 'w') as fds: if ftype == 'yaml': fds.write(yaml.dump(data, Dumper=yaml.RoundTripDumper)) elif ftype == 'json': fds.write(json.dumps(data)) else: fds.write(data) # Register cleanup when module is done atexit.register(Utils.cleanup, [path]) return path @staticmethod def create_files_from_contents(content, content_type=None): '''Turn an array of dict: filename, content into a files array''' if not isinstance(content, list): content = [content] files = [] for item in content: path = Utils.create_file(item['path'], item['data'], ftype=content_type) files.append({'name': os.path.basename(path), 'path': path}) return files @staticmethod def cleanup(files): '''Clean up on exit ''' for sfile in files: if os.path.exists(sfile): if os.path.isdir(sfile): shutil.rmtree(sfile) elif os.path.isfile(sfile): os.remove(sfile) @staticmethod def exists(results, _name): ''' Check to see if the results include the name ''' if not results: return False if Utils.find_result(results, _name): return True return False @staticmethod def find_result(results, _name): ''' Find the specified result by name''' rval = None for result in results: if result.has_key('metadata') and result['metadata']['name'] == _name: rval = result break return rval @staticmethod def get_resource_file(sfile, sfile_type='yaml'): ''' return the service file ''' contents = None with open(sfile) as sfd: contents = sfd.read() if sfile_type == 'yaml': contents = yaml.load(contents, yaml.RoundTripLoader) elif sfile_type == 'json': contents = json.loads(contents) return contents # Disabling too-many-branches. This is a yaml dictionary comparison function # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements @staticmethod def check_def_equal(user_def, result_def, skip_keys=None, debug=False): ''' Given a user defined definition, compare it with the results given back by our query. ''' # Currently these values are autogenerated and we do not need to check them skip = ['metadata', 'status'] if skip_keys: skip.extend(skip_keys) for key, value in result_def.items(): if key in skip: continue # Both are lists if isinstance(value, list): if not user_def.has_key(key): if debug: print 'User data does not have key [%s]' % key print 'User data: %s' % user_def return False if not isinstance(user_def[key], list): if debug: print 'user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key]) return False if len(user_def[key]) != len(value): if debug: print "List lengths are not equal." print "key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value)) print "user_def: %s" % user_def[key] print "value: %s" % value return False for values in zip(user_def[key], value): if isinstance(values[0], dict) and isinstance(values[1], dict): if debug: print 'sending list - list' print type(values[0]) print type(values[1]) result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug) if not result: print 'list compare returned false' return False elif value != user_def[key]: if debug: print 'value should be identical' print value print user_def[key] return False # recurse on a dictionary elif isinstance(value, dict): if not user_def.has_key(key): if debug: print "user_def does not have key [%s]" % key return False if not isinstance(user_def[key], dict): if debug: print "dict returned false: not instance of dict" return False # before passing ensure keys match api_values = set(value.keys()) - set(skip) user_values = set(user_def[key].keys()) - set(skip) if api_values != user_values: if debug: print "keys are not equal in dict" print api_values print user_values return False result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug) if not result: if debug: print "dict returned false" print result return False # Verify each key, value pair is the same else: if not user_def.has_key(key) or value != user_def[key]: if debug: print "value not equal; user_def does not have key" print key print value if user_def.has_key(key): print user_def[key] return False if debug: print 'returning true' return True class OpenShiftCLIConfig(object): '''Generic Config''' def __init__(self, rname, namespace, kubeconfig, options): self.kubeconfig = kubeconfig self.name = rname self.namespace = namespace self._options = options @property def config_options(self): ''' return config options ''' return self._options def to_option_list(self): '''return all options as a string''' return self.stringify() def stringify(self): ''' return the options hash as cli params in a string ''' rval = [] for key, data in self.config_options.items(): if data['include'] \ and (data['value'] or isinstance(data['value'], int)): rval.append('--%s=%s' % (key.replace('_', '-'), data['value'])) return rval class YeditException(Exception): ''' Exception class for Yedit ''' pass class Yedit(object): ''' Class to modify yaml files ''' re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$" re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z%s/_-]+)" com_sep = set(['.', '#', '|', ':']) # pylint: disable=too-many-arguments def __init__(self, filename=None, content=None, content_type='yaml', separator='.', backup=False): self.content = content self._separator = separator self.filename = filename self.__yaml_dict = content self.content_type = content_type self.backup = backup self.load(content_type=self.content_type) if self.__yaml_dict == None: self.__yaml_dict = {} @property def separator(self): ''' getter method for yaml_dict ''' return self._separator @separator.setter def separator(self): ''' getter method for yaml_dict ''' return self._separator @property def yaml_dict(self): ''' getter method for yaml_dict ''' return self.__yaml_dict @yaml_dict.setter def yaml_dict(self, value): ''' setter method for yaml_dict ''' self.__yaml_dict = value @staticmethod def parse_key(key, sep='.'): '''parse the key allowing the appropriate separator''' common_separators = list(Yedit.com_sep - set([sep])) return re.findall(Yedit.re_key % ''.join(common_separators), key) @staticmethod def valid_key(key, sep='.'): '''validate the incoming key''' common_separators = list(Yedit.com_sep - set([sep])) if not re.match(Yedit.re_valid_key % ''.join(common_separators), key): return False return True @staticmethod def remove_entry(data, key, sep='.'): ''' remove data at location key ''' if key == '' and isinstance(data, dict): data.clear() return True elif key == '' and isinstance(data, list): del data[:] return True if not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)): return None key_indexes = Yedit.parse_key(key, sep) for arr_ind, dict_key in key_indexes[:-1]: if dict_key and isinstance(data, dict): data = data.get(dict_key, None) elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1: data = data[int(arr_ind)] else: return None # process last index for remove # expected list entry if key_indexes[-1][0]: if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: del data[int(key_indexes[-1][0])] return True # expected dict entry elif key_indexes[-1][1]: if isinstance(data, dict): del data[key_indexes[-1][1]] return True @staticmethod def add_entry(data, key, item=None, sep='.'): ''' Get an item from a dictionary with key notation a.b.c d = {'a': {'b': 'c'}}} key = a#b return c ''' if key == '': pass elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)): return None key_indexes = Yedit.parse_key(key, sep) for arr_ind, dict_key in key_indexes[:-1]: if dict_key: if isinstance(data, dict) and data.has_key(dict_key) and data[dict_key]: data = data[dict_key] continue elif data and not isinstance(data, dict): raise YeditException("Unexpected item type found while going through key " + "path: {} (at key: {})".format(key, dict_key)) data[dict_key] = {} data = data[dict_key] elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1: data = data[int(arr_ind)] else: raise YeditException("Unexpected item type found while going through key path: {}".format(key)) if key == '': data = item # process last index for add # expected list entry elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1: data[int(key_indexes[-1][0])] = item # expected dict entry elif key_indexes[-1][1] and isinstance(data, dict): data[key_indexes[-1][1]] = item # didn't add/update to an existing list, nor add/update key to a dict # so we must have been provided some syntax like a.b.c[<int>] = "data" for a # non-existent array else: raise YeditException("Error adding data to object at path: {}".format(key)) return data @staticmethod def get_entry(data, key, sep='.'): ''' Get an item from a dictionary with key notation a.b.c d = {'a': {'b': 'c'}}} key = a.b return c ''' if key == '': pass elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)): return None key_indexes = Yedit.parse_key(key, sep) for arr_ind, dict_key in key_indexes: if dict_key and isinstance(data, dict): data = data.get(dict_key, None) elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1: data = data[int(arr_ind)] else: return None return data def write(self): ''' write to file ''' if not self.filename: raise YeditException('Please specify a filename.') if self.backup and self.file_exists(): shutil.copy(self.filename, self.filename + '.orig') tmp_filename = self.filename + '.yedit' try: with open(tmp_filename, 'w') as yfd: # pylint: disable=no-member,maybe-no-member if hasattr(self.yaml_dict, 'fa'): self.yaml_dict.fa.set_block_style() yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper)) except Exception as err: raise YeditException(err.message) os.rename(tmp_filename, self.filename) return (True, self.yaml_dict) def read(self): ''' read from file ''' # check if it exists if self.filename == None or not self.file_exists(): return None contents = None with open(self.filename) as yfd: contents = yfd.read() return contents def file_exists(self): ''' return whether file exists ''' if os.path.exists(self.filename): return True return False def load(self, content_type='yaml'): ''' return yaml file ''' contents = self.read() if not contents and not self.content: return None if self.content: if isinstance(self.content, dict): self.yaml_dict = self.content return self.yaml_dict elif isinstance(self.content, str): contents = self.content # check if it is yaml try: if content_type == 'yaml' and contents: self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader) # pylint: disable=no-member,maybe-no-member if hasattr(self.yaml_dict, 'fa'): self.yaml_dict.fa.set_block_style() elif content_type == 'json' and contents: self.yaml_dict = json.loads(contents) except yaml.YAMLError as err: # Error loading yaml or json raise YeditException('Problem with loading yaml file. %s' % err) return self.yaml_dict def get(self, key): ''' get a specified key''' try: entry = Yedit.get_entry(self.yaml_dict, key, self.separator) except KeyError as _: entry = None return entry def pop(self, path, key_or_item): ''' remove a key, value pair from a dict or an item for a list''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError as _: entry = None if entry == None: return (False, self.yaml_dict) if isinstance(entry, dict): # pylint: disable=no-member,maybe-no-member if entry.has_key(key_or_item): entry.pop(key_or_item) return (True, self.yaml_dict) return (False, self.yaml_dict) elif isinstance(entry, list): # pylint: disable=no-member,maybe-no-member ind = None try: ind = entry.index(key_or_item) except ValueError: return (False, self.yaml_dict) entry.pop(ind) return (True, self.yaml_dict) return (False, self.yaml_dict) def delete(self, path): ''' remove path from a dict''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError as _: entry = None if entry == None: return (False, self.yaml_dict) result = Yedit.remove_entry(self.yaml_dict, path, self.separator) if not result: return (False, self.yaml_dict) return (True, self.yaml_dict) def exists(self, path, value): ''' check if value exists at path''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError as _: entry = None if isinstance(entry, list): if value in entry: return True return False elif isinstance(entry, dict): if isinstance(value, dict): rval = False for key, val in value.items(): if entry[key] != val: rval = False break else: rval = True return rval return value in entry return entry == value def append(self, path, value): '''append value to a list''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError as _: entry = None if entry is None: self.put(path, []) entry = Yedit.get_entry(self.yaml_dict, path, self.separator) if not isinstance(entry, list): return (False, self.yaml_dict) # pylint: disable=no-member,maybe-no-member entry.append(value) return (True, self.yaml_dict) # pylint: disable=too-many-arguments def update(self, path, value, index=None, curr_value=None): ''' put path, value into a dict ''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError as _: entry = None if isinstance(entry, dict): # pylint: disable=no-member,maybe-no-member if not isinstance(value, dict): raise YeditException('Cannot replace key, value entry in dict with non-dict type.' \ ' value=[%s] [%s]' % (value, type(value))) entry.update(value) return (True, self.yaml_dict) elif isinstance(entry, list): # pylint: disable=no-member,maybe-no-member ind = None if curr_value: try: ind = entry.index(curr_value) except ValueError: return (False, self.yaml_dict) elif index != None: ind = index if ind != None and entry[ind] != value: entry[ind] = value return (True, self.yaml_dict) # see if it exists in the list try: ind = entry.index(value) except ValueError: # doesn't exist, append it entry.append(value) return (True, self.yaml_dict) #already exists, return if ind != None: return (False, self.yaml_dict) return (False, self.yaml_dict) def put(self, path, value): ''' put path, value into a dict ''' try: entry = Yedit.get_entry(self.yaml_dict, path, self.separator) except KeyError as _: entry = None if entry == value: return (False, self.yaml_dict) # deepcopy didn't work tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader) # pylint: disable=no-member if hasattr(self.yaml_dict, 'fa'): tmp_copy.fa.set_block_style() result = Yedit.add_entry(tmp_copy, path, value, self.separator) if not result: return (False, self.yaml_dict) self.yaml_dict = tmp_copy return (True, self.yaml_dict) def create(self, path, value): ''' create a yaml file ''' if not self.file_exists(): # deepcopy didn't work tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader) # pylint: disable=no-member if hasattr(self.yaml_dict, 'fa'): tmp_copy.fa.set_block_style() result = Yedit.add_entry(tmp_copy, path, value, self.separator) if result: self.yaml_dict = tmp_copy return (True, self.yaml_dict) return (False, self.yaml_dict) import base64 # pylint: disable=too-many-arguments class Secret(OpenShiftCLI): ''' Class to wrap the oc command line tools ''' def __init__(self, namespace, secret_name=None, decode=False, kubeconfig='/etc/origin/master/admin.kubeconfig', verbose=False): ''' Constructor for OpenshiftOC ''' super(Secret, self).__init__(namespace, kubeconfig) self.namespace = namespace self.name = secret_name self.kubeconfig = kubeconfig self.decode = decode self.verbose = verbose def get(self): '''return a secret by name ''' results = self._get('secrets', self.name) results['decoded'] = {} results['exists'] = False if results['returncode'] == 0 and results['results'][0]: results['exists'] = True if self.decode: if results['results'][0].has_key('data'): for sname, value in results['results'][0]['data'].items(): results['decoded'][sname] = base64.decodestring(value) if results['returncode'] != 0 and '"%s" not found' % self.name in results['stderr']: results['returncode'] = 0 return results def delete(self): '''delete a secret by name''' return self._delete('secrets', self.name) def create(self, files=None, contents=None, content_type=None): '''Create a secret ''' if not files: files = Utils.create_files_from_contents(contents, content_type=content_type) secrets = ["%s=%s" % (sfile['name'], sfile['path']) for sfile in files] cmd = ['-n%s' % self.namespace, 'secrets', 'new', self.name] cmd.extend(secrets) return self.openshift_cmd(cmd) def update(self, files, force=False): '''run update secret This receives a list of file names and converts it into a secret. The secret is then written to disk and passed into the `oc replace` command. ''' secret = self.prep_secret(files) if secret['returncode'] != 0: return secret sfile_path = '/tmp/%s' % self.name with open(sfile_path, 'w') as sfd: sfd.write(json.dumps(secret['results'])) atexit.register(Utils.cleanup, [sfile_path]) return self._replace(sfile_path, force=force) def prep_secret(self, files=None, contents=None): ''' return what the secret would look like if created This is accomplished by passing -ojson. This will most likely change in the future ''' if not files: files = Utils.create_files_from_contents(contents) secrets = ["%s=%s" % (sfile['name'], sfile['path']) for sfile in files] cmd = ['-ojson', '-n%s' % self.namespace, 'secrets', 'new', self.name] cmd.extend(secrets) return self.openshift_cmd(cmd, output=True) # pylint: disable=too-many-branches def main(): ''' ansible oc module for secrets ''' module = AnsibleModule( argument_spec=dict( kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'), state=dict(default='present', type='str', choices=['present', 'absent', 'list']), debug=dict(default=False, type='bool'), namespace=dict(default='default', type='str'), name=dict(default=None, type='str'), files=dict(default=None, type='list'), delete_after=dict(default=False, type='bool'), contents=dict(default=None, type='list'), content_type=dict(default='raw', choices=['yaml', 'json', 'raw'], type='str'), force=dict(default=False, type='bool'), decode=dict(default=False, type='bool'), ), mutually_exclusive=[["contents", "files"]], supports_check_mode=True, ) occmd = Secret(module.params['namespace'], module.params['name'], module.params['decode'], kubeconfig=module.params['kubeconfig'], verbose=module.params['debug']) state = module.params['state'] api_rval = occmd.get() ##### # Get ##### if state == 'list': module.exit_json(changed=False, results=api_rval, state="list") if not module.params['name']: module.fail_json(msg='Please specify a name when state is absent|present.') ######## # Delete ######## if state == 'absent': if not Utils.exists(api_rval['results'], module.params['name']): module.exit_json(changed=False, state="absent") if module.check_mode: module.exit_json(changed=False, msg='Would have performed a delete.') api_rval = occmd.delete() module.exit_json(changed=True, results=api_rval, state="absent") if state == 'present': if module.params['files']: files = module.params['files'] elif module.params['contents']: files = Utils.create_files_from_contents(module.params['contents']) else: module.fail_json(msg='Either specify files or contents.') ######## # Create ######## if not Utils.exists(api_rval['results'], module.params['name']): if module.check_mode: module.exit_json(changed=False, msg='Would have performed a create.') api_rval = occmd.create(module.params['files'], module.params['contents']) # Remove files if files and module.params['delete_after']: Utils.cleanup([ftmp['path'] for ftmp in files]) if api_rval['returncode'] != 0: module.fail_json(msg=api_rval) module.exit_json(changed=True, results=api_rval, state="present") ######## # Update ######## secret = occmd.prep_secret(module.params['files'], module.params['contents']) if secret['returncode'] != 0: module.fail_json(msg=secret) if Utils.check_def_equal(secret['results'], api_rval['results'][0]): # Remove files if files and module.params['delete_after']: Utils.cleanup([ftmp['path'] for ftmp in files]) module.exit_json(changed=False, results=secret['results'], state="present") if module.check_mode: module.exit_json(changed=False, msg='Would have performed an update.') api_rval = occmd.update(files, force=module.params['force']) # Remove files if secret and module.params['delete_after']: Utils.cleanup([ftmp['path'] for ftmp in files]) if api_rval['returncode'] != 0: module.fail_json(msg=api_rval) module.exit_json(changed=True, results=api_rval, state="present") module.exit_json(failed=True, changed=False, results='Unknown state passed. %s' % state, state="unknown") # pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled # import module snippets. This are required from ansible.module_utils.basic import * main()<|fim▁end|>
else: cmd.append('--selector=%s' % selector) cmd.append('--schedulable=%s' % schedulable)
<|file_name|>0018_auto_20141219_1711.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): <|fim▁hole|> ] operations = [ migrations.RenameField( model_name='commission', old_name='estimated_price', new_name='estimated_display_price', ), migrations.AlterField( model_name='commission', name='customer', field=models.ForeignKey(related_name='commissions', to='public.Customer'), preserve_default=True, ), ]<|fim▁end|>
dependencies = [ ('public', '0017_auto_20141218_1813'),
<|file_name|>StateGraphVisualizationDirective.js<|end_file_name|><|fim▁begin|>// Copyright 2014 The Oppia Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS-IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.<|fim▁hole|>// limitations under the License. /** * @fileoverview Directive for the state graph visualization. */ /* eslint-disable angular/directive-restrict */ oppia.directive('stateGraphViz', [ 'UrlInterpolationService', function(UrlInterpolationService) { return { // Note: This directive is used as attribute because pannability does not // work when directive is used as element. (Convention in the codebase // is to use directive as element.) restrict: 'A', scope: { allowPanning: '@', centerAtCurrentState: '@', currentStateId: '&', // A function returning an object with these keys: // - 'nodes': An object whose keys are node ids and whose values are // node labels // - 'links': A list of objects with keys: // 'source': id of source node // 'target': id of target node // 'linkProperty': property of link which determines how // it is styled (styles in linkPropertyMapping). If // linkProperty or corresponding linkPropertyMatching // is undefined, link style defaults to the gray arrow. // - 'initStateId': The initial state id // - 'finalStateIds': The list of ids corresponding to terminal states // (i.e., those whose interactions are terminal). graphData: '&', // Object whose keys are ids of nodes to display a warning tooltip over highlightStates: '=', // Id of a second initial state, which will be styled as an initial // state initStateId2: '=', isEditable: '=', // Object which maps linkProperty to a style linkPropertyMapping: '=', // Object whose keys are node ids and whose values are node colors nodeColors: '=', // A value which is the color of all nodes nodeFill: '@', // Object whose keys are node ids with secondary labels and whose // values are secondary labels. If this is undefined, it means no nodes // have secondary labels. nodeSecondaryLabels: '=', // Function called when node is clicked. Should take a parameter // node.id. onClickFunction: '=', onDeleteFunction: '=', onMaximizeFunction: '=', // Object whose keys are ids of nodes, and whose values are the // corresponding node opacities. opacityMap: '=', showWarningSign: '@' }, templateUrl: UrlInterpolationService.getDirectiveTemplateUrl( '/pages/exploration_editor/editor_tab/' + 'state_graph_visualization_directive.html'), controller: [ '$scope', '$element', '$timeout', '$filter', 'StateGraphLayoutService', 'ExplorationWarningsService', 'MAX_NODES_PER_ROW', 'MAX_NODE_LABEL_LENGTH', function( $scope, $element, $timeout, $filter, StateGraphLayoutService, ExplorationWarningsService, MAX_NODES_PER_ROW, MAX_NODE_LABEL_LENGTH) { var redrawGraph = function() { if ($scope.graphData()) { $scope.graphLoaded = false; $scope.drawGraph( $scope.graphData().nodes, $scope.graphData().links, $scope.graphData().initStateId, $scope.graphData().finalStateIds ); // Wait for the graph to finish loading before showing it again. $timeout(function() { $scope.graphLoaded = true; }); } }; $scope.$on('redrawGraph', function() { redrawGraph(); }); $scope.$watch('graphData()', redrawGraph, true); $scope.$watch('currentStateId()', redrawGraph); // If statistics for a different version of the exploration are // loaded, this may change the opacities of the nodes. $scope.$watch('opacityMap', redrawGraph); $(window).resize(redrawGraph); var getElementDimensions = function() { return { h: $element.height(), w: $element.width() }; }; // Returns the closest number to `value` in the range // [bound1, bound2]. var clamp = function(value, bound1, bound2) { var minValue = Math.min(bound1, bound2); var maxValue = Math.max(bound1, bound2); return Math.min(Math.max(value, minValue), maxValue); }; $scope.getGraphHeightInPixels = function() { return Math.max($scope.GRAPH_HEIGHT, 300); }; $scope.drawGraph = function( nodes, originalLinks, initStateId, finalStateIds) { $scope.finalStateIds = finalStateIds; var links = angular.copy(originalLinks); var nodeData = StateGraphLayoutService.computeLayout( nodes, links, initStateId, angular.copy(finalStateIds)); $scope.GRAPH_WIDTH = StateGraphLayoutService.getGraphWidth( MAX_NODES_PER_ROW, MAX_NODE_LABEL_LENGTH); $scope.GRAPH_HEIGHT = StateGraphLayoutService.getGraphHeight( nodeData); nodeData = StateGraphLayoutService.modifyPositionValues( nodeData, $scope.GRAPH_WIDTH, $scope.GRAPH_HEIGHT); // These constants correspond to the rectangle that, when clicked // and dragged, translates the graph. Its height, width, and x and // y offsets are set to arbitrary large values so that the // draggable area extends beyond the graph. $scope.VIEWPORT_WIDTH = Math.max(10000, $scope.GRAPH_WIDTH * 5); $scope.VIEWPORT_HEIGHT = Math.max(10000, $scope.GRAPH_HEIGHT * 5); $scope.VIEWPORT_X = -Math.max(1000, $scope.GRAPH_WIDTH * 2); $scope.VIEWPORT_Y = -Math.max(1000, $scope.GRAPH_HEIGHT * 2); var graphBounds = StateGraphLayoutService.getGraphBoundaries( nodeData); $scope.augmentedLinks = StateGraphLayoutService.getAugmentedLinks( nodeData, links); for (var i = 0; i < $scope.augmentedLinks.length; i++) { // Style links if link properties and style mappings are // provided if (links[i].hasOwnProperty('linkProperty') && $scope.linkPropertyMapping) { if ($scope.linkPropertyMapping.hasOwnProperty( links[i].linkProperty)) { $scope.augmentedLinks[i].style = ( $scope.linkPropertyMapping[links[i].linkProperty]); } } } var getNodeStrokeWidth = function(nodeId) { var currentNodeIsTerminal = ( $scope.finalStateIds.indexOf(nodeId) !== -1); return ( nodeId === $scope.currentStateId() ? '3' : (nodeId === $scope.initStateId2 || currentNodeIsTerminal) ? '2' : '1'); }; var getNodeFillOpacity = function(nodeId) { return $scope.opacityMap ? $scope.opacityMap[nodeId] : 0.5; }; $scope.isStateFlagged = function(nodeId) { return ( $scope.highlightStates && $scope.highlightStates.hasOwnProperty(nodeId)); }; $scope.getNodeTitle = function(node) { var warning = ''; if (node.reachable === false) { warning = 'Warning: this state is unreachable.'; } else if (node.reachableFromEnd === false) { warning = ( 'Warning: there is no path from this state to the END state.' ); } var tooltip = node.label; if (node.hasOwnProperty('secondaryLabel')) { tooltip += ' ' + node.secondaryLabel; } if (warning) { tooltip += ' (' + warning + ')'; } return tooltip; }; $scope.onNodeDeletionClick = function(nodeId) { if (nodeId !== initStateId) { $scope.onDeleteFunction(nodeId); } }; $scope.getHighlightTransform = function(x0, y0) { return 'rotate(-10,' + (x0 - 10) + ',' + (y0 - 5) + ')'; }; $scope.getHighlightTextTransform = function(x0, y0) { return 'rotate(-10,' + x0 + ',' + (y0 - 4) + ')'; }; $scope.canNavigateToNode = function(nodeId) { return nodeId !== $scope.currentStateId(); }; $scope.getTruncatedLabel = function(nodeLabel) { return $filter('truncate')(nodeLabel, MAX_NODE_LABEL_LENGTH); }; // Update the nodes. $scope.nodeList = []; for (var nodeId in nodeData) { nodeData[nodeId].style = ( 'stroke-width: ' + getNodeStrokeWidth(nodeId) + '; ' + 'fill-opacity: ' + getNodeFillOpacity(nodeId) + ';'); if ($scope.nodeFill) { nodeData[nodeId].style += ('fill: ' + $scope.nodeFill + '; '); } // Color nodes if ($scope.nodeColors) { nodeData[nodeId].style += ( 'fill: ' + $scope.nodeColors[nodeId] + '; '); } // Add secondary label if it exists if ($scope.nodeSecondaryLabels) { if ($scope.nodeSecondaryLabels.hasOwnProperty(nodeId)) { nodeData[nodeId].secondaryLabel = ( $scope.nodeSecondaryLabels[nodeId]); nodeData[nodeId].height *= 1.1; } } var currentNodeIsTerminal = ( $scope.finalStateIds.indexOf(nodeId) !== -1); nodeData[nodeId].nodeClass = ( currentNodeIsTerminal ? 'terminal-node' : nodeId === $scope.currentStateId() ? 'current-node' : nodeId === initStateId ? 'init-node' : !(nodeData[nodeId].reachable && nodeData[nodeId].reachableFromEnd) ? 'bad-node' : 'normal-node'); nodeData[nodeId].canDelete = (nodeId !== initStateId); $scope.nodeList.push(nodeData[nodeId]); } $scope.getNodeErrorMessage = function(nodeLabel) { var warnings = ExplorationWarningsService.getAllStateRelatedWarnings(); if (warnings.hasOwnProperty(nodeLabel)) { return warnings[nodeLabel][0].toString(); } }; // The translation applied when the graph is first loaded. var origTranslations = [0, 0]; $scope.overallTransformStr = 'translate(0,0)'; $scope.innerTransformStr = 'translate(0,0)'; if ($scope.allowPanning) { // Without the timeout, $element.find fails to find the required // rect in the state graph modal dialog. $timeout(function() { var dimensions = getElementDimensions(); d3.select($element.find('rect.pannable-rect')[0]) .call(d3.behavior.zoom().scaleExtent([1, 1]) .on('zoom', function() { if (graphBounds.right - graphBounds.left < dimensions.w) { d3.event.translate[0] = 0; } else { d3.event.translate[0] = clamp( d3.event.translate[0], dimensions.w - graphBounds.right - origTranslations[0], -graphBounds.left - origTranslations[0]); } if (graphBounds.bottom - graphBounds.top < dimensions.h) { d3.event.translate[1] = 0; } else { d3.event.translate[1] = clamp( d3.event.translate[1], dimensions.h - graphBounds.bottom - origTranslations[1], -graphBounds.top - origTranslations[1]); } // We need a separate layer here so that the translation // does not influence the panning event receivers. $scope.innerTransformStr = ( 'translate(' + d3.event.translate + ')'); $scope.$apply(); }) ); }, 10); } if ($scope.centerAtCurrentState) { $timeout(function() { var dimensions = getElementDimensions(); // Center the graph at the node representing the current state. origTranslations[0] = ( dimensions.w / 2 - nodeData[$scope.currentStateId()].x0 - nodeData[$scope.currentStateId()].width / 2); origTranslations[1] = ( dimensions.h / 2 - nodeData[$scope.currentStateId()].y0 - nodeData[$scope.currentStateId()].height / 2); if (graphBounds.right - graphBounds.left < dimensions.w) { origTranslations[0] = ( dimensions.w / 2 - (graphBounds.right + graphBounds.left) / 2); } else { origTranslations[0] = clamp( origTranslations[0], dimensions.w - graphBounds.right, -graphBounds.left); } if (graphBounds.bottom - graphBounds.top < dimensions.h) { origTranslations[1] = ( dimensions.h / 2 - (graphBounds.bottom + graphBounds.top) / 2); } else { origTranslations[1] = clamp( origTranslations[1], dimensions.h - graphBounds.bottom, -graphBounds.top); } $scope.overallTransformStr = ( 'translate(' + origTranslations + ')'); $scope.$apply(); }, 20); } }; } ] }; }]); /* eslint-enable angular/directive-restrict */<|fim▁end|>
// See the License for the specific language governing permissions and
<|file_name|>settings.py<|end_file_name|><|fim▁begin|><|fim▁hole|>_TRACKING_USPS_URL = 'http://trkcnfrm1.smi.usps.com/PTSInternetWeb/InterLabelInquiry.do?origTrackNum=' TRACKING_USPS_URL = getattr(settings, 'OWNEY_USPS_TRACKING_URL', _TRACKING_USPS_URL) _USPS_API_URL = 'http://production.shippingapis.com/ShippingAPI.dll' USPS_API_URL = getattr(settings, 'OWNEY_USPS_API_URL', _USPS_API_URL) _USPS_API_USERID = 'Set your USPS API userid here' USPS_API_USERID = getattr(settings, 'OWNEY_USPS_API_USERID', _USPS_API_USERID) _CS_URL = 'Set the URL for your Customer Service application here' TRACKING_CS_URL = getattr(settings, 'OWNEY_TRACKING_CS_URL', _CS_URL)<|fim▁end|>
from django.conf import settings
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2017 AltOS-Rust Team * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ //! This module provides system implementation information and allows //! configuration control and reporting of system exceptions. mod icsr; mod defs; use core::ops::{Deref, DerefMut}; use ::volatile::Volatile; use self::icsr::ICSR; use self::defs::*; /// Returns instance of the System Control Block. pub fn scb() -> SCB { SCB::scb() } #[derive(Copy, Clone, Debug)] #[repr(C)] #[doc(hidden)] pub struct RawSCB { cpuid: u32, icsr: ICSR, reserved1: u32, aircr: u32, scr: u32, ccr: u32, reserved2: u32, shpr2: u32, shpr3: u32, } /// System Control Block #[derive(Copy, Clone, Debug)] pub struct SCB(Volatile<RawSCB>); impl SCB { fn scb() -> Self { unsafe { SCB(Volatile::new(SCB_ADDR as *const _)) } } }<|fim▁hole|> impl Deref for SCB { type Target = RawSCB; fn deref(&self) -> &Self::Target { &*(self.0) } } impl DerefMut for SCB { fn deref_mut(&mut self) -> &mut Self::Target { &mut *(self.0) } } impl RawSCB { /// Trigger a pend_sv exception. /// /// PendSV signals to the operating system that a context switch should occur. pub fn set_pend_sv(&mut self) { self.icsr.set_pend_sv(); } /// Clear the pend_sv exception. pub fn clear_pend_sv(&mut self) { self.icsr.clear_pend_sv(); } }<|fim▁end|>
<|file_name|>shrinky.cc<|end_file_name|><|fim▁begin|>/* MiracleGrue - Model Generator for toolpathing. <http://www.grue.makerbot.com> Copyright (C) 2011 Far McKon <[email protected]>, Hugo Boyer ([email protected]) This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. */ #include <map> #include <set> #include "shrinky.h" #include "log.h" namespace mgl { using namespace std; void lengthCheck(const std::vector<Segment2Type> &segments, const char *msg) { for (unsigned int i = 0; i < segments.size(); i++) { const Segment2Type &seg = segments[i]; Scalar l = seg.length(); // Log::often() << msg << " seg[" << i << "] = " << seg << " l=" << l << endl; if (!(l > 0)) { Log::info() << "Z"; stringstream ss; ss << msg << " Zero length: segment[" << i << "] = " << seg << endl; ScadDebugFile::segment3(ss, "", "segments", segments, 0, 0.1); ShrinkyException mixup(ss.str().c_str()); throw mixup; } } } void connectivityCheck(const std::vector<Segment2Type> &segments, Scalar tol) { for (unsigned int i = 0; i < segments.size(); i++) { unsigned int prevId = i == 0 ? segments.size() - 1 : i - 1; const Segment2Type &prevSeg = segments[prevId]; Segment2Type seg = segments[i]; if (!prevSeg.b.tequals(seg.a, tol)) { Point2Type dist = prevSeg.b - seg.a; stringstream ss; ss << "Connectivity error: segment[" << prevId << "] = " << prevSeg << endl; ss << " and segment[" << i << "] = " << seg << endl; ss << " segment[" << prevId << "] length = " << prevSeg.length() << endl; ss << " segment[" << i << "] length = " << seg.length() << endl; ss << " Distance between segments " << dist.magnitude(); ss << endl; Log::info() << "C"; // Log::often() << "|" << dist.magnitude() << "|" << prevSeg.length() << "|" << seg.length() << "|"; ScadDebugFile::segment3(ss, "", "segments", segments, 0, 0.1); ShrinkyException mixup(ss.str().c_str()); throw mixup; } } } void createConvexList(const std::vector<Segment2Type> &segments, std::vector<bool> &convex) { Scalar tol = 0.3; for (size_t id = 0; id < segments.size(); id++) { size_t prevId = id == 0 ? segments.size() - 1 : id - 1; const Segment2Type &seg = segments[id]; const Segment2Type &prevSeg = segments[prevId]; const Point2Type & i = prevSeg.a; const Point2Type & j = seg.a; const Point2Type & j2 = prevSeg.b; const Point2Type & k = seg.b; bool isSameSame = j.tequals(j2, tol); if (!isSameSame) { stringstream ss; ss << endl << "CONNECTIVITY ERROR" << endl; ss << "Segment id: " << id << ", prevId: " << prevId << endl; ss << "i: " << i << endl; ss << "j: " << j << endl; ss << "j2: " << j2 << endl; ss << "k: " << k << endl; Point2Type d = j2 - j; Scalar distance = d.magnitude(); ss << "distance " << distance << endl; ss << "SameSame " << isSameSame << endl; Log::info() << "_C_"; ShrinkyException mixup(ss.str().c_str()); throw mixup; } bool vertex = convexVertex(i, j, k); convex.push_back(vertex); } } void segmentsDiagnostic(const char* title, const std::vector<Segment2Type> &segments) { Log::info() << endl << title << endl; Log::info() << "id\tconvex\tlength\tdistance\tangle\ta, b" << endl; for (size_t id = 0; id < segments.size(); id++) { const Segment2Type &seg = segments[id]; size_t prevId = id == 0 ? segments.size() - 1 : id - 1; const Segment2Type &prevSeg = segments[prevId]; const Point2Type & i = prevSeg.a; const Point2Type & j = seg.a; const Point2Type & j2 = prevSeg.b; const Point2Type & k = seg.b; Point2Type d = j2 - j; Scalar distance = d.magnitude(); Scalar length = seg.squaredLength(); Scalar angle = d.angleFromPoint2s(i, j, k); bool vertex = convexVertex(i, j, k); Log::info() << id << "\t" << vertex << "\t" << length << ",\t" << distance << ",\t" << angle << "\t" << seg.a << ", " << seg.b << "\t" << endl; } } Point2Type getInsetDirection(const Segment2Type &seg) { Point3Type v(seg.b.x - seg.a.x, seg.b.y - seg.a.y, 0); Point3Type up(0, 0, 1); Point3Type inset = v.crossProduct(up); inset.normalise(); Point2Type inset2(inset.x, inset.y); return inset2; } Segment2Type elongateAndPrelongate(const Segment2Type &s, Scalar dist) { Segment2Type segment(s); Point2Type l = segment.b - segment.a; l.normalise(); l *= dist; segment.b += l; segment.a -= l; return segment; } void insetSegments(const std::vector<Segment2Type> &segments, Scalar d, std::vector<Segment2Type> &insets) { assert(insets.size() == 0); for (size_t i = 0; i < segments.size(); i++) { Segment2Type seg = segments[i]; Point2Type inset = getInsetDirection(seg); inset *= d; seg.a += inset; seg.b += inset; insets.push_back(seg); } assert(insets.size() == segments.size()); } string segment3(const Segment2Type &s, Scalar z) { stringstream ss; ss << "[[" << s.a[0] << ", " << s.a[1] << ", " << z << "], [" << s.b[0] << ", " << s.b[1] << ", " << z << "]]"; return ss.str(); } void trimConvexSegments(const std::vector<Segment2Type> & rawInsets, const std::vector<bool> &convex, std::vector<Segment2Type> & segments) { assert(segments.size() == 0); segments = rawInsets; for (unsigned int i = 0; i < segments.size(); i++) { unsigned int prevId = i == 0 ? segments.size() - 1 : i - 1; Segment2Type &currentSegment = segments[i]; Segment2Type &previousSegment = segments[prevId]; if (convex[i]) { Point2Type intersection; bool trimmed = segmentSegmentIntersection(previousSegment, currentSegment, intersection); if (trimmed) { previousSegment.b = intersection; currentSegment.a = intersection; } else { stringstream ss; ss << "Trim ERROR: segment " << i << endl; ss << "segments = [ " << segment3(previousSegment, 0) << " , " << segment3(currentSegment, 0) << " ]; " << endl; ss << "color([0,0.5,0,1])loop_segments3(segments, false);" << endl; ShrinkyException mixup(ss.str().c_str()); // throw mixup; } } } } void AddReflexSegments(const std::vector<Segment2Type> &segments, const std::vector<Segment2Type> &trimmedInsets, const std::vector<bool> &convexVertices, std::vector<Segment2Type> &newSegments) { assert(newSegments.size() == 0); newSegments.reserve(segments.size() * 2); for (unsigned int i = 0; i < segments.size(); i++) { unsigned int prevId = i == 0 ? segments.size() - 1 : i - 1; if (!convexVertices[i]) { // Point2Type center = segments[i].a; Point2Type start = trimmedInsets[prevId].b; Point2Type end = trimmedInsets[i].a; Segment2Type straight(start, end); newSegments.push_back(straight); } newSegments.push_back(trimmedInsets[i]); } } void removeShortSegments(const std::vector<Segment2Type> &segments, Scalar cutoffLength, std::vector<Segment2Type> &shorts) { shorts.reserve(segments.size()); // worst case assert(cutoffLength > 0); Scalar cutoffLength2 = cutoffLength * cutoffLength; for (unsigned int i = 0; i < segments.size(); i++) { const Segment2Type &seg = segments[i]; Scalar length2 = seg.squaredLength(); if (length2 > cutoffLength2) { shorts.push_back(seg); } } } void Shrinky::openScadFile(const char *scadFileName) { if (scadFileName) { fscad.open(scadFileName); std::ostream & out = fscad.getOut(); out << "module loop_segments3(segments, ball=true)" << endl; out << "{" << endl; out << " if(ball) corner (x=segments[0][0][0], y=segments[0][0][1], z=segments[0][0][2], diameter=0.25, faces=12, thickness_over_width=1);" << endl; out << " for(seg = segments)" << endl; out << " {" << endl; out << " tube(x1=seg[0][0], y1=seg[0][1], z1=seg[0][2], x2=seg[1][0], y2=seg[1][1], z2=seg[1][2] , diameter1=0.1, diameter2=0.05, faces=4, thickness_over_width=1);" << endl; out << " }" << endl; out << "}" << endl; fscad.writeHeader(); } } Shrinky::Shrinky(const char *scadFileName) : scadFileName(scadFileName), scadZ(0), color(1), counter(0), dz(0) { openScadFile(scadFileName); } bool attachSegments(Segment2Type &first, Segment2Type &second, Scalar elongation) { // LineSegment2 s0 = elongateAndPrelongate(first, elongation); // elongate(first, elongation); // LineSegment2 s1 = elongateAndPrelongate(second, elongation); //prelongate(second, elongation); Segment2Type s0 = first.elongate(elongation); Segment2Type s1 = second.prelongate(elongation); Point2Type intersection; bool trimmed = segmentSegmentIntersection(s0, s1, intersection); if (trimmed) { first.b = intersection; second.a = intersection; return true; } return false; } // // Calculates altitude in terms of the sides // http://en.wikipedia.org/wiki/Altitude_(triangle) // // @inputs: the length of 3 triangle sides // @returns the altitude from side a Scalar triangleAltitude(Scalar a, Scalar b, Scalar c) { Scalar s = 0.5 * (a + b + c);<|fim▁hole|> return h; } // an edged has collapsed when its 2 bisectors intersect // at an altitude that is lower than the inset distance bool edgeCollapse(const Segment2Type& segment, const Point2Type& bisector0, const Point2Type& bisector1, Scalar elongation, Scalar &collapseDistance) { // segment is the base of the triangle // from which we want the altitude Segment2Type bisectorSegment0; bisectorSegment0.a = segment.a; bisectorSegment0.b = segment.a + bisector0; Segment2Type bisectorSegment1; bisectorSegment1.a = segment.b + bisector1; bisectorSegment1.b = segment.b; Segment2Type s0 = bisectorSegment0.elongate(elongation); Segment2Type s1 = bisectorSegment1.prelongate(elongation); Point2Type intersection; bool attached = segmentSegmentIntersection(s0, s1, intersection); if (attached) { // the triangle is made from Point2Type edge0 = segment.a - intersection; Point2Type edge1 = segment.b - intersection; Scalar a, b, c; a = segment.length(); b = edge0.magnitude(); c = edge1.magnitude(); collapseDistance = triangleAltitude(a, b, c); if (collapseDistance < 0) { assert(0); } return true; } return false; } void outMap(const std::multimap<Scalar, unsigned int> &collapsingSegments) { Log::info() << "collapse distance\tsegment id" << endl; Log::info() << "--------------------------------" << endl; for (std::multimap<Scalar, unsigned int>::const_iterator it = collapsingSegments.begin(); it != collapsingSegments.end(); it++) { const std::pair<Scalar, unsigned int>& seg = *it; Log::info() << "\t" << seg.first << ",\t" << seg.second << endl; } } Scalar removeFirstCollapsedSegments(const std::vector<Segment2Type> &originalSegments, const std::vector<Point2Type> &bisectors, Scalar insetDist, std::vector<Segment2Type> &relevantSegments) { Scalar elongation = 100; assert(relevantSegments.size() == 0); relevantSegments.reserve(originalSegments.size()); //Log::often() << "NB of segments:" << originalSegments.size() << endl; multimap<Scalar, unsigned int> collapsingSegments; // Log::often() << endl << "removeFirstCollapsedSegments:: looking for collapses" << endl; std::vector<Segment2Type> segments = originalSegments; for (unsigned int i = 0; i < segments.size(); i++) { unsigned int nextId = i == segments.size() - 1 ? 0 : i + 1; //const LineSegment2 &nextSeg = segments[nextId]; const Point2Type &nextBisector = bisectors[nextId]; const Segment2Type &currentSegment = segments[i]; const Point2Type &currentBisector = bisectors[i]; Scalar collapseDistance; // check //Log::often() << "segment[" << i << "] = " << currentSegment << endl; bool collapsed = edgeCollapse(currentSegment, currentBisector, nextBisector, elongation, collapseDistance); if (collapsed) { //Log::often() << " ** segment " << i << " ,collapse distance " << collapseDistance << endl; if (collapseDistance < insetDist) { // shortestCollapseDistance = collapseDistance; collapsingSegments.insert(std::pair<Scalar, unsigned int>(collapseDistance, i)); } } } if (collapsingSegments.empty()) { // no problem... inset all the way! for (unsigned int i = 0; i < segments.size(); i++) { relevantSegments.push_back(segments[i]); } return insetDist; } // otherwise... std::multimap<Scalar, unsigned int>::iterator collapserator = collapsingSegments.begin(); Scalar collapseDistance = (*collapserator).first; //Log::often() << "COLLAPSED ID " << firstCollapse << endl; std::set<unsigned int> toRemove; // Log::often() << "removeFirstCollapsedSegments:: who to remove" << endl; bool done = false; do { Scalar d = (*collapserator).first; unsigned int segmentId = (*collapserator).second; //Log::often() << " " << d << ": Removing collapsed segment[" << segmentId <<"]=" << segments[segmentId] << endl; toRemove.insert(segmentId); collapserator++; if (collapserator == collapsingSegments.end()) { done = true; } if (d > collapseDistance) { // Log::often() << "d(" << d << ") > collapseDistance (" << collapseDistance << endl; done = true; } } while (!done); //Log::often() << "removeFirstCollapsedSegments:: making new list" << endl; for (unsigned int i = 0; i < segments.size(); i++) { if (toRemove.find(i) == toRemove.end()) { relevantSegments.push_back(segments[i]); } } return collapseDistance; } // True if the 3 points are collinear bool collinear(const Point2Type &a, const Point2Type &b, const Point2Type &c, Scalar tol) { Scalar dot = ((b[0] - a[0]) * (c[1] - a[1]) - (c[0] - c[0]) * (b[1] - a[1])); bool r = tequals(dot, 0, tol); return r; } void elongateAndTrimSegments(const std::vector<Segment2Type> & longSegments, Scalar elongation, std::vector<Segment2Type> &segments) { Scalar tol = 1e-6; segments = longSegments; for (unsigned int i = 0; i < segments.size(); i++) { unsigned int prevId = i == 0 ? segments.size() - 1 : i - 1; Segment2Type &previousSegment = segments[prevId]; Segment2Type &currentSegment = segments[i]; //Log::often() << "prev: seg[" << prevId << "] = " << previousSegment << endl; //Log::often() << "cur: seg[" << i << "] = " << currentSegment << endl; if (previousSegment.b.tequals(currentSegment.a, tol)) { // the job is already done.. segments are attached, // nothing to see // Log::often() << "already attached" << endl; continue; } if (previousSegment.length() == 0) { Log::info() << "X"; continue; } if (currentSegment.length() == 0) { Log::info() << "Y"; continue; } bool attached = attachSegments(previousSegment, currentSegment, elongation); if (!attached) { Log::info() << "!"; Point2Type m = (previousSegment.a + currentSegment.b) * 0.5; previousSegment.b = m; currentSegment.a = m; } // Log::often() << "attach point " << currentSegment.a << endl; //Log::often() << endl; } } void createBisectors(const std::vector<Segment2Type>& segments, Scalar tol, std::vector<Point2Type> &motorCycles) { for (unsigned int i = 0; i < segments.size(); i++) { unsigned int prevId = i == 0 ? segments.size() - 1 : i - 1; const Segment2Type &prevSeg = segments[prevId]; const Segment2Type &seg = segments[i]; Point2Type prevInset = getInsetDirection(prevSeg); Point2Type inset = getInsetDirection(seg); Point2Type bisector = inset; // if points are disjoint, do not combine both insets if (prevSeg.b.tequals(seg.a, tol)) { bisector += prevInset; } else { // // ok... maybe this is a bit drastic and we could combine the biesctors // this author needs to make up his mind about non closed polygon support // Point2Type dist = prevSeg.b - seg.a; stringstream ss; ss << "This is not a closed polygon. segment[" << prevId << "].b = " << prevSeg.b; ss << " and segment[" << i << "].a = " << seg.a << " are distant by " << dist.magnitude(); ss << endl; ScadDebugFile::segment3(ss, "", "segments", segments, 0, 0.1); Log::info() << "O"; ShrinkyException mixup(ss.str().c_str()); throw mixup; // assert(0); } if (bisector.squaredMagnitude() == 0) { stringstream ss; ss << "Null bisector at segment [" << i << "] position=" << seg.a << endl; ss << " previous_inset=" << prevInset << " inset=" << inset; Log::info() << "N"; ShrinkyException mixup(ss.str().c_str()); throw mixup; } bisector.normalise(); motorCycles.push_back(bisector); } } void Shrinky::writeScadBisectors(const std::vector<Point2Type> & bisectors, const std::vector<Segment2Type> & originalSegments) { if (scadFileName) { std::vector<Segment2Type> motorCycleTraces; for (size_t i = 0; i < bisectors.size(); i++) { Point2Type a = originalSegments[i].a; Point2Type dir = bisectors[i]; dir *= 2; Point2Type b = a + dir; Segment2Type s(a, b); motorCycleTraces.push_back(s); } scadZ = fscad.writeSegments3("bisectors_", "color([0.75,0.5,0.2,1])loop_segments3", motorCycleTraces, scadZ, dz, this->counter); } } void Shrinky::writeScadSegments(const char* segNames, const char* prefix, const std::vector<Segment2Type> & segments) { if (scadFileName) { string funcName = prefix; funcName += "loop_segments3"; scadZ = fscad.writeSegments3(segNames, funcName.c_str(), segments, scadZ, dz, this->counter); } } void Shrinky::inset(const std::vector<Segment2Type>& originalSegments, Scalar insetDist, std::vector<Segment2Type> &finalInsets) { bool writePartialSteps = true; int count = originalSegments.size(); if (count < 2) { assert(0); } assert(finalInsets.size() == 0); finalInsets.reserve(originalSegments.size()); Scalar tol = 1e-6; // for continuity testing and distance to go Scalar distanceToGo = insetDist; std::vector<Segment2Type> initialSegs = originalSegments; bool done = false; while (!done) { connectivityCheck(initialSegs, tol); //Log::often() << " ** distance to go: " << distanceToGo << endl; finalInsets.clear(); Scalar distanceGone = insetStep(initialSegs, distanceToGo, tol, writePartialSteps, finalInsets); distanceToGo -= distanceGone; if (tequals(distanceToGo, 0, tol)) { done = true; return; } if (finalInsets.size() > 2) { initialSegs = finalInsets; } else { return; } } } void removeZeroLengthSegments(const std::vector<Segment2Type> &inputSegments, std::vector<Segment2Type> &segments, Scalar tol) { assert(inputSegments.size() > 0); segments.reserve(inputSegments.size()); // deep copy for (unsigned int i = 0; i < inputSegments.size(); i++) { const Segment2Type &seg = inputSegments[i]; if (tequals(seg.squaredLength(), 0, tol)) { continue; } segments.push_back(seg); } } Scalar Shrinky::insetStep(const std::vector<Segment2Type>& originalSegments, Scalar insetDist, Scalar continuityTolerance, bool, // writePartialStep, std::vector<Segment2Type> &finalInsets) { Scalar tol = 1e-6; // magic numbers Scalar elongation = insetDist * 100; // continuityTolerance * 5; unsigned int segmentCount = originalSegments.size(); assert(segmentCount > 0); assert(finalInsets.size() == 0); assert(&originalSegments != &finalInsets); if (segmentCount < 2) { stringstream ss; ss << "1 line segment is not enough to create a closed polygon"; assert(0); ShrinkyException mixup(ss.str().c_str()); throw mixup; } bool dumpSteps = false; if (dumpSteps)segmentsDiagnostic("originalSegments", originalSegments); Scalar insetStepDistance = insetDist; try { if (scadFileName) { // OpenScad // Scalar dz = 0.1; stringstream coloredOutline; // Scalar color = (1.0 * i)/(shells-1); int color = 0; coloredOutline << "color([" << color << "," << color << "," << 1 - color << " ,1])"; Scalar dzBefore = scadZ; writeScadSegments("outlines_", coloredOutline.str().c_str(), originalSegments); // trick to get the bisector in place scadZ = dzBefore; } std::vector<Segment2Type> relevantSegments; if (originalSegments.size() > 2) { //Log::often() << "...BISECTING..." << endl; std::vector<Point2Type> bisectors; createBisectors(originalSegments, continuityTolerance, bisectors); writeScadBisectors(bisectors, originalSegments); //Log::often() << "...COLLAPSING..." << endl; insetStepDistance = removeFirstCollapsedSegments(originalSegments, bisectors, insetDist, relevantSegments); if (dumpSteps) segmentsDiagnostic("relevantSegments", relevantSegments); writeScadSegments("relevants_", "color([0.5,0.5,0,1])", relevantSegments); } std::vector<Segment2Type> insets; unsigned int relevantCount = relevantSegments.size(); if (relevantCount > 2) { //Log::often() << "...INSETTING..." << endl; insetSegments(relevantSegments, insetStepDistance, insets); if (dumpSteps) segmentsDiagnostic("Insets", insets); writeScadSegments("raw_insets_", "color([1,0,0.4,1])", insets); lengthCheck(insets, "insets"); } std::vector<Segment2Type> connected; if (insets.size() > 2) { //Log::often() << "...ATTACHING..." << endl; elongateAndTrimSegments(insets, elongation, connected); writeScadSegments("connected_", "color([0.25,0.25,0.25,1])", connected); // lengthCheck(finalInsets, "finalInsets"); } if (connected.size() > 2) { removeZeroLengthSegments(connected, finalInsets, tol); writeScadSegments("final_insets_", "color([0.5,0.5,0.5,1])", connected); } } catch (ShrinkyException &mixup) { Log::info() << mixup.error << endl; // Log::often() << "ABORT MISSION!!! " << insetStepDistance << ": " << mixup.error << endl; // this is a lie... but we want to break the loop insetStepDistance = insetDist; throw; } this->counter++; return insetStepDistance; } void Shrinky::closeScadFile() { if (scadFileName) { std::ostream & out = fscad.getOut(); int shells = counter; fscad.writeMinMax("draw_outlines", "outlines_", shells); fscad.writeMinMax("draw_bisectors", "bisectors_", shells); fscad.writeMinMax("draw_raw_insets", "raw_insets_", shells); //fscad.writeMinMax("draw_trimmed_insets", "trimmed_insets_", shells); fscad.writeMinMax("draw_relevants", "relevants_", shells); fscad.writeMinMax("draw_final_insets", "final_insets_", shells); out << "min=0;" << endl; out << "max=" << shells - 1 << ";" << std::endl; out << std::endl; out << "draw_outlines(min, max);" << std::endl; out << "draw_bisectors(min, max);" << std::endl; out << "draw_relevants(min, max);" << std::endl; out << "draw_raw_insets(min, max);" << std::endl; out << "draw_final_insets(min, max);" << std::endl; out << endl; out << "// s = [\"segs.push_back(TriangleSegment2(Point2Type(%s+x, %s+y), Point2Type(%s+x, %s+y)));\" %(x[0][0], x[0][1], x[1][0], x[1][1]) for x in segments]" << std::endl; out << "// print '\\n'.join(s) " << endl; fscad.close(); } } Shrinky::~Shrinky() { closeScadFile(); } void createShells(const SegmentVector & outlinesSegments, const std::vector<Scalar> &insetDistances, unsigned int sliceId, const char *scadFile, bool writeDebugScadFiles, std::vector<SegmentVector> & insetsForLoops) { assert(insetsForLoops.size() == 0); unsigned int nbOfShells = insetDistances.size(); // dbgs__( "outlineSegmentCount " << outlineSegmentCount) for (unsigned int outlineId = 0; outlineId < outlinesSegments.size(); outlineId++) { const std::vector<Segment2Type> &outlineLoop = outlinesSegments[outlineId]; assert(outlineLoop.size() > 0); insetsForLoops.push_back(SegmentVector()); assert(insetsForLoops.size() == outlineId + 1); SegmentVector &insetTable = *insetsForLoops.rbegin(); // inset curves for a single loop insetTable.reserve(nbOfShells); for (unsigned int shellId = 0; shellId < nbOfShells; shellId++) { insetTable.push_back(std::vector<Segment2Type > ()); } //unsigned int segmentCountBefore =0; //unsigned int segmentCountAfter =0; unsigned int currentShellIdForErrorReporting = 0; try { Shrinky shrinky; const vector<Segment2Type> *previousInsets = &outlineLoop; for (unsigned int shellId = 0; shellId < nbOfShells; shellId++) { currentShellIdForErrorReporting = shellId; Scalar insetDistance = insetDistances[shellId]; std::vector<Segment2Type> &insets = insetTable[shellId]; if ((*previousInsets).size() > 2) { shrinky.inset(*previousInsets, insetDistance, insets); previousInsets = &insets; } } } catch (ShrinkyException &messup) { if (writeDebugScadFiles) { static int counter = 0; Log::info() << endl; Log::info() << "----- ------ ERROR " << counter << " ------ ------" << endl; Log::info() << "sliceId: " << sliceId << endl; Log::info() << "loopId : " << outlineId << endl; Log::info() << "shellId: " << currentShellIdForErrorReporting << endl; stringstream ss; ss << "_slice_" << sliceId << "_loop_" << outlineId << ".scad"; MyComputer myComputer; string loopScadFile = myComputer.fileSystem.ChangeExtension(scadFile, ss.str().c_str()); Shrinky shriker(loopScadFile.c_str()); shriker.dz = 0.1; try { std::ostream &scad = shriker.fscad.getOut(); scad << "/*" << endl; scad << messup.error; scad << endl << "*/" << endl; vector<Segment2Type> previousInsets = outlineLoop; Log::info() << "Creating file: " << loopScadFile << endl; Log::info() << " Number of points " << (int) previousInsets.size() << endl; ScadDebugFile::segment3(cout, "", "segments", previousInsets, 0, 0.1); std::vector<Segment2Type> insets; for (unsigned int shellId = 0; shellId < nbOfShells; shellId++) { Scalar insetDistance = insetDistances[shellId]; shriker.inset(previousInsets, insetDistance, insets); previousInsets = insets; insets.clear(); // discard... } } catch (ShrinkyException &) // the same excpetion is thrown again { Log::info() << "saving " << endl; } Log::info() << "--- --- ERROR " << counter << " END --- ----" << endl; counter++; } } } } //// creates shells from the segments table //// void createShellsForSliceUsingShrinky(const SegmentVector & outlinesSegments, const std::vector<Scalar> &insetDistances, unsigned int sliceId, const char *scadFile, std::vector<SegmentVector> & insetsForLoops) { assert(insetsForLoops.size() == 0); unsigned int nbOfShells = insetDistances.size(); for (unsigned int shellId = 0; shellId < nbOfShells; shellId++) { insetsForLoops.push_back(SegmentVector()); SegmentVector &currentShellTable = *insetsForLoops.rbegin(); for (unsigned int outlineId = 0; outlineId < outlinesSegments.size(); outlineId++) { try { Shrinky shrinky; currentShellTable.push_back(std::vector<Segment2Type > ()); std::vector<Segment2Type> &outlineShell = *currentShellTable.rbegin(); Scalar dist = insetDistances[shellId]; const SegmentVector *pInputs = NULL; if (shellId == 0) { pInputs = &outlinesSegments; } else { pInputs = &insetsForLoops[shellId - 1]; } const SegmentVector &inputTable = *pInputs; const std::vector<Segment2Type> & inputSegments = inputTable[outlineId]; if (inputSegments.size() > 2) { shrinky.inset(inputSegments, dist, outlineShell); } } catch (ShrinkyException &messup) { if (scadFile != 0x00) { static int counter = 0; Log::info() << endl; Log::info() << "----- ------ ERROR " << counter << " ------ ------" << endl; Log::info() << "sliceId: " << sliceId << endl; Log::info() << "loopId : " << outlineId << endl; Log::info() << "shellId: " << shellId << endl; stringstream ss; ss << "_slice_" << sliceId << "_loop_" << outlineId << ".scad"; MyComputer myComputer; string loopScadFile = myComputer.fileSystem.ChangeExtension(scadFile, ss.str().c_str()); Shrinky shriker(loopScadFile.c_str()); shriker.dz = 0.1; try { Shrinky shrinky; currentShellTable.push_back(std::vector<Segment2Type > ()); std::vector<Segment2Type> &outlineShell = *currentShellTable.rbegin(); Scalar dist = insetDistances[shellId]; if (shellId == 0) { const SegmentVector &inputTable = outlinesSegments; const std::vector<Segment2Type> & inputSegments = inputTable[outlineId]; shrinky.inset(inputSegments, dist, outlineShell); } else { const SegmentVector &inputTable = insetsForLoops[shellId - 1]; const std::vector<Segment2Type> & inputSegments = inputTable[outlineId]; shrinky.inset(inputSegments, dist, outlineShell); } } catch (ShrinkyException &) // the same excpetion is thrown again { Log::info() << "saving " << endl; } Log::info() << "--- --- ERROR " << counter << " END --- ----" << endl; counter++; } } } } } }<|fim▁end|>
Scalar h = 2 * SCALAR_SQRT(s * (s - a)*(s - b)*(s - c)) / a;
<|file_name|>export-named-specifiers.src.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
export {foo, bar};
<|file_name|>cros_gralloc_helpers.cc<|end_file_name|><|fim▁begin|>/* * Copyright 2016 The Chromium OS Authors. All rights reserved. * Use of this source code is governed by a BSD-style license that can be * found in the LICENSE file. */ #include "cros_gralloc_helpers.h" #include <cstdlib> #include <cutils/log.h> #include <fcntl.h> #include <xf86drm.h> uint64_t cros_gralloc_convert_flags(int flags) { uint64_t usage = DRV_BO_USE_NONE; if (flags & GRALLOC_USAGE_CURSOR) usage |= DRV_BO_USE_CURSOR; if ((flags & sw_read()) == GRALLOC_USAGE_SW_READ_RARELY) usage |= DRV_BO_USE_SW_READ_RARELY;<|fim▁hole|> usage |= DRV_BO_USE_SW_READ_OFTEN; if ((flags & sw_write()) == GRALLOC_USAGE_SW_WRITE_RARELY) usage |= DRV_BO_USE_SW_WRITE_RARELY; if ((flags & sw_write()) == GRALLOC_USAGE_SW_WRITE_OFTEN) usage |= DRV_BO_USE_SW_WRITE_OFTEN; if (flags & GRALLOC_USAGE_HW_TEXTURE) usage |= DRV_BO_USE_RENDERING; if (flags & GRALLOC_USAGE_HW_RENDER) usage |= DRV_BO_USE_RENDERING; if (flags & GRALLOC_USAGE_HW_2D) usage |= DRV_BO_USE_RENDERING; if (flags & GRALLOC_USAGE_HW_COMPOSER) /* HWC wants to use display hardware, but can defer to OpenGL. */ usage |= DRV_BO_USE_SCANOUT | DRV_BO_USE_RENDERING; if (flags & GRALLOC_USAGE_HW_FB) usage |= DRV_BO_USE_SCANOUT; if (flags & GRALLOC_USAGE_EXTERNAL_DISP) /* We're ignoring this flag until we decide what to with display link */ usage |= DRV_BO_USE_NONE; if (flags & GRALLOC_USAGE_PROTECTED) usage |= DRV_BO_USE_PROTECTED; if (flags & GRALLOC_USAGE_HW_VIDEO_ENCODER) /*HACK: See b/30054495 */ usage |= DRV_BO_USE_SW_READ_OFTEN; if (flags & GRALLOC_USAGE_HW_CAMERA_WRITE) usage |= DRV_BO_USE_HW_CAMERA_WRITE; if (flags & GRALLOC_USAGE_HW_CAMERA_READ) usage |= DRV_BO_USE_HW_CAMERA_READ; if (flags & GRALLOC_USAGE_HW_CAMERA_ZSL) usage |= DRV_BO_USE_HW_CAMERA_ZSL; if (flags & GRALLOC_USAGE_RENDERSCRIPT) usage |= DRV_BO_USE_RENDERSCRIPT; return usage; } drv_format_t cros_gralloc_convert_format(int format) { /* * Conversion from HAL to fourcc-based DRV formats based on * platform_android.c in mesa. */ switch (format) { case HAL_PIXEL_FORMAT_BGRA_8888: return DRV_FORMAT_ARGB8888; case HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED: return DRV_FORMAT_FLEX_IMPLEMENTATION_DEFINED; case HAL_PIXEL_FORMAT_RGB_565: return DRV_FORMAT_RGB565; case HAL_PIXEL_FORMAT_RGB_888: return DRV_FORMAT_RGB888; case HAL_PIXEL_FORMAT_RGBA_8888: return DRV_FORMAT_ABGR8888; case HAL_PIXEL_FORMAT_RGBX_8888: return DRV_FORMAT_XBGR8888; case HAL_PIXEL_FORMAT_YCbCr_420_888: return DRV_FORMAT_FLEX_YCbCr_420_888; case HAL_PIXEL_FORMAT_YV12: return DRV_FORMAT_YVU420; } return DRV_FORMAT_NONE; } static int32_t cros_gralloc_query_rendernode(struct driver **drv, const char *name) { /* TODO(gsingh): Enable render nodes on udl/evdi. */ int fd; drmVersionPtr version; char const *str = "%s/renderD%d"; int32_t num_nodes = 63; int32_t min_node = 128; int32_t max_node = (min_node + num_nodes); for (int i = min_node; i < max_node; i++) { char *node; if (asprintf(&node, str, DRM_DIR_NAME, i) < 0) continue; fd = open(node, O_RDWR, 0); free(node); if (fd < 0) continue; version = drmGetVersion(fd); if (version && name && !strcmp(version->name, name)) { drmFreeVersion(version); continue; } drmFreeVersion(version); *drv = drv_create(fd); if (*drv) return CROS_GRALLOC_ERROR_NONE; } return CROS_GRALLOC_ERROR_NO_RESOURCES; } int32_t cros_gralloc_rendernode_open(struct driver **drv) { int32_t ret; ret = cros_gralloc_query_rendernode(drv, NULL); /* Look for vgem driver if no hardware is found. */ if (ret) ret = cros_gralloc_query_rendernode(drv, "vgem"); return ret; } int32_t cros_gralloc_validate_handle(struct cros_gralloc_handle *hnd) { if (!hnd || hnd->magic != cros_gralloc_magic()) return CROS_GRALLOC_ERROR_BAD_HANDLE; return CROS_GRALLOC_ERROR_NONE; } void cros_gralloc_log(const char *prefix, const char *file, int line, const char *format, ...) { va_list args; va_start(args, format); ALOGE("%s - [%s(%d)]", prefix, basename(file), line); __android_log_vprint(ANDROID_LOG_ERROR, prefix, format, args); va_end(args); }<|fim▁end|>
if ((flags & sw_read()) == GRALLOC_USAGE_SW_READ_OFTEN)
<|file_name|>internalAliasClassInsideLocalModuleWithExport.js<|end_file_name|><|fim▁begin|>//// [internalAliasClassInsideLocalModuleWithExport.js] (function (x) { var c = (function () { function c() { } c.prototype.foo = function (a) { return a; }; return c; })(); x.c = c; })(exports.x || (exports.x = {})); var x = exports.x; (function (m2) { (function (m3) { var c = x.c; m3.c = c; m3.cProp = new c(); var cReturnVal = m3.cProp.foo(10); })(m2.m3 || (m2.m3 = {})); var m3 = m2.m3; })(exports.m2 || (exports.m2 = {})); var m2 = exports.m2; exports.d = new m2.m3.c(); ////[internalAliasClassInsideLocalModuleWithExport.d.ts] <|fim▁hole|>} export declare module m2 { module m3 { export import c = x.c; var cProp: c; } } export declare var d: x.c;<|fim▁end|>
export declare module x { class c { public foo(a: number): number; }
<|file_name|>index.js<|end_file_name|><|fim▁begin|>var async = require('async'); function captainHook(schema) { // Pre-Save Setup schema.pre('validate', function (next) { var self = this; this._wasNew = this.isNew; if (this.isNew) { this.runPreMethods(schema.preCreateMethods, self, function(){ next(); }); } else { this.runPreMethods(schema.preUpdateMethods, self, function(){ next(); }); } }); // Post-Save Setup schema.post('save', function () { var self = this; if (this._wasNew) { this.runPostMethods(schema.postCreateMethods, self); } else { this.runPostMethods(schema.postUpdateMethods, self); } }); /** * Pre-Hooks * These hooks run before an instance has been created / updated */ schema.methods.runPreMethods = function(methods, self, callback){ async.eachSeries(methods, function(fn, cb) { fn(self, cb); }, function(err){ if (err){ throw err; } callback(); }); }; // Pre-Create Methods schema.preCreateMethods = []; schema.preCreate = function(fn){ schema.preCreateMethods.push(fn); }; // Pre-Update Methods schema.preUpdateMethods = []; schema.preUpdate = function(fn){ schema.preUpdateMethods.push(fn); }; /** * Post-Hooks * These hooks run after an instance has been created / updated */ schema.methods.runPostMethods = function(methods, self){ async.eachSeries(methods, function(fn, cb) { fn(self, cb); }, function(err){ if (err){ throw err; } }); }; // Post-Create Methods schema.postCreateMethods = [];<|fim▁hole|> }; // Post-Update Methods schema.postUpdateMethods = []; schema.postUpdate = function(fn){ schema.postUpdateMethods.push(fn); }; } module.exports = captainHook;<|fim▁end|>
schema.postCreate = function(fn){ schema.postCreateMethods.push(fn);
<|file_name|>mbcharsetprober.py<|end_file_name|><|fim▁begin|># ####################### BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Universal charset detector code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 2001 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # Shy Shalom - original C code # Proofpoint, Inc. # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### import sys from . import constants from .charsetprober import CharSetProber class MultiByteCharSetProber(CharSetProber): def __init__(self): CharSetProber.__init__(self) self._mDistributionAnalyzer = None self._mCodingSM = None self._mLastChar = [0, 0] def reset(self): CharSetProber.reset(self) if self._mCodingSM: self._mCodingSM.reset() if self._mDistributionAnalyzer: self._mDistributionAnalyzer.reset() self._mLastChar = [0, 0] def get_charset_name(self): pass def feed(self, aBuf): aLen = len(aBuf) for i in range(0, aLen): codingState = self._mCodingSM.next_state(aBuf[i]) if codingState == constants.eError: if constants._debug: sys.stderr.write(self.get_charset_name() + ' prober hit error at byte ' + str(i) + '\n') self._mState = constants.eNotMe break elif codingState == constants.eItsMe: self._mState = constants.eFoundIt break elif codingState == constants.eStart: charLen = self._mCodingSM.get_current_charlen() if i == 0: self._mLastChar[1] = aBuf[0] self._mDistributionAnalyzer.feed(self._mLastChar, charLen) else: self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1], charLen)<|fim▁hole|> if (self._mDistributionAnalyzer.got_enough_data() and (self.get_confidence() > constants.SHORTCUT_THRESHOLD)): self._mState = constants.eFoundIt return self.get_state() def get_confidence(self): return self._mDistributionAnalyzer.get_confidence()<|fim▁end|>
self._mLastChar[0] = aBuf[aLen - 1] if self.get_state() == constants.eDetecting:
<|file_name|>LogoutBehaviour.java<|end_file_name|><|fim▁begin|>/* * The MIT License * * Copyright 2015 Ryan Gilera. * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to deal * in the Software without restriction, including without limitation the rights * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell * copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN * THE SOFTWARE. */ package com.github.daytron.twaattin.presenter; import com.github.daytron.twaattin.ui.LoginScreen; import com.vaadin.server.Page; import com.vaadin.server.VaadinSession; import com.vaadin.shared.Position; import com.vaadin.ui.Button; import com.vaadin.ui.Notification; import com.vaadin.ui.UI; import java.security.Principal; /** * * @author Ryan Gilera */<|fim▁hole|> private final static long serialVersionUID = 1L; @Override public void buttonClick(Button.ClickEvent event) { VaadinSession.getCurrent().setAttribute(Principal.class, null); UI.getCurrent().setContent(new LoginScreen()); Notification logoutNotification = new Notification( "You've been logout", Notification.Type.TRAY_NOTIFICATION); logoutNotification.setPosition(Position.TOP_CENTER); logoutNotification.show(Page.getCurrent()); } }<|fim▁end|>
public class LogoutBehaviour implements Button.ClickListener {
<|file_name|>settings.py<|end_file_name|><|fim▁begin|>from awxkit.api.resources import resources from . import base from . import page <|fim▁hole|> page.register_page([resources.setting, resources.settings_all, resources.settings_authentication, resources.settings_changed, resources.settings_github, resources.settings_github_org, resources.settings_github_team, resources.settings_google_oauth2, resources.settings_jobs, resources.settings_ldap, resources.settings_radius, resources.settings_saml, resources.settings_system, resources.settings_tacacsplus, resources.settings_ui, resources.settings_user, resources.settings_user_defaults], Setting) class Settings(page.PageList, Setting): def get_endpoint(self, endpoint): """Helper method used to navigate to a specific settings endpoint. (Pdb) settings_pg.get_endpoint('all') """ base_url = '{0}{1}/'.format(self.endpoint, endpoint) return self.walk(base_url) get_setting = get_endpoint page.register_page(resources.settings, Settings)<|fim▁end|>
class Setting(base.Base): pass
<|file_name|>switch.py<|end_file_name|><|fim▁begin|>"""Support for Hydrawise cloud switches.""" import logging import voluptuous as vol from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchDevice from homeassistant.const import CONF_MONITORED_CONDITIONS import homeassistant.helpers.config_validation as cv from . import ( ALLOWED_WATERING_TIME, CONF_WATERING_TIME, DATA_HYDRAWISE, DEFAULT_WATERING_TIME, DEVICE_MAP, DEVICE_MAP_INDEX, SWITCHES, HydrawiseEntity) _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_MONITORED_CONDITIONS, default=SWITCHES): vol.All(cv.ensure_list, [vol.In(SWITCHES)]), vol.Optional(CONF_WATERING_TIME, default=DEFAULT_WATERING_TIME): vol.All(vol.In(ALLOWED_WATERING_TIME)), }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a sensor for a Hydrawise device.""" hydrawise = hass.data[DATA_HYDRAWISE].data default_watering_timer = config.get(CONF_WATERING_TIME) sensors = [] for sensor_type in config.get(CONF_MONITORED_CONDITIONS): # Create a switch for each zone for zone in hydrawise.relays: sensors.append( HydrawiseSwitch(default_watering_timer, zone, sensor_type)) add_entities(sensors, True)<|fim▁hole|> class HydrawiseSwitch(HydrawiseEntity, SwitchDevice): """A switch implementation for Hydrawise device.""" def __init__(self, default_watering_timer, *args): """Initialize a switch for Hydrawise device.""" super().__init__(*args) self._default_watering_timer = default_watering_timer @property def is_on(self): """Return true if device is on.""" return self._state def turn_on(self, **kwargs): """Turn the device on.""" if self._sensor_type == 'manual_watering': self.hass.data[DATA_HYDRAWISE].data.run_zone( self._default_watering_timer, (self.data['relay']-1)) elif self._sensor_type == 'auto_watering': self.hass.data[DATA_HYDRAWISE].data.suspend_zone( 0, (self.data['relay']-1)) def turn_off(self, **kwargs): """Turn the device off.""" if self._sensor_type == 'manual_watering': self.hass.data[DATA_HYDRAWISE].data.run_zone( 0, (self.data['relay']-1)) elif self._sensor_type == 'auto_watering': self.hass.data[DATA_HYDRAWISE].data.suspend_zone( 365, (self.data['relay']-1)) def update(self): """Update device state.""" mydata = self.hass.data[DATA_HYDRAWISE].data _LOGGER.debug("Updating Hydrawise switch: %s", self._name) if self._sensor_type == 'manual_watering': if not mydata.running: self._state = False else: self._state = int( mydata.running[0]['relay']) == self.data['relay'] elif self._sensor_type == 'auto_watering': for relay in mydata.relays: if relay['relay'] == self.data['relay']: if relay.get('suspended') is not None: self._state = False else: self._state = True break @property def icon(self): """Return the icon to use in the frontend, if any.""" return DEVICE_MAP[self._sensor_type][ DEVICE_MAP_INDEX.index('ICON_INDEX')]<|fim▁end|>
<|file_name|>view_manager.ts<|end_file_name|><|fim▁begin|>import { Injector, Inject, Provider, Injectable, ResolvedProvider, forwardRef } from 'angular2/src/core/di'; import {isPresent, isBlank, isArray} from 'angular2/src/facade/lang'; import {ListWrapper, StringMapWrapper} from 'angular2/src/facade/collection'; import {BaseException} from 'angular2/src/facade/exceptions'; import {AppView, HostViewFactory, flattenNestedViewRenderNodes} from './view'; import {AppElement} from './element'; import {ElementRef, ElementRef_} from './element_ref'; import { HostViewFactoryRef, HostViewFactoryRef_, EmbeddedViewRef, HostViewRef, ViewRef_ } from './view_ref'; import {ViewContainerRef} from './view_container_ref'; import {TemplateRef, TemplateRef_} from './template_ref'; import {AppViewListener} from './view_listener'; import {RootRenderer, RenderComponentType} from 'angular2/src/core/render/api'; import {wtfCreateScope, wtfLeave, WtfScopeFn} from '../profile/profile'; import {APP_ID} from 'angular2/src/core/application_tokens'; import {ViewEncapsulation} from 'angular2/src/core/metadata/view'; import {ViewType} from './view_type'; /** * Service exposing low level API for creating, moving and destroying Views. * * Most applications should use higher-level abstractions like {@link DynamicComponentLoader} and * {@link ViewContainerRef} instead. */ export abstract class AppViewManager { /** * Returns a {@link ViewContainerRef} of the View Container at the specified location. */ abstract getViewContainer(location: ElementRef): ViewContainerRef; /** * Returns the {@link ElementRef} that makes up the specified Host View. */ abstract getHostElement(hostViewRef: HostViewRef): ElementRef; /** * Searches the Component View of the Component specified via `hostLocation` and returns the * {@link ElementRef} for the Element identified via a Variable Name `variableName`. * * Throws an exception if the specified `hostLocation` is not a Host Element of a Component, or if * variable `variableName` couldn't be found in the Component View of this Component. */ abstract getNamedElementInComponentView(hostLocation: ElementRef, variableName: string): ElementRef; /** * Returns the component instance for the provided Host Element. */ abstract getComponent(hostLocation: ElementRef): any; /** * Creates an instance of a Component and attaches it to the first element in the global View * (usually DOM Document) that matches the component's selector or `overrideSelector`. * * This as a low-level way to bootstrap an application and upgrade an existing Element to a * Host Element. Most applications should use {@link DynamicComponentLoader#loadAsRoot} instead. * * The Component and its View are created based on the `hostProtoComponentRef` which can be * obtained * by compiling the component with {@link Compiler#compileInHost}. * * Use {@link AppViewManager#destroyRootHostView} to destroy the created Component and it's Host * View. * * ### Example * * ``` * @ng.Component({ * selector: 'child-component' * }) * @ng.View({ * template: 'Child' * }) * class ChildComponent { * * } * * @ng.Component({ * selector: 'my-app' * }) * @ng.View({ * template: ` * Parent (<some-component></some-component>) * ` * }) * class MyApp implements OnDestroy { * viewRef: ng.ViewRef; * * constructor(public appViewManager: ng.AppViewManager, compiler: ng.Compiler) { * compiler.compileInHost(ChildComponent).then((protoView: ng.ProtoComponentRef) => { * this.viewRef = appViewManager.createRootHostView(protoView, 'some-component', null); * }) * } * * ngOnDestroy() { * this.appViewManager.destroyRootHostView(this.viewRef); * this.viewRef = null; * } * } * * ng.bootstrap(MyApp); * ``` */ abstract createRootHostView(hostViewFactoryRef: HostViewFactoryRef, overrideSelector: string, injector: Injector, projectableNodes?: any[][]): HostViewRef; /** * Destroys the Host View created via {@link AppViewManager#createRootHostView}. * * Along with the Host View, the Component Instance as well as all nested View and Components are * destroyed as well. */ abstract destroyRootHostView(hostViewRef: HostViewRef); /** * Instantiates an Embedded View based on the {@link TemplateRef `templateRef`} and inserts it * into the View Container specified via `viewContainerLocation` at the specified `index`. * * Returns the {@link ViewRef} for the newly created View. * * This as a low-level way to create and attach an Embedded via to a View Container. Most * applications should used {@link ViewContainerRef#createEmbeddedView} instead. * * Use {@link AppViewManager#destroyViewInContainer} to destroy the created Embedded View. */ // TODO(i): this low-level version of ViewContainerRef#createEmbeddedView doesn't add anything new // we should make it private, otherwise we have two apis to do the same thing. abstract createEmbeddedViewInContainer(viewContainerLocation: ElementRef, index: number, templateRef: TemplateRef): EmbeddedViewRef;<|fim▁hole|> /** * Instantiates a single {@link Component} and inserts its Host View into the View Container * found at `viewContainerLocation`. Within the container, the view will be inserted at position * specified via `index`. * * The component is instantiated using its {@link ProtoViewRef `protoViewRef`} which can be * obtained via {@link Compiler#compileInHost}. * * You can optionally specify `dynamicallyCreatedProviders`, which configure the {@link Injector} * that will be created for the Host View. * * Returns the {@link HostViewRef} of the Host View created for the newly instantiated Component. * * Use {@link AppViewManager#destroyViewInContainer} to destroy the created Host View. */ abstract createHostViewInContainer( viewContainerLocation: ElementRef, index: number, hostViewFactoryRef: HostViewFactoryRef, dynamicallyCreatedProviders: ResolvedProvider[], projectableNodes: any[][]): HostViewRef; /** * Destroys an Embedded or Host View attached to a View Container at the specified `index`. * * The View Container is located via `viewContainerLocation`. */ abstract destroyViewInContainer(viewContainerLocation: ElementRef, index: number); /** * * See {@link AppViewManager#detachViewInContainer}. */ // TODO(i): refactor detachViewInContainer+attachViewInContainer to moveViewInContainer abstract attachViewInContainer(viewContainerLocation: ElementRef, index: number, viewRef: EmbeddedViewRef): EmbeddedViewRef; /** * See {@link AppViewManager#attachViewInContainer}. */ abstract detachViewInContainer(viewContainerLocation: ElementRef, index: number): EmbeddedViewRef; } @Injectable() export class AppViewManager_ extends AppViewManager { private _nextCompTypeId: number = 0; constructor(private _renderer: RootRenderer, private _viewListener: AppViewListener, @Inject(APP_ID) private _appId: string) { super(); } getViewContainer(location: ElementRef_): ViewContainerRef { return location.internalElement.getViewContainerRef(); } getHostElement(hostViewRef: ViewRef_): ElementRef { var hostView = hostViewRef.internalView; if (hostView.proto.type !== ViewType.HOST) { throw new BaseException('This operation is only allowed on host views'); } return hostView.appElements[0].ref; } getNamedElementInComponentView(hostLocation: ElementRef_, variableName: string): ElementRef { var appEl = hostLocation.internalElement; var componentView = appEl.componentView; if (isBlank(componentView)) { throw new BaseException(`There is no component directive at element ${hostLocation}`); } for (var i = 0; i < componentView.appElements.length; i++) { var compAppEl = componentView.appElements[i]; if (StringMapWrapper.contains(compAppEl.proto.directiveVariableBindings, variableName)) { return compAppEl.ref; } } throw new BaseException(`Could not find variable ${variableName}`); } getComponent(hostLocation: ElementRef_): any { return hostLocation.internalElement.getComponent(); } /** @internal */ _createRootHostViewScope: WtfScopeFn = wtfCreateScope('AppViewManager#createRootHostView()'); createRootHostView(hostViewFactoryRef: HostViewFactoryRef_, overrideSelector: string, injector: Injector, projectableNodes: any[][] = null): HostViewRef { var s = this._createRootHostViewScope(); var hostViewFactory = hostViewFactoryRef.internalHostViewFactory; var selector = isPresent(overrideSelector) ? overrideSelector : hostViewFactory.selector; var view = hostViewFactory.viewFactory(this._renderer, this, null, projectableNodes, selector, null, injector); return wtfLeave(s, view.ref); } /** @internal */ _destroyRootHostViewScope: WtfScopeFn = wtfCreateScope('AppViewManager#destroyRootHostView()'); destroyRootHostView(hostViewRef: ViewRef_) { var s = this._destroyRootHostViewScope(); var hostView = hostViewRef.internalView; hostView.renderer.detachView(flattenNestedViewRenderNodes(hostView.rootNodesOrAppElements)); hostView.destroy(); wtfLeave(s); } /** @internal */ _createEmbeddedViewInContainerScope: WtfScopeFn = wtfCreateScope('AppViewManager#createEmbeddedViewInContainer()'); createEmbeddedViewInContainer(viewContainerLocation: ElementRef_, index: number, templateRef: TemplateRef_): EmbeddedViewRef { var s = this._createEmbeddedViewInContainerScope(); var contextEl = templateRef.elementRef.internalElement; var view: AppView = contextEl.embeddedViewFactory(contextEl.parentView.renderer, this, contextEl, contextEl.parentView.projectableNodes, null, null, null); this._attachViewToContainer(view, viewContainerLocation.internalElement, index); return wtfLeave(s, view.ref); } /** @internal */ _createHostViewInContainerScope: WtfScopeFn = wtfCreateScope('AppViewManager#createHostViewInContainer()'); createHostViewInContainer(viewContainerLocation: ElementRef_, index: number, hostViewFactoryRef: HostViewFactoryRef_, dynamicallyCreatedProviders: ResolvedProvider[], projectableNodes: any[][]): HostViewRef { var s = this._createHostViewInContainerScope(); // TODO(tbosch): This should be specifiable via an additional argument! var contextEl = viewContainerLocation.internalElement; var hostViewFactory = hostViewFactoryRef.internalHostViewFactory; var view = hostViewFactory.viewFactory( contextEl.parentView.renderer, contextEl.parentView.viewManager, contextEl, projectableNodes, null, dynamicallyCreatedProviders, null); this._attachViewToContainer(view, viewContainerLocation.internalElement, index); return wtfLeave(s, view.ref); } /** @internal */ _destroyViewInContainerScope = wtfCreateScope('AppViewMananger#destroyViewInContainer()'); destroyViewInContainer(viewContainerLocation: ElementRef_, index: number) { var s = this._destroyViewInContainerScope(); var view = this._detachViewInContainer(viewContainerLocation.internalElement, index); view.destroy(); wtfLeave(s); } /** @internal */ _attachViewInContainerScope = wtfCreateScope('AppViewMananger#attachViewInContainer()'); // TODO(i): refactor detachViewInContainer+attachViewInContainer to moveViewInContainer attachViewInContainer(viewContainerLocation: ElementRef_, index: number, viewRef: ViewRef_): EmbeddedViewRef { var s = this._attachViewInContainerScope(); this._attachViewToContainer(viewRef.internalView, viewContainerLocation.internalElement, index); return wtfLeave(s, viewRef); } /** @internal */ _detachViewInContainerScope = wtfCreateScope('AppViewMananger#detachViewInContainer()'); // TODO(i): refactor detachViewInContainer+attachViewInContainer to moveViewInContainer detachViewInContainer(viewContainerLocation: ElementRef_, index: number): EmbeddedViewRef { var s = this._detachViewInContainerScope(); var view = this._detachViewInContainer(viewContainerLocation.internalElement, index); return wtfLeave(s, view.ref); } /** @internal */ onViewCreated(view: AppView) { this._viewListener.onViewCreated(view); } /** @internal */ onViewDestroyed(view: AppView) { this._viewListener.onViewDestroyed(view); } /** @internal */ createRenderComponentType(encapsulation: ViewEncapsulation, styles: Array<string | any[]>): RenderComponentType { return new RenderComponentType(`${this._appId}-${this._nextCompTypeId++}`, encapsulation, styles); } private _attachViewToContainer(view: AppView, vcAppElement: AppElement, viewIndex: number) { if (view.proto.type === ViewType.COMPONENT) { throw new BaseException(`Component views can't be moved!`); } var nestedViews = vcAppElement.nestedViews; if (nestedViews == null) { nestedViews = []; vcAppElement.nestedViews = nestedViews; } ListWrapper.insert(nestedViews, viewIndex, view); var refNode; if (viewIndex > 0) { var prevView = nestedViews[viewIndex - 1]; refNode = prevView.rootNodesOrAppElements.length > 0 ? prevView.rootNodesOrAppElements[prevView.rootNodesOrAppElements.length - 1] : null; } else { refNode = vcAppElement.nativeElement; } if (isPresent(refNode)) { var refRenderNode; if (refNode instanceof AppElement) { refRenderNode = (<AppElement>refNode).nativeElement; } else { refRenderNode = refNode; } view.renderer.attachViewAfter(refRenderNode, flattenNestedViewRenderNodes(view.rootNodesOrAppElements)); } // TODO: This is only needed when a view is destroyed, // not when it is detached for reordering with ng-for... vcAppElement.parentView.changeDetector.addContentChild(view.changeDetector); vcAppElement.traverseAndSetQueriesAsDirty(); } private _detachViewInContainer(vcAppElement: AppElement, viewIndex: number): AppView { var view = ListWrapper.removeAt(vcAppElement.nestedViews, viewIndex); if (view.proto.type === ViewType.COMPONENT) { throw new BaseException(`Component views can't be moved!`); } vcAppElement.traverseAndSetQueriesAsDirty(); view.renderer.detachView(flattenNestedViewRenderNodes(view.rootNodesOrAppElements)); // TODO: This is only needed when a view is destroyed, // not when it is detached for reordering with ng-for... view.changeDetector.remove(); return view; } }<|fim▁end|>
<|file_name|>complex_inner_join.rs<|end_file_name|><|fim▁begin|>extern crate rustorm; extern crate uuid; extern crate chrono; extern crate rustc_serialize; use uuid::Uuid; use rustorm::query::Query; use rustorm::query::Equality; use rustorm::dao::{Dao, IsDao}; use rustorm::pool::ManagedPool; #[derive(Debug, Clone)] pub struct Photo { pub photo_id: Uuid, pub url: Option<String>, } impl IsDao for Photo{ fn from_dao(dao: &Dao) -> Self { Photo { photo_id: dao.get("photo_id"), url: dao.get_opt("url"), } } fn to_dao(&self) -> Dao { let mut dao = Dao::new(); dao.set("photo_id", &self.photo_id); match self.url { Some(ref _value) => dao.set("url", _value), None => dao.set_null("url"), } dao } } fn main() { let url = "postgres://postgres:p0stgr3s@localhost/bazaar_v6"; let pool = ManagedPool::init(&url, 1).unwrap(); let db = pool.connect().unwrap(); let mut query = Query::select_all(); query.from_table("bazaar.product") .inner_join_table("bazaar.product_category", "product_category.product_id", "product.product_id") .inner_join_table("bazaar.category", "category.category_id", "product_category.category_id") .inner_join_table("product_photo", "product.product_id", "product_photo.product_id") .inner_join_table("bazaar.photo", "product_photo.photo_id", "photo.photo_id") .filter("product.name", Equality::EQ, &"GTX660 Ti videocard") .filter("category.name", Equality::EQ, &"Electronic") .group_by(vec!["category.name"]) .having("count(*)", Equality::GT, &1) .asc("product.name") .desc("product.created"); let frag = query.build(db.as_ref()); let expected = " SELECT * FROM bazaar.product INNER JOIN bazaar.product_category\x20 ON product_category.product_id = product.product_id\x20 INNER JOIN bazaar.category\x20 ON category.category_id = product_category.category_id\x20 INNER JOIN product_photo\x20 ON product.product_id = product_photo.product_id\x20 INNER JOIN bazaar.photo\x20 ON product_photo.photo_id = photo.photo_id\x20 WHERE product.name = $1\x20 AND category.name = $2\x20 GROUP BY category.name\x20 HAVING count(*) > $3\x20 ORDER BY product.name ASC, product.created DESC".to_string(); println!("actual: {{\n{}}} [{}]", frag.sql, frag.sql.len());<|fim▁hole|> println!("expected: {{{}}} [{}]", expected, expected.len()); assert!(frag.sql.trim() == expected.trim()); }<|fim▁end|>
<|file_name|>ui_fiscal_icnfefetuarpagamento.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'ui_fiscal_icnfefetuarpagamento.ui' # # Created: Mon Nov 24 22:25:57 2014 # by: pyside-uic 0.2.15 running on PySide 1.2.2 # # WARNING! All changes made in this file will be lost! from PySide import QtCore, QtGui from pydaruma.pydaruma import iCNFEfetuarPagamento_ECF_Daruma from scripts.fiscal.retornofiscal import tratarRetornoFiscal class Ui_ui_FISCAL_iCNFEfetuarPagamento(QtGui.QWidget): def __init__(self): super(Ui_ui_FISCAL_iCNFEfetuarPagamento, self).__init__() self.setupUi(self) self.pushButtonEnviar.clicked.connect(self.on_pushButtonEnviar_clicked) self.pushButtonCancelar.clicked.connect(self.on_pushButtonCancelar_clicked) def on_pushButtonEnviar_clicked(self): StrFormaPGTO = self.lineEditForma.text() StrValor = self.lineEditValor.text() StrInfo = self.lineEditInfo.text() tratarRetornoFiscal(iCNFEfetuarPagamento_ECF_Daruma(StrFormaPGTO,StrValor,StrInfo), self) def on_pushButtonCancelar_clicked(self): self.close() def setupUi(self, ui_FISCAL_iCNFEfetuarPagamento): ui_FISCAL_iCNFEfetuarPagamento.setObjectName("ui_FISCAL_iCNFEfetuarPagamento") ui_FISCAL_iCNFEfetuarPagamento.resize(531, 123) self.verticalLayout = QtGui.QVBoxLayout(ui_FISCAL_iCNFEfetuarPagamento) self.verticalLayout.setObjectName("verticalLayout") self.gridLayout = QtGui.QGridLayout() self.gridLayout.setObjectName("gridLayout") self.lineEditForma = QtGui.QLineEdit(ui_FISCAL_iCNFEfetuarPagamento) self.lineEditForma.setMaximumSize(QtCore.QSize(100, 16777215)) self.lineEditForma.setObjectName("lineEditForma") self.gridLayout.addWidget(self.lineEditForma, 0, 1, 1, 1) self.labelValor = QtGui.QLabel(ui_FISCAL_iCNFEfetuarPagamento) self.labelValor.setObjectName("labelValor") self.gridLayout.addWidget(self.labelValor, 1, 0, 1, 1) self.lineEditValor = QtGui.QLineEdit(ui_FISCAL_iCNFEfetuarPagamento) self.lineEditValor.setMaximumSize(QtCore.QSize(70, 25)) self.lineEditValor.setObjectName("lineEditValor") self.gridLayout.addWidget(self.lineEditValor, 1, 1, 1, 1) self.labelInformacao = QtGui.QLabel(ui_FISCAL_iCNFEfetuarPagamento) self.labelInformacao.setObjectName("labelInformacao") self.gridLayout.addWidget(self.labelInformacao, 2, 0, 1, 1) self.lineEditInfo = QtGui.QLineEdit(ui_FISCAL_iCNFEfetuarPagamento) self.lineEditInfo.setMinimumSize(QtCore.QSize(401, 20)) self.lineEditInfo.setObjectName("lineEditInfo") self.gridLayout.addWidget(self.lineEditInfo, 2, 1, 1, 1) self.labelForma = QtGui.QLabel(ui_FISCAL_iCNFEfetuarPagamento) self.labelForma.setObjectName("labelForma")<|fim▁hole|> spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) self.horizontalLayout.addItem(spacerItem) self.pushButtonEnviar = QtGui.QPushButton(ui_FISCAL_iCNFEfetuarPagamento) self.pushButtonEnviar.setObjectName("pushButtonEnviar") self.horizontalLayout.addWidget(self.pushButtonEnviar) self.pushButtonCancelar = QtGui.QPushButton(ui_FISCAL_iCNFEfetuarPagamento) self.pushButtonCancelar.setObjectName("pushButtonCancelar") self.horizontalLayout.addWidget(self.pushButtonCancelar) spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum) self.horizontalLayout.addItem(spacerItem1) self.verticalLayout.addLayout(self.horizontalLayout) self.retranslateUi(ui_FISCAL_iCNFEfetuarPagamento) QtCore.QMetaObject.connectSlotsByName(ui_FISCAL_iCNFEfetuarPagamento) def retranslateUi(self, ui_FISCAL_iCNFEfetuarPagamento): ui_FISCAL_iCNFEfetuarPagamento.setWindowTitle(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Método iCNFEfetuarPagamento_ECF_Daruma", None, QtGui.QApplication.UnicodeUTF8)) self.lineEditForma.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Dinheiro", None, QtGui.QApplication.UnicodeUTF8)) self.labelValor.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Valor:", None, QtGui.QApplication.UnicodeUTF8)) self.lineEditValor.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "10,00", None, QtGui.QApplication.UnicodeUTF8)) self.labelInformacao.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Informação Adicional:", None, QtGui.QApplication.UnicodeUTF8)) self.lineEditInfo.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Obrigado Volte Sempre! DFW Efetua Forma pagamento com mensagem adicional.", None, QtGui.QApplication.UnicodeUTF8)) self.labelForma.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Forma Pagto:", None, QtGui.QApplication.UnicodeUTF8)) self.pushButtonEnviar.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Enviar", None, QtGui.QApplication.UnicodeUTF8)) self.pushButtonCancelar.setText(QtGui.QApplication.translate("ui_FISCAL_iCNFEfetuarPagamento", "Cancelar", None, QtGui.QApplication.UnicodeUTF8))<|fim▁end|>
self.gridLayout.addWidget(self.labelForma, 0, 0, 1, 1) self.verticalLayout.addLayout(self.gridLayout) self.horizontalLayout = QtGui.QHBoxLayout() self.horizontalLayout.setObjectName("horizontalLayout")
<|file_name|>sys3_process.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- import win32process import win32api import win32con import ctypes import os, sys, string TH32CS_SNAPPROCESS = 0x00000002 class PROCESSENTRY32(ctypes.Structure): _fields_ = [("dwSize", ctypes.c_ulong), ("cntUsage", ctypes.c_ulong), ("th32ProcessID", ctypes.c_ulong), ("th32DefaultHeapID", ctypes.c_ulong), ("th32ModuleID", ctypes.c_ulong), ("cntThreads", ctypes.c_ulong), ("th32ParentProcessID", ctypes.c_ulong), ("pcPriClassBase", ctypes.c_ulong), ("dwFlags", ctypes.c_ulong), ("szExeFile", ctypes.c_char * 260)] def getProcList(): CreateToolhelp32Snapshot = ctypes.windll.kernel32.CreateToolhelp32Snapshot Process32First = ctypes.windll.kernel32.Process32First Process32Next = ctypes.windll.kernel32.Process32Next CloseHandle = ctypes.windll.kernel32.CloseHandle hProcessSnap = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0) pe32 = PROCESSENTRY32() pe32.dwSize = ctypes.sizeof(PROCESSENTRY32) if Process32First(hProcessSnap,ctypes.byref(pe32)) == False: return while True: yield pe32 if Process32Next(hProcessSnap,ctypes.byref(pe32)) == False: break CloseHandle(hProcessSnap) def GetProcessModules( pid ): handle = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, False, pid ) hModule = win32process.EnumProcessModules(handle) temp=[] for i in hModule: temp.append([hex(i),debugfile(win32process.GetModuleFileNameEx(handle,i))]) win32api.CloseHandle(handle) <|fim▁hole|> def CloseProcess( pid ): handle = win32api.OpenProcess(win32con.PROCESS_ALL_ACCESS, False, pid ) exitcode = win32process.GetExitCodeProcess( handle ) win32api.TerminateProcess(handle, exitcode) win32api.CloseHandle(handle) def debugfile(file): if (file.split("\\")[-1]=="smss.exe"): file = "C:\\WINDOWS\\system32\\smss.exe" return file elif (file.split("\\")[-1]=="csrss.exe"): file = "C:\\WINDOWS\\system32\\csrss.exe" return file elif (file.split("\\")[-1]=="winlogon.exe"): file = "C:\\WINDOWS\\system32\\winlogon.exe" return file else: return file if __name__ =='__main__': #调用procup.dll的enableDebugPriv函数对本进程提权 procupdll=ctypes.cdll.LoadLibrary("InjectAssist.dll") self_pid = procupdll.GetPIDbyName('services.exe') print self_pid if procupdll.EnableOpenprocPriv()==0: print "提权失败" count = 0 procList = getProcList() for proc in procList: count+=1 print("name=%s\tfather=%d\tid=%d" % (proc.szExeFile, proc.th32ParentProcessID, proc.th32ProcessID)) try: TempGet=GetProcessModules(proc.th32ProcessID) except Exception, e: print "pid:%d can't read"%(proc.th32ProcessID) continue #TempGet[0][1].split("\\")[-1] 路径的最后一部分 #''' #枚举进程调用所有模块 for tempnum in range(0,len(TempGet)): try: print TempGet except Exception,e: print e #''' print "进程数:%d"%(count)<|fim▁end|>
return temp
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>// rscheme -- a scheme interpreter written in Rust // Copyright (C) {2015) Elizabeth Henry <[email protected]> // This program is free software; you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation; either version 2 of the License, or // (at your option) any later version. #[macro_use] extern crate log; mod lexer; mod read; mod eval; mod expr; mod init; #[cfg(test)] mod tests; use lexer::Token; use lexer::Lexer; use std::io::{self,BufRead}; use std::io::Write; fn repl() { let mut c = eval::Context::new(); c = c.eval_file("data/init.scm"); let stdin = io::stdin(); let mut stdout = io::stdout(); let mut tokens:Vec<Token> = vec!(); let mut n_par = 0; loop { if n_par == 0 { print!("=> "); let r = stdout.flush(); match r { Ok(_) => (), Err(_) => { error!("Error flushing stdout. abort"); break; } } } <|fim▁hole|> { let mut l = Lexer::new(&cs,&mut tokens); l.with_n_par(n_par); n_par = l.tokenize(); } if n_par == 0 { let es = read::read(&tokens); tokens = vec!(); for e in es { c = c.eval_expr(e.clone()); if c.error { c.error = false; break; } else { println!("{}", c.expr); info!("{:?}", c.env) } } } else { continue; } } } fn main() { init::init(); repl(); }<|fim▁end|>
let mut line = String::new(); stdin.lock().read_line(&mut line).unwrap(); let cs = line.chars().collect();
<|file_name|>models.py<|end_file_name|><|fim▁begin|>import datetime from django.db import models, transaction class Key(models.Model): uid = models.CharField(max_length=255, unique=True) name = models.TextField() created = models.DateTimeField(default=datetime.datetime.utcnow) class Meta: ordering = ('-created',) get_latest_by = 'created' def __unicode__(self): return u"pk=%d uid=%r name=%r" % ( self.pk, self.uid, self.name, ) def save(self, *args, **kwargs): created = not self.pk <|fim▁hole|> transaction.on_commit(lambda: update_or_create_key.delay(self.uid))<|fim▁end|>
super(Key, self).save(*args, **kwargs) if created: from .tasks import update_or_create_key
<|file_name|>shardcounter_sync.py<|end_file_name|><|fim▁begin|>import random from google.appengine.api import memcache from google.appengine.ext import ndb SHARD_KEY_TEMPLATE = 'shard-{}-{:d}' class GeneralCounterShardConfig(ndb.Model): num_shards = ndb.IntegerProperty(default=20) @classmethod def all_keys(cls, name): config = cls.get_or_insert(name) shard_key_strings = [SHARD_KEY_TEMPLATE.format(name, index) for index in range(config.num_shards)] return [ndb.Key(GeneralCounterShard, shard_key_string) for shard_key_string in shard_key_strings] class GeneralCounterShard(ndb.Model): count = ndb.IntegerProperty(default=0) def get_count(name): total = memcache.get(name) if total is None: total = 0 parent_key = ndb.Key('ShardCounterParent', name) shard_query = GeneralCounterShard.query(ancestor=parent_key) shard_counters = shard_query.fetch(limit=None) for counter in shard_counters: if counter is not None: total += counter.count memcache.add(name, total, 7200) # 2 hours to expire return total def increment(name): config = GeneralCounterShardConfig.get_or_insert(name) return _increment(name, config.num_shards) @ndb.transactional def _increment(name, num_shards): index = random.randint(0, num_shards - 1) shard_key_string = SHARD_KEY_TEMPLATE.format(name, index) parent_key = ndb.Key('ShardCounterParent', name) counter = GeneralCounterShard.get_by_id(shard_key_string, parent = parent_key) <|fim▁hole|> counter.count += 1 counter.put() rval = memcache.incr(name) # Memcache increment does nothing if the name is not a key in memcache if rval is None: return get_count(name) return rval @ndb.transactional def increase_shards(name, num_shards): config = GeneralCounterShardConfig.get_or_insert(name) if config.num_shards < num_shards: config.num_shards = num_shards config.put()<|fim▁end|>
if counter is None: counter = GeneralCounterShard(parent = parent_key, id=shard_key_string)
<|file_name|>cpuid.rs<|end_file_name|><|fim▁begin|>use std::str; use std::slice; use std::mem; use byteorder::{LittleEndian, WriteBytesExt}; const VENDOR_INFO: u32 = 0x0; const FEATURE_INFO: u32 = 0x1; const EXT_FEATURE_INFO: u32 = 0x7; const EXT_PROCESSOR_INFO: u32 = 0x80000001; #[cfg(target_arch = "x86_64")] pub fn cpuid(func: u32) -> CpuIdInfo { let (rax, rbx, rcx, rdx); unsafe { asm!("cpuid" : // output operands "={rax}"(rax), "={rbx}"(rbx), "={rcx}"(rcx), "={rdx}"(rdx) : // input operands "{rax}"(func), "{rcx}"(0 as u32) : // clobbers : // options ); } CpuIdInfo { rax: rax, rbx: rbx, rcx: rcx, rdx: rdx } } // Rename to something better pub struct CpuIdInfo { pub rax: u32, pub rbx: u32, pub rcx: u32, pub rdx: u32, } pub struct CpuId { pub highest_func_param: u32, pub vendor_info: CpuIdInfo, pub feature_info: CpuIdInfo, pub ext_feature_info: CpuIdInfo, pub ext_processor_info: CpuIdInfo } impl CpuId { pub fn detect() -> CpuId { CpuId { highest_func_param: cpuid(VENDOR_INFO).rax, vendor_info: cpuid(VENDOR_INFO), feature_info: cpuid(FEATURE_INFO), ext_feature_info: cpuid(EXT_FEATURE_INFO), ext_processor_info: cpuid(EXT_PROCESSOR_INFO) } } } #[test] fn test_usage() { let v = cpuid(VENDOR_INFO); let mut wtr: Vec<u8> = vec![]; wtr.write_u32::<LittleEndian>(v.rbx).unwrap(); wtr.write_u32::<LittleEndian>(v.rdx).unwrap();<|fim▁hole|>}<|fim▁end|>
wtr.write_u32::<LittleEndian>(v.rcx).unwrap(); let string = String::from_utf8(wtr).unwrap(); assert!(string == "AuthenticAMD" || string == "GenuineIntel")
<|file_name|>Mark.java<|end_file_name|><|fim▁begin|>package domain; import java.util.Date; /** * @author Verbroucht Johann * Test Java Date : 15 aožt. 2011 */ public class Mark { private Date date; private int mark; public Mark(Date _date, int _mark) { this.date = _date; this.mark = _mark; } public Date getDate() { return date; } public void setDate(Date date) { this.date = date; } <|fim▁hole|> return mark; } public void setMark(int mark) { this.mark = mark; } }<|fim▁end|>
public int getMark() {
<|file_name|>16.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
export { Mpeg16 as default } from "../../";
<|file_name|>RemoveLogs.cpp<|end_file_name|><|fim▁begin|>// Mantid Repository : https://github.com/mantidproject/mantid // // Copyright &copy; 2018 ISIS Rutherford Appleton Laboratory UKRI, // NScD Oak Ridge National Laboratory, European Spallation Source // & Institut Laue - Langevin // SPDX - License - Identifier: GPL - 3.0 + //---------------------------------------------------------------------- // Includes //---------------------------------------------------------------------- #include "MantidDataHandling/RemoveLogs.h" #include "MantidAPI/FileProperty.h"<|fim▁hole|>#include "MantidKernel/Glob.h" #include "MantidKernel/PropertyWithValue.h" #include "MantidKernel/Strings.h" #include "MantidKernel/TimeSeriesProperty.h" #include <Poco/DateTimeFormat.h> #include <Poco/DateTimeParser.h> #include <Poco/DirectoryIterator.h> #include <Poco/File.h> #include <Poco/Path.h> #include <boost/algorithm/string.hpp> #include <algorithm> #include <fstream> // used to get ifstream #include <sstream> namespace Mantid { namespace DataHandling { // Register the algorithm into the algorithm factory DECLARE_ALGORITHM(RemoveLogs) using namespace Kernel; using namespace API; using DataObjects::Workspace2D_sptr; /// Empty default constructor RemoveLogs::RemoveLogs() {} /// Initialisation method. void RemoveLogs::init() { // When used as a Child Algorithm the workspace name is not used - hence the // "Anonymous" to satisfy the validator declareProperty( make_unique<WorkspaceProperty<MatrixWorkspace>>("Workspace", "Anonymous", Direction::InOut), "The name of the workspace to which the log data will be removed"); declareProperty( make_unique<ArrayProperty<std::string>>("KeepLogs", Direction::Input), "List(comma separated) of logs to be kept"); } /** Executes the algorithm. Reading in log file(s) * * @throw Mantid::Kernel::Exception::FileError Thrown if file is not *recognised to be a raw datafile or log file * @throw std::runtime_error Thrown with Workspace problems */ void RemoveLogs::exec() { // Get the input workspace and retrieve run from workspace. // the log file(s) will be loaded into the run object of the workspace const MatrixWorkspace_sptr localWorkspace = getProperty("Workspace"); const std::vector<Mantid::Kernel::Property *> &logData = localWorkspace->run().getLogData(); std::vector<std::string> keepLogs = getProperty("KeepLogs"); std::vector<std::string> logNames; logNames.reserve(logData.size()); for (const auto property : logData) { logNames.push_back(property->name()); } for (const auto &name : logNames) { auto location = std::find(keepLogs.cbegin(), keepLogs.cend(), name); if (location == keepLogs.cend()) { localWorkspace->mutableRun().removeLogData(name); } } // operation was a success and ended normally return; } } // namespace DataHandling } // namespace Mantid<|fim▁end|>
#include "MantidDataObjects/Workspace2D.h" #include "MantidKernel/ArrayProperty.h"
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from os import path <|fim▁hole|> def get_html_theme_path(): """Return list of HTML theme paths.""" cur_dir = path.abspath(path.dirname(path.dirname(__file__))) return cur_dir def setup(app): app.add_html_theme( 'divio_docs_theme', path.abspath(path.dirname(__file__)) )<|fim▁end|>
__version__ = '0.0.22' __version_full__ = __version__
<|file_name|>builtin.torrent.onstart.settorrentlabels.py<|end_file_name|><|fim▁begin|>from torrentstatus.plugin import iTorrentAction from torrentstatus.utorrent.connection import Connection from contextlib import contextmanager from torrentstatus.bearlang import BearLang from torrentstatus.settings import config, labels_config from torrentstatus.utils import intTryParse @contextmanager def utorrent_connection(host, username, password): try: conn = Connection(host, username, password).utorrent(None) except Exception as err: yield None, err else: try: yield conn, None finally: pass def get_new_torrent_labels(labels, args): """Transforms torrent labels and args passing them into a BearLang Instance Parameters: labels (Dict) A dict of label and rules for that label args (Dict) A dict of arguments, will be passed to Bearlang Returns: a list of labels that match the rules defined. """ new_labels = [] for label, ruleset in labels.items(): # multiple rules accepted when configparser uses MultiOrderedDict rules = ruleset.split("\n") for rule in rules: rule = rule.strip() parser = BearLang(rule, args) is_match = parser.execute() print("\nrule:{0}, label:{1}, ismatch: {2}\n".format(rule, label, is_match)) if is_match: new_labels.append(label) return new_labels settings = config.getSettingsAsDict() <|fim▁hole|> class SetLabelsOnStart(iTorrentAction): def onstart(self, pluginconfig, utorrentargs): tempargs = vars(utorrentargs) # Use labels definition from config file and match them up against # provided input to the main script labels = labels_config.getSettingsAsDict() new_labels = get_new_torrent_labels(labels, tempargs) #only connect to utorrent if we need to do a label change if new_labels and intTryParse(settings["webui_enable"]) == 1: with utorrent_connection(settings["webui_host"], settings["webui_username"], settings["webui_password"]) as (conn, err): if err: print("Could not connect to webui, make sure webui_host, " "webui_username and webui_password is correctly " "defined in configuration file. Error:{0}".format(err)) else: print("Connection to utorrent web ui ok") print ("Got torrent '{0}' with hash {1} and tracker {2}. \n Setting new_labels: {3}" .format(utorrentargs.torrentname, utorrentargs.hash, utorrentargs.tracker, new_labels)) if utorrentargs.debug: print("debug mode on, not doing update") return #remove existing label conn.torrent_set_props([{utorrentargs.hash: {'label': ''}}]) #set new labels for new_label in new_labels: conn.torrent_set_props([{utorrentargs.hash: {'label': new_label}}]) return True else: print("Not trying to connect to webui") return False<|fim▁end|>
<|file_name|>cs2quiz1.py<|end_file_name|><|fim▁begin|>#40/40 #Part 1: Terminology (15 points) --> 15/15 #1 1pt) What is the symbol "=" used for? #to assign and store values to and in variables # 1pt # #2 3pts) Write a technical definition for 'function' #a named sequence of calculations which takes input and returns output # 3pts # #3 1pt) What does the keyword "return" do? #it gives back the output or result of the function # 1pt # #4 5pts) We know 5 basic data types. Write the name for each one and provide two # examples of each below # 1: integer ex: 1, 2 # 2: floating point ex: 1.2, 1.3 # 3: string ex: "hi", "hello" # 4: boolean ex: True, False # 5: tuple ex: ("HEllo", 3), ("Bob", 10, "fat") # 5pts # #5 2pts) What is the difference between a "function definition" and a # "function call"? #a function definition does not result in any output being presented, it simply defines a set of calculations which are run if and only if they are called by a function call # 2pts # #<|fim▁hole|># 2:processing (the program does something with those input values to for instance calculate something) # 3:output (the program returns the product of its labours (processing) often a something printed # 3pts # #Part 2: Programming (25 points) --> 25/25 #Write a program that asks the user for the areas of 3 circles. #It should then calculate the diameter of each and the sum of the diameters #of the 3 circles. #Finally, it should produce output like this: #Circle Diameter #c1 ... #c2 ... #c3 ... #TOTALS ... # Hint: Radius is the square root of the area divided by pi (a = pi(r)^2) so r = sqrt(a/pi) import math #1 pt for header line #3 pt for correct formula #1 pt for return value #1 pt for parameter name #1 pt for function name def circarea_to_diameter(circarea): return 2 * (math.sqrt(circarea/math.pi)) #finds radius and multiples by 2 to get diameter def sum_three(x, y, z): #takes three values and adds them return x + y + z #1pt for header line #1pt for parameter names #1pt for return value #1pt for correct output format #3pt for correct use of format function def output(d1, d2, d3, total): return """ Circle Diameter C1 {} C2 {} C3 {} Totals {} """.format(d1, d2, d3, total) #1pt header line #1pt getting input #1pt converting input #1pt for calling output function #2pt for correct diameter formula #1pt for variable names def main(): #input C1 = raw_input("Area of C1: ") C2 = raw_input("Area of C2: ") C3 = raw_input("Area of C3: ") #processing d1 = circarea_to_diameter(float(C1)) d2 = circarea_to_diameter(float(C2)) d3 = circarea_to_diameter(float(C3)) total = sum_three(d1, d2, d3) #output print output(d1, d2, d3, total) #1pt for calling main main() #1pt explanatory comments #1pt code format<|fim▁end|>
#6 3pts) What are the 3 phases that every computer program has? What happens in # each of them # 1:input (the program takes some input values, most often from the user)
<|file_name|>devops_borat.py<|end_file_name|><|fim▁begin|>from random import choice from feedparser import parse from errbot import botcmd, BotPlugin class DevOpsBorat(BotPlugin): """ Quotes from various dev humour related twitter accounts """ @botcmd def borat(self, mess, args): """ Random quotes from the DEVOPS_BORAT twitter account """ myfeed = parse('http://api.twitter.com/1/statuses/user_timeline.rss?screen_name=DEVOPS_BORAT') items = myfeed['entries']<|fim▁hole|> @botcmd def jesus(self, mess, args): """ Random quotes from the devops_jesus twitter account """ myfeed = parse('http://api.twitter.com/1/statuses/user_timeline.rss?screen_name=devops_jesus') items = myfeed['entries'] return choice(items).description @botcmd def yoda(self, mess, args): """ Random quotes from the UXYoda twitter account """ myfeed = parse('http://api.twitter.com/1/statuses/user_timeline.rss?screen_name=UXYoda') items = myfeed['entries'] return choice(items).description<|fim▁end|>
return choice(items).description
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages import sys, os<|fim▁hole|>used for publishing the summary statistics of Grape, a pipeline used for processing and analyzing RNA-Seq data.""" setup(name='raisin.restyler', version=version, description="A package used in the Raisin web application", long_description=long_description, classifiers=[ 'Development Status :: 5 - Production/Stable', 'Programming Language :: Python', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Natural Language :: English', 'Topic :: Software Development :: Libraries :: Python Modules', 'Operating System :: POSIX :: Linux'], keywords='RNA-Seq pipeline ngs transcriptome bioinformatics ETL', author='Maik Roder', author_email='[email protected]', url='http://big.crg.cat/services/grape', license='GPL', packages=find_packages(exclude=['ez_setup', 'examples', 'tests']), namespace_packages = ['raisin'], package_data = {'raisin.restyler':['templates/*.pt']}, include_package_data=True, zip_safe=False, install_requires=[ # -*- Extra requirements: -*- 'configobj', 'zope.pagetemplate' ], entry_points=""" # -*- Entry points: -*- """, )<|fim▁end|>
version = '1.3' long_description = """The raisin.restyler package is a part of Raisin, the web application
<|file_name|>css_parser.cpp<|end_file_name|><|fim▁begin|>/* Copyright (C) 2003-2013 by Kristina Simpson <[email protected]> This software is provided 'as-is', without any express or implied warranty. In no event will the authors be held liable for any damages arising from the use of this software. Permission is granted to anyone to use this software for any purpose, including commercial applications, and to alter it and redistribute it freely, subject to the following restrictions: 1. The origin of this software must not be misrepresented; you must not claim that you wrote the original software. If you use this software in a product, an acknowledgment in the product documentation would be appreciated but is not required. 2. Altered source versions must be plainly marked as such, and must not be misrepresented as being the original software. 3. This notice may not be removed or altered from any source distribution. */ #include "asserts.hpp" #include "css_parser.hpp" #include "css_properties.hpp" #include "unit_test.hpp" namespace css { namespace { // rules class AtRule : public Token { public: AtRule(const std::string& name) : Token(TokenId::AT_RULE_TOKEN), name_(name) {} std::string toString() const override { std::ostringstream ss; for(auto& p : getParameters()) { ss << " " << p->toString(); } return formatter() << "@" << name_ << "(" << ss.str() << ")"; } private: std::string name_; }; class RuleToken : public Token { public: RuleToken() : Token(TokenId::RULE_TOKEN) {} std::string toString() const override { std::ostringstream ss; for(auto& p : getParameters()) { ss << " " << p->toString(); } return formatter() << "QualifiedRule(" << ss.str() << ")"; } private: }; class BlockToken : public Token { public: BlockToken() : Token(TokenId::BLOCK_TOKEN) {} explicit BlockToken(const std::vector<TokenPtr>& params) : Token(TokenId::BLOCK_TOKEN) { addParameters(params); } std::string toString() const override { std::ostringstream ss; for(auto& p : getParameters()) { ss << " " << p->toString(); } return formatter() << "BlockToken(" << ss.str() << ")"; } variant value() override { return variant(); } private: }; class SelectorToken : public Token { public: SelectorToken() : Token(TokenId::SELECTOR_TOKEN) {} std::string toString() const override { std::ostringstream ss; for(auto& p : getParameters()) { ss << " " << p->toString(); } return formatter() << "Selector(" << ss.str() << ")"; }; }; class DeclarationParser { public: DeclarationParser(Tokenizer::const_iterator begin, Tokenizer::const_iterator end) : it_(begin), end_(end), pp_() { while(isToken(TokenId::WHITESPACE)) { advance(); } if(isToken(TokenId::IDENT)) { parseDeclarationList(&pp_); } else if(isToken(TokenId::BLOCK_TOKEN)) { auto old_it = it_; auto old_end = end_; it_ = (*old_it)->getParameters().begin(); end_ = (*old_it)->getParameters().end(); parseDeclarationList(&pp_); it_ = old_it; end_ = old_end; advance(); } else if(isToken(TokenId::LBRACE)) { advance(); parseDeclarationList(&pp_); } else if(isToken(TokenId::EOF_TOKEN)) { throw ParserError("expected block declaration"); } } static PropertyList parseTokens(const std::vector<TokenPtr>& tokens) { //std::vector<TokenPtr> toks = preProcess(tokens.begin(), tokens.end()); DeclarationParser p(tokens.begin(), tokens.end()); return p.getProperties(); } PropertyList getProperties() { return pp_.getPropertyList(); } static std::vector<TokenPtr> preProcess(Tokenizer::const_iterator it, Tokenizer::const_iterator end) { std::vector<TokenPtr> res; while(it != end) { auto tok = *it; if(tok->id() == TokenId::FUNCTION) { auto fn_token = tok; ++it; bool done = false; while(!done && it != end) { tok = *it; if(tok->id() == TokenId::EOF_TOKEN || tok->id() == TokenId::RPAREN || tok->id() == TokenId::SEMICOLON) { ++it; done = true; } else { // this is a cut-down fn_token->addParameter(tok); ++it; } } res.emplace_back(fn_token); } else { res.emplace_back(tok); ++it; } } return res; } private: PropertyParser pp_; void advance(int n = 1) { if(it_ == end_) { return; } it_ += n; } bool isToken(TokenId value) { if(it_ == end_ ) { return value == TokenId::EOF_TOKEN ? true : false; } return (*it_)->id() == value; } bool isNextToken(TokenId value) { auto next = it_+1; if(next == end_) { return false; } return (*next)->id() == value; } void parseDeclarationList(PropertyParser* pp) { while(true) { while(isToken(TokenId::WHITESPACE)) { advance(); } if(isToken(TokenId::RBRACE)) { advance(); return; } if(isToken(TokenId::EOF_TOKEN) || it_ == end_) { return; } try { parseDeclaration(pp); } catch (ParserError& e) { LOG_ERROR("Dropping declaration: " << e.what()); while(!isToken(TokenId::SEMICOLON) && !isToken(TokenId::RBRACE) && !isToken(TokenId::EOF_TOKEN)) { advance(); } } while(isToken(TokenId::WHITESPACE)) { advance(); } if(isToken(TokenId::SEMICOLON)) { advance(); } else if(!isToken(TokenId::RBRACE) && !isToken(TokenId::EOF_TOKEN)) { throw ParserError("Expected semicolon."); } } } void parseDeclaration(PropertyParser* pp) { // assume first token is ident std::string property = (*it_)->getStringValue(); advance(); while(isToken(TokenId::WHITESPACE)) { advance(); } if(!isToken(TokenId::COLON)) { throw ParserError(formatter() << "Expected ':' in declaration, while parsing property: " << property); } advance(); while(isToken(TokenId::WHITESPACE)) { advance(); } // check for 'inherit' which is common to all properties if(isToken(TokenId::IDENT) && (*it_)->getStringValue() == "inherit") { advance(); pp->inheritProperty(property); } else { it_ = pp->parse(property, it_, end_); } while(isToken(TokenId::WHITESPACE)) { advance(); } if(isTokenDelimiter("!")) { advance(); while(isToken(TokenId::WHITESPACE)) { advance(); } if(isToken(TokenId::IDENT)) { const std::string ref = (*it_)->getStringValue(); advance(); if(ref == "important") { // add important tag to the rule in plist. // XXX this should apply to only the last member added! for(auto& pl : pp->getPropertyList()) { pl.second.style->setImportant(true); } } } } } bool isTokenDelimiter(const std::string& ch) { return isToken(TokenId::DELIM) && (*it_)->getStringValue() == ch; } std::vector<TokenPtr>::const_iterator it_; std::vector<TokenPtr>::const_iterator end_; }; } Parser::Parser(StyleSheetPtr ss, const std::vector<TokenPtr>& tokens) : style_sheet_(ss), tokens_(tokens), token_(tokens_.begin()), end_(tokens_.end()) { } void Parser::parse(StyleSheetPtr ss, const std::string& str) { css::Tokenizer tokens(str); Parser p(ss, tokens.getTokens()); p.init(); } TokenId Parser::currentTokenType() { if(token_ == end_) { return TokenId::EOF_TOKEN; } return (*token_)->id(); } void Parser::advance(int n) { if(token_ != end_) { std::advance(token_, n); } } std::vector<TokenPtr> Parser::pasrseRuleList(int level) { std::vector<TokenPtr> rules; while(true) { if(currentTokenType() == TokenId::WHITESPACE) { advance(); continue; } else if(currentTokenType() == TokenId::EOF_TOKEN) { return rules; } else if(currentTokenType() == TokenId::CDO || currentTokenType() == TokenId::CDC) { if(level == 0) { advance(); continue; } rules.emplace_back(parseQualifiedRule()); } else if(currentTokenType() == TokenId::AT) { rules.emplace_back(parseAtRule()); } else { rules.emplace_back(parseQualifiedRule()); } } return rules; } TokenPtr Parser::parseAtRule() { variant value = (*token_)->value(); auto rule = std::make_shared<AtRule>(value.as_string()); advance(); while(true) { if(currentTokenType() == TokenId::SEMICOLON || currentTokenType() == TokenId::EOF_TOKEN) { return rule; } else if(currentTokenType() == TokenId::LBRACE) { advance(); rule->addParameters(parseBraceBlock()); } else if(currentTokenType() == TokenId::LPAREN) { advance(); rule->addParameters(parseParenBlock()); } else if(currentTokenType() == TokenId::LBRACKET) { advance(); rule->addParameters(parseBracketBlock()); } } return nullptr; } TokenPtr Parser::parseQualifiedRule() { auto rule = std::make_shared<RuleToken>(); while(true) { if(currentTokenType() == TokenId::EOF_TOKEN) { LOG_ERROR("EOF token while parsing qualified rule prelude."); return nullptr; } else if(currentTokenType() == TokenId::LBRACE) { advance(); rule->setValue(std::make_shared<BlockToken>(parseBraceBlock())); return rule; } else { rule->addParameter(parseComponentValue()); } } return nullptr; } PropertyList Parser::parseDeclarationList(const std::string& str) { css::Tokenizer tokens(str); Parser p(nullptr, tokens.getTokens()); return DeclarationParser::parseTokens(p.parseBraceBlock()); } StylePtr Parser::parseSingleDeclaration(const std::string& str) { css::Tokenizer tokens(str); Parser p(nullptr, tokens.getTokens()); auto plist = DeclarationParser::parseTokens(p.parseBraceBlock()); if(plist.empty()) {<|fim▁hole|> } return plist.begin()->second.style; } TokenPtr Parser::parseComponentValue() { if(currentTokenType() == TokenId::LBRACE) { advance(); return std::make_shared<BlockToken>(parseBraceBlock()); } else if(currentTokenType() == TokenId::FUNCTION) { return parseFunction(); } auto tok = *token_; advance(); return tok; } std::vector<TokenPtr> Parser::parseBraceBlock() { std::vector<TokenPtr> res; while(true) { if(currentTokenType() == TokenId::EOF_TOKEN || currentTokenType() == TokenId::RBRACE) { advance(); return res; } else { res.emplace_back(parseComponentValue()); } } return res; } std::vector<TokenPtr> Parser::parseParenBlock() { std::vector<TokenPtr> res; res.emplace_back(*token_); while(true) { if(currentTokenType() == TokenId::EOF_TOKEN || currentTokenType() == TokenId::RPAREN) { advance(); return res; } else { res.emplace_back(parseComponentValue()); } } return res; } std::vector<TokenPtr> Parser::parseBracketBlock() { std::vector<TokenPtr> res; res.emplace_back(*token_); while(true) { if(currentTokenType() == TokenId::EOF_TOKEN || currentTokenType() == TokenId::RBRACKET) { advance(); return res; } else { res.emplace_back(parseComponentValue()); } } return res; } TokenPtr Parser::parseFunction() { auto fn_token = *token_; advance(); while(true) { if(currentTokenType() == TokenId::EOF_TOKEN || currentTokenType() == TokenId::RPAREN) { advance(); return fn_token; } else { fn_token->addParameter(parseComponentValue()); } } return fn_token; } void Parser::init() { for(auto& token : pasrseRuleList(0)) { try { parseRule(token); } catch(ParserError& e) { LOG_DEBUG("Dropping rule: " << e.what() << " " << (token != nullptr ? token->toString() : "")); } } } void Parser::parseRule(TokenPtr rule) { if(rule == nullptr) { throw ParserError("Trying to parse empty rule."); } auto prelude = rule->getParameters().begin(); while((*prelude)->id() == TokenId::WHITESPACE) { ++prelude; } if((*prelude)->id() == TokenId::AT_RULE_TOKEN) { // parse at rule // XXX temporarily skip @ rules. //while(!(*prelude)->isToken(TokenId::SEMICOLON) && !(*prelude)->isToken(TokenId::RBRACE) && prelude != rule->getPrelude().end()) { //} ASSERT_LOG(false, "fix @ rules."); } else { CssRulePtr css_rule = std::make_shared<CssRule>(); css_rule->selectors = Selector::parseTokens(rule->getParameters()); css_rule->declaractions = DeclarationParser::parseTokens(rule->getValue()->getParameters()); // Go through the properties and mark any that need to be handled with transitions //css_rule->declaractions.markTransitions(); style_sheet_->addRule(css_rule); } } } UNIT_TEST(css_declarations) { css::PropertyList pl = css::Parser::parseDeclarationList("color: rgb(100%,0,0);"); CHECK_EQ(pl.hasProperty(css::Property::COLOR), true); /*pl = css::Parser::parseDeclarationList("background: rgb(128,64,64) url(radial_gradient.png) repeat; color: rgb(128,255,128);"); CHECK_EQ(pl.hasProperty(css::Property::COLOR), true); CHECK_EQ(pl.hasProperty(css::Property::BACKGROUND_IMAGE), true); CHECK_EQ(pl.hasProperty(css::Property::BACKGROUND_COLOR), true); CHECK_EQ(pl.hasProperty(css::Property::BACKGROUND_REPEAT), true);*/ pl = css::Parser::parseDeclarationList("color: #ff0 !important; font-family: 'Arial'; color: hsl(360,0,0)"); CHECK_EQ(pl.hasProperty(css::Property::COLOR), true); CHECK_EQ(pl.hasProperty(css::Property::FONT_FAMILY), true); pl = css::Parser::parseDeclarationList("background: linear-gradient(45deg, blue, red)"); CHECK_EQ(pl.hasProperty(css::Property::BACKGROUND_IMAGE), true); }<|fim▁end|>
return nullptr;
<|file_name|>View.js<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2015-present, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @providesModule View * @flow */ 'use strict'; const EdgeInsetsPropType = require('EdgeInsetsPropType'); const NativeMethodsMixin = require('NativeMethodsMixin'); const NativeModules = require('NativeModules'); const Platform = require('Platform'); const React = require('React'); const ReactNativeStyleAttributes = require('ReactNativeStyleAttributes'); const ReactNativeViewAttributes = require('ReactNativeViewAttributes'); const StyleSheetPropType = require('StyleSheetPropType'); const ViewStylePropTypes = require('ViewStylePropTypes'); const invariant = require('fbjs/lib/invariant'); const { AccessibilityComponentTypes, AccessibilityTraits, } = require('ViewAccessibility'); var TVViewPropTypes = {}; if (Platform.isTVOS) { TVViewPropTypes = require('TVViewPropTypes'); } const requireNativeComponent = require('requireNativeComponent'); const PropTypes = React.PropTypes; const stylePropType = StyleSheetPropType(ViewStylePropTypes); const forceTouchAvailable = (NativeModules.IOSConstants && NativeModules.IOSConstants.forceTouchAvailable) || false; const statics = { AccessibilityTraits, AccessibilityComponentType: AccessibilityComponentTypes, /** * Is 3D Touch / Force Touch available (i.e. will touch events include `force`) * @platform ios */ forceTouchAvailable, }; /** * The most fundamental component for building a UI, `View` is a container that supports layout with * [flexbox](docs/flexbox.html), [style](docs/style.html), * [some touch handling](docs/handling-touches.html), and * [accessibility](docs/accessibility.html) controls. `View` maps directly to the * native view equivalent on whatever platform React Native is running on, whether that is a * `UIView`, `<div>`, `android.view`, etc. * * `View` is designed to be nested inside other views and can have 0 to many children of any type. * * This example creates a `View` that wraps two colored boxes and a text component in a row with * padding. * * ```javascript * class ViewColoredBoxesWithText extends Component { * render() { * return ( * <View style={{flexDirection: 'row', height: 100, padding: 20}}> * <View style={{backgroundColor: 'blue', flex: 0.3}} /> * <View style={{backgroundColor: 'red', flex: 0.5}} /> * <Text>Hello World!</Text> * </View> * ); * } * } * ``` * * > `View`s are designed to be used with [`StyleSheet`](docs/style.html) for clarity * > and performance, although inline styles are also supported. * * ### Synthetic Touch Events * * For `View` responder props (e.g., `onResponderMove`), the synthetic touch event passed to them * are of the following form: * * - `nativeEvent` * - `changedTouches` - Array of all touch events that have changed since the last event. * - `identifier` - The ID of the touch. * - `locationX` - The X position of the touch, relative to the element. * - `locationY` - The Y position of the touch, relative to the element. * - `pageX` - The X position of the touch, relative to the root element. * - `pageY` - The Y position of the touch, relative to the root element. * - `target` - The node id of the element receiving the touch event. * - `timestamp` - A time identifier for the touch, useful for velocity calculation. * - `touches` - Array of all current touches on the screen. */ const View = React.createClass({ // TODO: We should probably expose the mixins, viewConfig, and statics publicly. For example, // one of the props is of type AccessibilityComponentType. That is defined as a const[] above, // but it is not rendered by the docs, since `statics` below is not rendered. So its Possible // values had to be hardcoded. mixins: [NativeMethodsMixin], /** * `NativeMethodsMixin` will look for this when invoking `setNativeProps`. We * make `this` look like an actual native component class. */ viewConfig: { uiViewClassName: 'RCTView', validAttributes: ReactNativeViewAttributes.RCTView }, statics: { ...statics, }, propTypes: { ...TVViewPropTypes, /** * When `true`, indicates that the view is an accessibility element. By default, * all the touchable elements are accessible. */ accessible: PropTypes.bool, /** * Overrides the text that's read by the screen reader when the user interacts * with the element. By default, the label is constructed by traversing all the * children and accumulating all the `Text` nodes separated by space. */ accessibilityLabel: PropTypes.node, /** * Indicates to accessibility services to treat UI component like a * native one. Works for Android only. * * Possible values are one of: * * - `'none'` * - `'button'` * - `'radiobutton_checked'` * - `'radiobutton_unchecked'` * * @platform android */ accessibilityComponentType: PropTypes.oneOf(AccessibilityComponentTypes), /** * Indicates to accessibility services whether the user should be notified * when this view changes. Works for Android API >= 19 only. * Possible values: * * - `'none'` - Accessibility services should not announce changes to this view. * - `'polite'`- Accessibility services should announce changes to this view. * - `'assertive'` - Accessibility services should interrupt ongoing speech to immediately announce changes to this view. * * See the [Android `View` docs](http://developer.android.com/reference/android/view/View.html#attr_android:accessibilityLiveRegion) * for reference. * * @platform android */ accessibilityLiveRegion: PropTypes.oneOf([ 'none', 'polite', 'assertive', ]), /** * Controls how view is important for accessibility which is if it * fires accessibility events and if it is reported to accessibility services * that query the screen. Works for Android only. * * Possible values: * * - `'auto'` - The system determines whether the view is important for accessibility - * default (recommended). * - `'yes'` - The view is important for accessibility. * - `'no'` - The view is not important for accessibility. * - `'no-hide-descendants'` - The view is not important for accessibility, * nor are any of its descendant views. * * See the [Android `importantForAccessibility` docs](http://developer.android.com/reference/android/R.attr.html#importantForAccessibility) * for reference. * * @platform android */ importantForAccessibility: PropTypes.oneOf([ 'auto', 'yes', 'no', 'no-hide-descendants', ]), /** * Provides additional traits to screen reader. By default no traits are * provided unless specified otherwise in element. * * You can provide one trait or an array of many traits. * * Possible values for `AccessibilityTraits` are: * * - `'none'` - The element has no traits. * - `'button'` - The element should be treated as a button. * - `'link'` - The element should be treated as a link. * - `'header'` - The element is a header that divides content into sections. * - `'search'` - The element should be treated as a search field. * - `'image'` - The element should be treated as an image. * - `'selected'` - The element is selected. * - `'plays'` - The element plays sound. * - `'key'` - The element should be treated like a keyboard key. * - `'text'` - The element should be treated as text. * - `'summary'` - The element provides app summary information. * - `'disabled'` - The element is disabled. * - `'frequentUpdates'` - The element frequently changes its value. * - `'startsMedia'` - The element starts a media session. * - `'adjustable'` - The element allows adjustment over a range of values. * - `'allowsDirectInteraction'` - The element allows direct touch interaction for VoiceOver users. * - `'pageTurn'` - Informs VoiceOver that it should scroll to the next page when it finishes reading the contents of the element. * * See the [Accessibility guide](docs/accessibility.html#accessibilitytraits-ios) * for more information. * * @platform ios */ accessibilityTraits: PropTypes.oneOfType([ PropTypes.oneOf(AccessibilityTraits), PropTypes.arrayOf(PropTypes.oneOf(AccessibilityTraits)), ]), /** * When `accessible` is true, the system will try to invoke this function * when the user performs accessibility tap gesture. */ onAccessibilityTap: PropTypes.func, /** * When `accessible` is `true`, the system will invoke this function when the * user performs the magic tap gesture. */ onMagicTap: PropTypes.func, /** * Used to locate this view in end-to-end tests. * * > This disables the 'layout-only view removal' optimization for this view! */ testID: PropTypes.string, /** * For most touch interactions, you'll simply want to wrap your component in * `TouchableHighlight` or `TouchableOpacity`. Check out `Touchable.js`, * `ScrollResponder.js` and `ResponderEventPlugin.js` for more discussion. */ /** * The View is now responding for touch events. This is the time to highlight and show the user * what is happening. * * `View.props.onResponderGrant: (event) => {}`, where `event` is a synthetic touch event as * described above. */ onResponderGrant: PropTypes.func, /** * The user is moving their finger. * * `View.props.onResponderMove: (event) => {}`, where `event` is a synthetic touch event as * described above. */ onResponderMove: PropTypes.func, /** * Another responder is already active and will not release it to that `View` asking to be * the responder. * * `View.props.onResponderReject: (event) => {}`, where `event` is a synthetic touch event as * described above. */ onResponderReject: PropTypes.func, /** * Fired at the end of the touch. * * `View.props.onResponderRelease: (event) => {}`, where `event` is a synthetic touch event as * described above. */ onResponderRelease: PropTypes.func, /** * The responder has been taken from the `View`. Might be taken by other views after a call to * `onResponderTerminationRequest`, or might be taken by the OS without asking (e.g., happens * with control center/ notification center on iOS) * * `View.props.onResponderTerminate: (event) => {}`, where `event` is a synthetic touch event as * described above. */ onResponderTerminate: PropTypes.func, /** * Some other `View` wants to become responder and is asking this `View` to release its * responder. Returning `true` allows its release. * * `View.props.onResponderTerminationRequest: (event) => {}`, where `event` is a synthetic touch * event as described above. */ onResponderTerminationRequest: PropTypes.func, /** * Does this view want to become responder on the start of a touch? * * `View.props.onStartShouldSetResponder: (event) => [true | false]`, where `event` is a * synthetic touch event as described above. */ onStartShouldSetResponder: PropTypes.func, /** * If a parent `View` wants to prevent a child `View` from becoming responder on a touch start, * it should have this handler which returns `true`. * * `View.props.onStartShouldSetResponderCapture: (event) => [true | false]`, where `event` is a * synthetic touch event as described above. */ onStartShouldSetResponderCapture: PropTypes.func, /** * Does this view want to "claim" touch responsiveness? This is called for every touch move on * the `View` when it is not the responder. * * `View.props.onMoveShouldSetResponder: (event) => [true | false]`, where `event` is a * synthetic touch event as described above. */ onMoveShouldSetResponder: PropTypes.func, /** * If a parent `View` wants to prevent a child `View` from becoming responder on a move, * it should have this handler which returns `true`. * * `View.props.onMoveShouldSetResponderCapture: (event) => [true | false]`, where `event` is a * synthetic touch event as described above. */ onMoveShouldSetResponderCapture: PropTypes.func, /** * This defines how far a touch event can start away from the view. * Typical interface guidelines recommend touch targets that are at least * 30 - 40 points/density-independent pixels. * * For example, if a touchable view has a height of 20 the touchable height can be extended to * 40 with `hitSlop={{top: 10, bottom: 10, left: 0, right: 0}}` * * > The touch area never extends past the parent view bounds and the Z-index * > of sibling views always takes precedence if a touch hits two overlapping * > views. */ hitSlop: EdgeInsetsPropType, /** * Invoked on mount and layout changes with: * * `{nativeEvent: { layout: {x, y, width, height}}}` * * This event is fired immediately once the layout has been calculated, but * the new layout may not yet be reflected on the screen at the time the * event is received, especially if a layout animation is in progress. */ onLayout: PropTypes.func, /** * Controls whether the `View` can be the target of touch events. * * - `'auto'`: The View can be the target of touch events. * - `'none'`: The View is never the target of touch events. * - `'box-none'`: The View is never the target of touch events but it's * subviews can be. It behaves like if the view had the following classes * in CSS: * ``` * .box-none { * pointer-events: none; * } * .box-none * { * pointer-events: all; * } * ``` * - `'box-only'`: The view can be the target of touch events but it's * subviews cannot be. It behaves like if the view had the following classes * in CSS: * ``` * .box-only { * pointer-events: all; * } * .box-only * { * pointer-events: none; * } * ``` * > Since `pointerEvents` does not affect layout/appearance, and we are * > already deviating from the spec by adding additional modes, we opt to not * > include `pointerEvents` on `style`. On some platforms, we would need to * > implement it as a `className` anyways. Using `style` or not is an * > implementation detail of the platform. */ pointerEvents: PropTypes.oneOf([ 'box-none', 'none', 'box-only', 'auto', ]), style: stylePropType, /** * This is a special performance property exposed by `RCTView` and is useful * for scrolling content when there are many subviews, most of which are * offscreen. For this property to be effective, it must be applied to a * view that contains many subviews that extend outside its bound. The * subviews must also have `overflow: hidden`, as should the containing view * (or one of its superviews). */ removeClippedSubviews: PropTypes.bool, /** * Whether this `View` should render itself (and all of its children) into a * single hardware texture on the GPU. * * On Android, this is useful for animations and interactions that only * modify opacity, rotation, translation, and/or scale: in those cases, the * view doesn't have to be redrawn and display lists don't need to be * re-executed. The texture can just be re-used and re-composited with<|fim▁hole|> * memory, so this prop should be set back to false at the end of the * interaction/animation. * * @platform android */ renderToHardwareTextureAndroid: PropTypes.bool, /** * Whether this `View` should be rendered as a bitmap before compositing. * * On iOS, this is useful for animations and interactions that do not * modify this component's dimensions nor its children; for example, when * translating the position of a static view, rasterization allows the * renderer to reuse a cached bitmap of a static view and quickly composite * it during each frame. * * Rasterization incurs an off-screen drawing pass and the bitmap consumes * memory. Test and measure when using this property. * * @platform ios */ shouldRasterizeIOS: PropTypes.bool, /** * Views that are only used to layout their children or otherwise don't draw * anything may be automatically removed from the native hierarchy as an * optimization. Set this property to `false` to disable this optimization and * ensure that this `View` exists in the native view hierarchy. * * @platform android */ collapsable: PropTypes.bool, /** * Whether this `View` needs to rendered offscreen and composited with an alpha * in order to preserve 100% correct colors and blending behavior. The default * (`false`) falls back to drawing the component and its children with an alpha * applied to the paint used to draw each element instead of rendering the full * component offscreen and compositing it back with an alpha value. This default * may be noticeable and undesired in the case where the `View` you are setting * an opacity on has multiple overlapping elements (e.g. multiple overlapping * `View`s, or text and a background). * * Rendering offscreen to preserve correct alpha behavior is extremely * expensive and hard to debug for non-native developers, which is why it is * not turned on by default. If you do need to enable this property for an * animation, consider combining it with renderToHardwareTextureAndroid if the * view **contents** are static (i.e. it doesn't need to be redrawn each frame). * If that property is enabled, this View will be rendered off-screen once, * saved in a hardware texture, and then composited onto the screen with an alpha * each frame without having to switch rendering targets on the GPU. * * @platform android */ needsOffscreenAlphaCompositing: PropTypes.bool, }, contextTypes: { isInAParentText: React.PropTypes.bool, }, render: function() { invariant( !(this.context.isInAParentText && Platform.OS === 'android'), 'Nesting of <View> within <Text> is not supported on Android.'); // WARNING: This method will not be used in production mode as in that mode we // replace wrapper component View with generated native wrapper RCTView. Avoid // adding functionality this component that you'd want to be available in both // dev and prod modes. return <RCTView {...this.props} />; }, }); const RCTView = requireNativeComponent('RCTView', View, { nativeOnly: { nativeBackgroundAndroid: true, nativeForegroundAndroid: true, } }); if (__DEV__) { const UIManager = require('UIManager'); const viewConfig = UIManager.viewConfigs && UIManager.viewConfigs.RCTView || {}; for (const prop in viewConfig.nativeProps) { const viewAny: any = View; // Appease flow if (!viewAny.propTypes[prop] && !ReactNativeStyleAttributes[prop]) { throw new Error( 'View is missing propType for native prop `' + prop + '`' ); } } } let ViewToExport = RCTView; if (__DEV__) { ViewToExport = View; } else { Object.assign(RCTView, statics); } module.exports = ViewToExport;<|fim▁end|>
* different parameters. The downside is that this can use up limited video
<|file_name|>ob.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # coding:utf-8 class Ob(object): def __init__(self, *args, **kwds): for i in args: self.__dict__.update(args) self.__dict__.update(kwds) def __getattr__(self, name): return self.__dict__.get(name, '') def __setattr__(self, name, value): if value is not None: self.__dict__[name] = value def __delattr__(self, name): if name in self.__dict__: del self.__dict__[name] def __repr__(self): return self.__dict__.__repr__() __getitem__ = __getattr__ __delitem__ = __delattr__ __setitem__ = __setattr__ def __len__(self): return self.__dict__.__len__() def __iter__(self): for k, v in self.__dict__.items(): yield k, v def __contains__(self, name): return self.__dict__.__contains__(name) def __eq__(self, other): return self.__dict__ == other.__dict__ class StripOb(Ob): def __init__(self, *args, **kwds): super(StripJsOb, self).__init__(*args, **kwds) d = self.__dict__ for k, v in d.items(): if isinstance(v, str): if "\n" not in v: _v = v.strip() if _v != v: d[k] = _v if __name__ == '__main__': ob1 = Ob(a=1, b=2) # ob1.xx = None # print(ob1.__dict__) # del ob1.a # print(ob1.__dict__) # o = Ob(a='张沈鹏') # print(o)<|fim▁hole|> # print(k, v) # print(dict) # print(dict(iter(o)))<|fim▁end|>
# for k, v in o:
<|file_name|>mouseCursor.ts<|end_file_name|><|fim▁begin|>/*---------------------------------------------------------------------------------------------<|fim▁hole|> * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for license information. *--------------------------------------------------------------------------------------------*/ import 'vs/css!./mouseCursor'; export const MOUSE_CURSOR_TEXT_CSS_CLASS_NAME = `monaco-mouse-cursor-text`;<|fim▁end|>
<|file_name|>root.js<|end_file_name|><|fim▁begin|>import { create } from 'ember-cli-page-object'; import leadershipCollapsed from 'ilios-common/page-objects/components/leadership-collapsed'; import overview from './overview'; import header from './header'; import leadershipExpanded from './leadership-expanded';<|fim▁hole|> const definition = { scope: '[data-test-program-details]', header, overview, leadershipCollapsed, leadershipExpanded, }; export default definition; export const component = create(definition);<|fim▁end|>
<|file_name|>plugin_args.py<|end_file_name|><|fim▁begin|>import argparse import ui.output <|fim▁hole|> def help_format_cloudcredgrab(prog): kwargs = dict() kwargs['width'] = ui.output.columns() kwargs['max_help_position'] = 34 format = argparse.HelpFormatter(prog, **kwargs) return (format) def parse(args): parser = argparse.ArgumentParser(prog="cloudcredgrab", add_help=False, usage=argparse.SUPPRESS) parser.formatter_class = help_format_cloudcredgrab parser.add_argument('-u', '--username', metavar="<USER>", default=None) parser.add_argument('platform') options = vars(parser.parse_args(args))<|fim▁end|>
<|file_name|>dgen_test_output.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # # Copyright (c) 2012 The Native Client Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # """ Responsible for generating the testing decoders based on parsed table representations. """ # This file generates testing code for our class decoder. The decoder # tables are specifically written to minimize the number of decoder # classes needed to parse valid ARM instructions. For testing, this is # a problem. We can't (easily) tell if the intended instruction rules # of ARM are being met, since there is not a one-to-one mapping from # class decoders to rules. # # For example, consider the following two rows (from armv7.table): # # | 0011x - = Binary4RegisterShiftedOp => Defs12To15RdRnRsRmNotPc # Rsb_Rule_144_A1_P288 # cccc0000011snnnnddddssss0tt1mmmm # RegsNotPc # | 0100x - = Binary4RegisterShiftedOp => Defs12To15RdRnRsRmNotPc # Add_Rule_7_A1_P26 # cccc0000100snnnnddddssss0tt1mmmm # RegsNotPc # # Both rows state to return a Binary4RegisterShiftedOp class decoder. # The sequence of four symbols correspond to (in order presented): # # baseline - The name of the class decoder that should be used for testing. # actual - The name of the class decoder to use in sel_ldr # rule - A unique name identifying the rule from the manual that # defines what the selected class decoder is to decode. # pattern - The sequence of bits defines by the rule (above) # constraints - Any additional constraints assumed by the rule. # # All but the baseline is optional. The remaining fields provide # additional documentation and information for testing (which is used # by this file). If the actual is not specified (prefixed by '=>') # then it is assumed to have the same value as the baseline. # # If these two rows had a mergable bit pattern (which they do not), # these rows would still not mergable since the actions are # different. However, for sel_ldr, they both state to use a # Binary4RegisterShiftedOp. The remaining identifiers are added data # for testing only. # # We fix this by defining a notion of "action_filter" where one can # choose to keep only those fields that are applicable. For sel_ldr, # it's only 'actual'. For testing, it will include other fields, # depending on the context. # # Note: The current ARM instruction table has both new and old # actions. Old actions only define the 'InstClass' entry. If the # remaining fields are omitted, the corresponding testing for those # entries are omitted. # # Note: See dgen_decoder_output.py for more details on how we build a # decoder for sel_ldr. # # For testing, we would like to know the specific instruction rule # that was being tested. Further, we would like to know what # instruction rule was chosen for each decoder class selection made by # the parse tables. To do this, we do two levels of wrapping. # # This file generates a set of wrapper classes, each a subclass of # NamedClassDecoder. One is generated for each InstClass needed by # sel_ldr (i.e. only the 'actual' field). These named classes correspond # to what sel_ldr will select. # # The named version of each named InstClass is: # # class NamedInstClass : public NamedClassDecoder { # public: # NamedInstClass() # : NamedClassDecoder(decoder_, "InstClass") # {} # # private: # Binary3RegisterShiftedTest decoder_; # NACL_DISALLOW_COPY_AND_ASSIGN(NamedInstClass); #}; # # This makes sure that each decoder class can be identified using a # separate class decoder. For rows without rules, the corresponding # named class 'NamedInstClass' will be used. If a row also has # a rule, the 'NamedInstClass' is converted to 'NamedRuleInstClass' where # 'Rule' is the name of the rule. # # The base class for NamedClassDecoder is specified in # "named_class_decoder.h". This file defines a class that takes a # ClassDecoder (reference) C and a print name NAME, and builds a # corresponding ClassDecoder that acts like C, but will print out # NAME. The behaviour of C is maintained by dispatching each virtual # on the NamedClassDecoder to the corresponding virtual on C. # # We then define the class decoder Decoder, by defining a derived # instance of DecoderState as follows: # # class NamedDecoder : DecoderState { # public: # explicit NamedDecoder(); # const NamedClassDecoder& decode_named(const Instruction) const; # virtual const ClassDecoder& decode(const Instruction) const; # ... # }; # # The method decode is the expected API for the NamedDecoder, which is # an instance of DecoderState (defined in decode.h). The method # decode_named is the same, but returns NamedClassDecoder's so that # good error messages can be generated by the test harnesses for # ClassDecoder's (see decoder_tester.h for more details on # ClassDecoder test harnesses). # # To the NamedDecoder, we add a constant field NamedClassDecoder for # each possible class decoder method decode_named could return, or # that we could use in automatically generated tests. These fields # allow us to only create the corresponding decoder classes once # (during constructor initialization). # # Finally, we add a method corresponding to each defined decoder # table. The forms of these decoders is: # # inline const NamedClassDecoder& decode_TABLE( # const nacl_arm_dec::Instruction inst) const; # # Each of these methods are defined as inline methods so that they can # be optimized away in the corresponding top level methods (i.e. # decode_named and decode). # # For testing, there are three files generated: # # decoder_named_classes.h # decoder_named_decoder.h # decoder_named.cc # decoder_tests.cc # # File decoder_named_classes.h defines the class declarations for the # generated Rule classes, and named class decoder classes. File # decoder_named_decoder.h defines the decoder class NamedDecoder # (discussed above). decoder_named.cc contains the corresponding # implementations of the constructors and methods of these classes. # # decoder_tests.cc generates an automatic test harness executable, # that will test each instruction Rule. Each test generates all # possible matches the the corresponding Pattern of the table rule, # and calls the corresponding tester associated with the class decoder # of that row. By default, the tester is presumed to be named. # # InstClassTester # # If the row defines a Constraints identifier, then the tester # # InstClassTesterConstraints # # is used instead. import dgen_core import dgen_opt import dgen_output import dgen_decoder import dgen_actuals import dgen_baselines """The current command line arguments to use""" _cl_args = {} # The following defines naming conventions used for identifiers. # Note: DECODER will be replaced by 'actual' and 'baseline', defining # how both types of symbols are generated. CLASS = '%(DECODER)s_%(rule)s' NAMED_CLASS = 'Named%(DECODER)s_%(rule)s' INSTANCE = '%(DECODER_class)s_instance_' BASE_TESTER='%(decoder_base)sTester%(base_test_case)s' BASE_BASE_TESTER='%(decoder_base)sTester%(qualifier)s' DECODER_TESTER='%(baseline)sTester_%(test_case)s' def _safety_to_check(safety): return [s for s in safety if not isinstance(s, str)] def _interesting_patterns(patterns): """ Filters out non-interesting patterns.""" # Only include rows not corresponding to rule pattern, # and not always true. return [ p for p in patterns if ( (not p.column or p.column.name() != '$pattern') and not p.matches_any())] def _install_action(decoder, action, values): """Install common names needed to generate code for the given action, and adds it to the values map. """ # This code is somewhat inefficient in that most cases, most of the # added strings are not needed. On the other hand, by having a # single routine that generates all action specific names at one # spot, it is much easier to change definitions. values['baseline'] = action.baseline() values['actual'] = action.actual() values['decoder_base'] = decoder.base_class(values['baseline']) values['rule'] = action.rule() values['qualifier'] = ''.join([s for s in action.safety() if isinstance(s, str)]) if action.constraints(): values['qualifier'] += (action.constraints().other if action.constraints().other else '') else: values['qualifier'] ='' values['pattern'] = action.pattern() # Add dummies for row cases, in case not set up. See # function _install_row_cases) for more details on these fields. for field in [ 'base_test_case', 'test_case', 'test_pattern' ]: if not values.get(field): values[field] = '' values['baseline_class'] = _decoder_replace(CLASS, 'baseline') % values values['actual_class'] = _decoder_replace(CLASS, 'actual') % values _install_baseline_and_actuals('named_DECODER_class', NAMED_CLASS, values) _install_baseline_and_actuals('DECODER_instance', INSTANCE, values) values['base_tester'] = BASE_TESTER % values values['base_base_tester'] = BASE_BASE_TESTER % values values['decoder_tester'] = DECODER_TESTER % values def _decoder_replace(string, basis): return string.replace('DECODER', basis) def _install_key_pattern(key, pattern, basis, values): # Replace DECODER in key and pattern with basis, then # install into values. values[_decoder_replace(key, basis)] = ( _decoder_replace(pattern, basis) % values) def _install_baseline_and_actuals(key, pattern, values): # Replace DECODER with 'baseline' and 'actual', apply it # to the key and pattern, and then install into values. for basis in ['baseline', 'actual']: _install_key_pattern(key, pattern, basis, values) def _generate_baseline_and_actual(code, symbol, decoder, values, out, actions=['rule']): """ Generates code to define the given symbol. Does so for both baseline and actual decoders, filtering using actions. code - The code to generate. symbol - The symbol being defined. decoder - The decoder (tables) to use. values - The name map to use to generate code. actions - The fields to keep when generating code. """ generated_symbols = set() # Generate one for each type of basline decoder. baseline_actions = actions[:] baseline_actions.insert(0, 'baseline'); baseline_code = _decoder_replace(code, 'baseline') baseline_symbol = _decoder_replace(symbol, 'baseline'); for d in decoder.action_filter(baseline_actions).decoders(): _install_action(decoder, d, values); sym_name = (baseline_symbol % values) if sym_name not in generated_symbols: out.write(baseline_code % values) generated_symbols.add(sym_name) # Generate one for each actual type that is different than the # baseline. actual_actions = actions[:] actual_actions.insert(0, 'actual-not-baseline') actual_code = _decoder_replace(code, 'actual') actual_symbol = _decoder_replace(symbol, 'actual') for d in decoder.action_filter(actual_actions).decoders(): # Note: 'actual-not-baseline' sets actual to None if same as baseline. if d.actual(): _install_action(decoder, d, values); sym_name = (actual_symbol % values) if sym_name not in generated_symbols: out.write(actual_code % values) generated_symbols.add(sym_name) # Defines the header for decoder_bases.h NAMED_BASES_H_HEADER="""%(FILE_HEADER)s %(NOT_TCB_MESSAGE)s #ifndef %(IFDEF_NAME)s #define %(IFDEF_NAME)s #include "native_client/src/trusted/validator_arm/actual_classes.h" #include "native_client/src/trusted/validator_arm/baseline_classes.h" #include "native_client/src/trusted/validator_arm/named_class_decoder.h" #include "%(FILENAME_BASE)s_baselines.h" namespace nacl_arm_test { """ GENERATED_BASELINE_HEADER=""" /* * Define named class decoders for each automatically generated baseline * decoder. */ """ NAMED_GEN_BASE_DECLARE="""class Named%(gen_base)s : public NamedClassDecoder { public: Named%(gen_base)s() : NamedClassDecoder(decoder_, "%(gen_base)s") {} private: nacl_arm_dec::%(gen_base)s decoder_; NACL_DISALLOW_COPY_AND_ASSIGN(Named%(gen_base)s); }; """ NAMED_BASES_H_FOOTER=""" } // namespace nacl_arm_test #endif // %(IFDEF_NAME)s """ NAMED_BASES_H_SUFFIX = '_named_bases.h' def generate_named_bases_h(decoder, decoder_name, filename, out, cl_args): """Defines named classes needed for testing generated baselines. Args: tables: list of Table objects to process. decoder_name: The name of the decoder state to build. filename: The (localized) name for the .h file. out: a COutput object to write to. cl_args: A dictionary of additional command line arguments. """ global _cl_args if not decoder.primary: raise Exception('No tables provided.') assert filename.endswith(NAMED_BASES_H_SUFFIX) _cl_args = cl_args decoder = dgen_baselines.AddBaselinesToDecoder(decoder) values = { 'FILE_HEADER': dgen_output.HEADER_BOILERPLATE, 'NOT_TCB_MESSAGE' : dgen_output.NOT_TCB_BOILERPLATE, 'IFDEF_NAME' : dgen_output.ifdef_name(filename), 'FILENAME_BASE': filename[:-len(NAMED_BASES_H_SUFFIX)], 'decoder_name': decoder_name, } out.write(NAMED_BASES_H_HEADER % values) _generate_generated_baseline(decoder, out) out.write(NAMED_BASES_H_FOOTER % values) def _generate_generated_baseline(decoder, out): """ Generates code to define the given symbol. Does so for the generated baseline decoders, filtering using actions. """ generated_symbols = set() values = {} out.write(GENERATED_BASELINE_HEADER % values) for d in decoder.action_filter(['generated_baseline']).decoders(): gen_base = d.find('generated_baseline') if gen_base and gen_base not in generated_symbols: values['gen_base'] = gen_base out.write(NAMED_GEN_BASE_DECLARE % values) generated_symbols.add(gen_base) # Defines the header for decoder_named_classes.h NAMED_CLASSES_H_HEADER="""%(FILE_HEADER)s %(NOT_TCB_MESSAGE)s #ifndef %(IFDEF_NAME)s #define %(IFDEF_NAME)s #include "native_client/src/trusted/validator_arm/actual_classes.h" #include "native_client/src/trusted/validator_arm/baseline_classes.h" #include "native_client/src/trusted/validator_arm/named_class_decoder.h" #include "%(FILENAME_BASE)s_actuals.h" #include "%(FILENAME_BASE)s_named_bases.h" """ RULE_CLASSES_HEADER=""" /* * Define rule decoder classes. */ namespace nacl_arm_dec { """ RULE_CLASS="""class %(DECODER_class)s : public %(DECODER)s { }; """ RULE_CLASS_SYM="%(DECODER_class)s" NAMED_DECODERS_HEADER="""} // nacl_arm_dec namespace nacl_arm_test { /* * Define named class decoders for each class decoder. * The main purpose of these classes is to introduce * instances that are named specifically to the class decoder * and/or rule that was used to parse them. This makes testing * much easier in that error messages use these named classes * to clarify what row in the corresponding table was used * to select this decoder. Without these names, debugging the * output of the test code would be nearly impossible */ """ NAMED_CLASS_DECLARE="""class %(named_DECODER_class)s : public NamedClassDecoder { public: %(named_DECODER_class)s() : NamedClassDecoder(decoder_, "%(DECODER)s %(rule)s") {} private: nacl_arm_dec::%(DECODER_class)s decoder_; NACL_DISALLOW_COPY_AND_ASSIGN(%(named_DECODER_class)s); }; """ NAMED_CLASS_DECLARE_SYM="%(named_DECODER_class)s" NAMED_CLASSES_H_FOOTER=""" // Defines the default parse action if the table doesn't define // an action. class NotImplementedNamed : public NamedClassDecoder { public: NotImplementedNamed() : NamedClassDecoder(decoder_, "not implemented") {} private: nacl_arm_dec::NotImplemented decoder_; NACL_DISALLOW_COPY_AND_ASSIGN(NotImplementedNamed); }; } // namespace nacl_arm_test #endif // %(IFDEF_NAME)s """ def generate_named_classes_h(decoder, decoder_name, filename, out, cl_args): """Defines named classes needed for decoder testing. Args: tables: list of Table objects to process. decoder_name: The name of the decoder state to build. filename: The (localized) name for the .h file. out: a COutput object to write to. cl_args: A dictionary of additional command line arguments. """ global _cl_args if not decoder.primary: raise Exception('No tables provided.') assert filename.endswith('_named_classes.h') _cl_args = cl_args # Generate actuals from descriptions in tables, for each of the # tables that should automatically generate the corresponding # needed actual class decoders. actuals = cl_args.get('auto-actual') if actuals: decoder = dgen_actuals.AddAutoActualsToDecoder(decoder, actuals) values = { 'FILE_HEADER': dgen_output.HEADER_BOILERPLATE, 'NOT_TCB_MESSAGE' : dgen_output.NOT_TCB_BOILERPLATE, 'IFDEF_NAME' : dgen_output.ifdef_name(filename), 'FILENAME_BASE': filename[:-len('_named_classes.h')], 'decoder_name': decoder_name, } out.write(NAMED_CLASSES_H_HEADER % values) out.write(RULE_CLASSES_HEADER) _generate_baseline_and_actual(RULE_CLASS, RULE_CLASS_SYM, decoder, values, out) out.write(NAMED_DECODERS_HEADER) _generate_baseline_and_actual(NAMED_CLASS_DECLARE, NAMED_CLASS_DECLARE_SYM, decoder, values, out) out.write(NAMED_CLASSES_H_FOOTER % values) NAMED_DECODER_H_HEADER="""%(FILE_HEADER)s %(NOT_TCB_MESSAGE)s #ifndef %(IFDEF_NAME)s #define %(IFDEF_NAME)s #include "native_client/src/trusted/validator_arm/decode.h" #include "%(FILENAME_BASE)s_named_classes.h" #include "native_client/src/trusted/validator_arm/named_class_decoder.h" namespace nacl_arm_test { // Defines a (named) decoder class selector for instructions class Named%(decoder_name)s : nacl_arm_dec::DecoderState { public: explicit Named%(decoder_name)s(); // Parses the given instruction, returning the named class // decoder to use. const NamedClassDecoder& decode_named( const nacl_arm_dec::Instruction) const; // Parses the given instruction, returning the class decoder // to use. virtual const nacl_arm_dec::ClassDecoder& decode( const nacl_arm_dec::Instruction) const; // The following fields define the set of class decoders // that can be returned by the API function "decode_named". They // are created once as instance fields, and then returned // by the table methods above. This speeds up the code since // the class decoders need to only be bulit once (and reused // for each call to "decode_named").""" DECODER_STATE_FIELD=""" const %(named_DECODER_class)s %(DECODER_instance)s;""" DECODER_STATE_FIELD_NAME="%(named_DECODER_class)s" DECODER_STATE_DECODER_COMMENTS=""" private: // The following list of methods correspond to each decoder table, // and implements the pattern matching of the corresponding bit // patterns. After matching the corresponding bit patterns, they // either call other methods in this list (corresponding to another // decoder table), or they return the instance field that implements // the class decoder that should be used to decode the particular // instruction.""" DECODER_STATE_DECODER=""" inline const NamedClassDecoder& decode_%(table)s( const nacl_arm_dec::Instruction inst) const;""" NAMED_DECODER_H_FOOTER=""" // Defines default action if parse tables don't define what action // to take. const NotImplementedNamed not_implemented_; }; } // namespace nacl_arm_test #endif // %(IFDEF_NAME)s """ def generate_named_decoder_h(decoder, decoder_name, filename, out, cl_args): """Generates the named decoder for testing. Args: tables: list of Table objects to process. decoder_name: The name of the decoder state to build. filename: The (localized) name for the .h file. out: a COutput object to write to. cl_args: A dictionary of additional command line arguments. """ global _cl_args if not decoder.primary: raise Exception('No tables provided.') assert filename.endswith('_named_decoder.h') _cl_args = cl_args # Generate actuals from descriptions in tables, for each of the # tables that should automatically generate the corresponding # needed actual class decoders. actuals = cl_args.get('auto-actual') if actuals: decoder = dgen_actuals.AddAutoActualsToDecoder(decoder, actuals) values = { 'FILE_HEADER': dgen_output.HEADER_BOILERPLATE, 'NOT_TCB_MESSAGE' : dgen_output.NOT_TCB_BOILERPLATE, 'IFDEF_NAME' : dgen_output.ifdef_name(filename), 'FILENAME_BASE': filename[:-len('_named_decoder.h')], 'decoder_name': decoder_name, } out.write(NAMED_DECODER_H_HEADER % values) _generate_baseline_and_actual(DECODER_STATE_FIELD, DECODER_STATE_FIELD_NAME, decoder, values, out) out.write(DECODER_STATE_DECODER_COMMENTS) for table in decoder.tables(): values['table'] = table.name out.write(DECODER_STATE_DECODER % values) out.write(NAMED_DECODER_H_FOOTER % values) # Defines the source for DECODER_named.cc NAMED_CC_HEADER="""%(FILE_HEADER)s %(NOT_TCB_MESSAGE)s #include "%(FILENAME_BASE)s_decoder.h" using nacl_arm_dec::ClassDecoder; using nacl_arm_dec::Instruction; namespace nacl_arm_test { Named%(decoder_name)s::Named%(decoder_name)s() {} """ PARSE_TABLE_METHOD_HEADER=""" /* * Implementation of table %(table_name)s. * Specified by: %(citation)s */ const NamedClassDecoder& Named%(decoder_name)s::decode_%(table_name)s( const nacl_arm_dec::Instruction inst) const { """ METHOD_HEADER_TRACE=""" fprintf(stderr, "decode %(table_name)s\\n"); """ METHOD_DISPATCH_BEGIN=""" if (%s""" METHOD_DISPATCH_CONTINUE=""" && %s""" METHOD_DISPATCH_END=") {""" METHOD_DISPATCH_TRACE=""" fprintf(stderr, "count = %s\\n");""" PARSE_TABLE_METHOD_ROW=""" return %(action)s; """ METHOD_DISPATCH_CLOSE=""" } """ PARSE_TABLE_METHOD_FOOTER=""" // Catch any attempt to fall through... return not_implemented_; } """ NAMED_CC_FOOTER=""" const NamedClassDecoder& Named%(decoder_name)s:: decode_named(const nacl_arm_dec::Instruction inst) const { return decode_%(entry_table_name)s(inst); } const nacl_arm_dec::ClassDecoder& Named%(decoder_name)s:: decode(const nacl_arm_dec::Instruction inst) const { return decode_named(inst).named_decoder(); } } // namespace nacl_arm_test """ def generate_named_cc(decoder, decoder_name, filename, out, cl_args): """Implementation of the test decoder in .cc file Args: tables: list of Table objects to process. decoder_name: The name of the decoder state to build. filename: The (localized) name for the .h file. out: a COutput object to write to. cl_args: A dictionary of additional command line arguments. """ global _cl_args if not decoder.primary: raise Exception('No tables provided.') assert filename.endswith('.cc') _cl_args = cl_args # Generate actuals from descriptions in tables, for each of the # tables that should automatically generate the corresponding # needed actual class decoders. actuals = cl_args.get('auto-actual') if actuals: decoder = dgen_actuals.AddAutoActualsToDecoder(decoder, actuals) values = { 'FILE_HEADER': dgen_output.HEADER_BOILERPLATE, 'NOT_TCB_MESSAGE' : dgen_output.NOT_TCB_BOILERPLATE, 'FILENAME_BASE' : filename[:-len('.cc')], 'decoder_name': decoder_name, 'entry_table_name': decoder.primary.name, } out.write(NAMED_CC_HEADER % values) _generate_decoder_method_bodies(decoder, values, out) out.write(NAMED_CC_FOOTER % values) def _generate_decoder_method_bodies(decoder, values, out): global _cl_args for table in decoder.tables(): # Add the default row as the last in the optimized row, so that # it is applied if all other rows do not. opt_rows = sorted( dgen_opt.optimize_rows( table.action_filter(['baseline', 'rule']).rows(False))) if table.default_row: opt_rows.append(table.default_row) opt_rows = table.add_column_to_rows(opt_rows) print ("Table %s: %d rows minimized to %d" % (table.name, len(table.rows()), len(opt_rows))) values['table_name'] = table.name values['citation'] = table.citation, out.write(PARSE_TABLE_METHOD_HEADER % values) if _cl_args.get('trace') == 'True': out.write(METHOD_HEADER_TRACE % values) # Add message to stop compilation warnings if this table # doesn't require subtables to select a class decoder. if not table.methods(): out.write(" UNREFERENCED_PARAMETER(inst);") count = 0 for row in opt_rows: count = count + 1 if row.action.__class__.__name__ == 'DecoderAction': _install_action(decoder, row.action, values) action = '%(baseline_instance)s' % values elif row.action.__class__.__name__ == 'DecoderMethod': action = 'decode_%s(inst)' % row.action.name else: raise Exception('Bad table action: %s' % row.action) # Each row consists of a set of bit patterns defining if the row # is applicable. Convert this into a sequence of anded C test # expressions. For example, convert the following pair of bit # patterns: # # xxxx1010xxxxxxxxxxxxxxxxxxxxxxxx # xxxxxxxxxxxxxxxxxxxxxxxxxxxx0101 # # Each instruction is masked to get the the bits, and then # tested against the corresponding expected bits. Hence, the # above example is converted to: # # ((inst & 0x0F000000) != 0x0C000000) && # ((inst & 0x0000000F) != 0x00000005) out.write(METHOD_DISPATCH_BEGIN % row.patterns[0].to_commented_bool()) for p in row.patterns[1:]: out.write(METHOD_DISPATCH_CONTINUE % p.to_commented_bool()) out.write(METHOD_DISPATCH_END) if _cl_args.get('trace') == 'True': out.write(METHOD_DISPATCH_TRACE % count) values['action'] = action out.write(PARSE_TABLE_METHOD_ROW % values) out.write(METHOD_DISPATCH_CLOSE) out.write(PARSE_TABLE_METHOD_FOOTER % values) # Define the source for DECODER_tests.cc TEST_CC_HEADER="""%(FILE_HEADER)s %(NOT_TCB_MESSAGE)s #include "gtest/gtest.h" #include "native_client/src/trusted/validator_arm/actual_vs_baseline.h" #include "native_client/src/trusted/validator_arm/baseline_vs_baseline.h" #include "native_client/src/trusted/validator_arm/actual_classes.h" #include "native_client/src/trusted/validator_arm/baseline_classes.h" #include "native_client/src/trusted/validator_arm/inst_classes_testers.h" #include "native_client/src/trusted/validator_arm/arm_helpers.h" #include "native_client/src/trusted/validator_arm/gen/arm32_decode_named_bases.h" using nacl_arm_dec::Instruction; using nacl_arm_dec::ClassDecoder; using nacl_arm_dec::Register; using nacl_arm_dec::RegisterList; namespace nacl_arm_test { // The following classes are derived class decoder testers that // add row pattern constraints and decoder restrictions to each tester. // This is done so that it can be used to make sure that the // corresponding pattern is not tested for cases that would be excluded // due to row checks, or restrictions specified by the row restrictions. """ CONSTRAINT_TESTER_CLASS_HEADER=""" // %(row_comment)s class %(base_tester)s : public %(base_base_tester)s { public: %(base_tester)s(const NamedClassDecoder& decoder) : %(base_base_tester)s(decoder) {}""" CONSTRAINT_TESTER_RESTRICTIONS_HEADER=""" virtual bool PassesParsePreconditions( nacl_arm_dec::Instruction inst, const NamedClassDecoder& decoder);""" CONSTRAINT_TESTER_SANITY_HEADER="""<|fim▁hole|> CONSTRAINT_TESTER_CLASS_CLOSE=""" }; """ CONSTRAINT_TESTER_PARSE_HEADER=""" bool %(base_tester)s ::PassesParsePreconditions( nacl_arm_dec::Instruction inst, const NamedClassDecoder& decoder) {""" ROW_CONSTRAINTS_HEADER=""" // Check that row patterns apply to pattern being checked.'""" PATTERN_CONSTRAINT_RESTRICTIONS_HEADER=""" // Check pattern restrictions of row.""" CONSTRAINT_CHECK=""" // %(comment)s if (%(code)s) return false;""" CONSTRAINT_TESTER_CLASS_FOOTER=""" // Check other preconditions defined for the base decoder. return %(base_base_tester)s:: PassesParsePreconditions(inst, decoder); } """ SAFETY_TESTER_HEADER=""" bool %(base_tester)s ::ApplySanityChecks(nacl_arm_dec::Instruction inst, const NamedClassDecoder& decoder) { NC_PRECOND(%(base_base_tester)s:: ApplySanityChecks(inst, decoder));""" SAFETY_TESTER_CHECK=""" // safety: %(comment)s EXPECT_TRUE(%(code)s);""" DEFS_SAFETY_CHECK=""" // defs: %(comment)s; EXPECT_TRUE(decoder.defs(inst).IsSame(%(code)s));""" SAFETY_TESTER_FOOTER=""" return true; } """ TESTER_CLASS_HEADER=""" // The following are derived class decoder testers for decoder actions // associated with a pattern of an action. These derived classes introduce // a default constructor that automatically initializes the expected decoder // to the corresponding instance in the generated DecoderState. """ TESTER_CLASS=""" // %(row_comment)s class %(decoder_tester)s : public %(base_tester)s { public: %(decoder_tester)s() : %(base_tester)s( state_.%(baseline_instance)s) {} }; """ TEST_HARNESS=""" // Defines a gtest testing harness for tests. class %(decoder_name)sTests : public ::testing::Test { protected: %(decoder_name)sTests() {} }; // The following functions test each pattern specified in parse // decoder tables. """ TEST_FUNCTION_ACTUAL_VS_BASELINE=""" // %(row_comment)s TEST_F(%(decoder_name)sTests, %(decoder_tester)s_Test%(test_pattern)s) { %(decoder_tester)s baseline_tester; %(named_actual_class)s actual; ActualVsBaselineTester a_vs_b_tester(actual, baseline_tester); a_vs_b_tester.Test("%(pattern)s"); } """ TEST_FUNCTION_BASELINE=""" // %(row_comment)s TEST_F(%(decoder_name)sTests, %(decoder_tester)s_Test%(test_pattern)s) { %(decoder_tester)s tester; tester.Test("%(pattern)s"); } """ TEST_FUNCTION_BASELINE_VS_BASELINE=""" // %(row_comment)s TEST_F(%(decoder_name)sTests, BvB_%(decoder_tester)s_Test%(test_pattern)s) { %(decoder_tester)s old_baseline_tester; Named%(gen_decoder)s gen_baseline; BaselineVsBaselineTester b_vs_b_tester(gen_baseline, old_baseline_tester); b_vs_b_tester.Test("%(pattern)s"); } """ TEST_CC_FOOTER=""" } // namespace nacl_arm_test int main(int argc, char* argv[]) { testing::InitGoogleTest(&argc, argv); return RUN_ALL_TESTS(); } """ def generate_tests_cc(decoder, decoder_name, out, cl_args, tables): """Generates pattern tests for the rows in the given list of tables in the given decoder.""" global _cl_args if not decoder.primary: raise Exception('No tables provided.') _cl_args = cl_args # Generate actuals from descriptions in tables, for each of the # tables that should automatically generate the corresponding # needed actual class decoders. actuals = cl_args.get('auto-actual') if actuals: decoder = dgen_actuals.AddAutoActualsToDecoder(decoder, actuals) decoder = dgen_baselines.AddBaselinesToDecoder(decoder, tables) baselines = cl_args.get('test-base') if not baselines: baselines = [] decoder = _decoder_restricted_to_tables(decoder, tables) values = { 'FILE_HEADER': dgen_output.HEADER_BOILERPLATE, 'NOT_TCB_MESSAGE' : dgen_output.NOT_TCB_BOILERPLATE, 'decoder_name': decoder_name, } out.write(TEST_CC_HEADER % values) _generate_constraint_testers(decoder, values, out) _generate_rule_testers(decoder, values, out) out.write(TEST_HARNESS % values) _generate_test_patterns_with_baseline_tests(decoder, values, out, baselines) out.write(TEST_CC_FOOTER % values) def _filter_test_action(action, with_patterns, with_rules): """Filters the actions to pull out relavant entries, based on whether we want to include patterns and rules. """ action_fields = ['actual', 'baseline', 'generated_baseline', 'constraints'] + dgen_decoder.METHODS if with_patterns: action_fields += ['pattern' ] if with_rules: action_fields += ['rule'] return action.action_filter(action_fields) def _filter_test_row(row, with_patterns=False, with_rules=True): """Filters a row t pulll out actions with relavant entries, based on whether we want to include patterns and rules. """ return row.copy_with_action( _filter_test_action(row.action, with_patterns, with_rules)) def _install_row_cases(row, values): """Installs row case names, based on values entries.""" # First define base testers that add row constraints and safety checks. constraint_rows_map = values.get('constraint_rows') if constraint_rows_map: base_row = _filter_test_row(row, with_rules=False) values['base_test_case'] = ( 'Case%s' % constraint_rows_map[dgen_core.neutral_repr(base_row)]) else: values['base_test_case'] = '' # Add test decoders associated with the row in the table. decoder_rows_map = values.get('decoder_rows') if decoder_rows_map: decoder_row = _filter_test_row(row) values['test_case'] = ( 'Case%s' % decoder_rows_map[dgen_core.neutral_repr(decoder_row)]) else: values['test_case'] = '' # Encorporate patterns with each row. pattern_rows_map = values.get('test_rows') if pattern_rows_map: pattern_row = _filter_test_row(row, with_patterns=True) values['test_pattern'] = ( 'Case%s' % pattern_rows_map[dgen_core.neutral_repr(pattern_row)]) else: values['test_pattern'] = '' def _install_test_row(row, decoder, values, with_patterns=False, with_rules=True): """Installs data associated with the given row into the values map. Installs the baseline class, rule name, and constraints associated with the row. If with_patterns is specified, then pattern information and actual class information is also inserted. """ action = _filter_test_action(row.action, with_patterns, with_rules) values['row_comment'] = dgen_output.commented_string( repr(row.copy_with_action(action))) _install_action(decoder, action, values) return action def _rows_to_test(decoder, values, with_patterns=False, with_rules=True): """Returns the rows of the decoder that define enough information that testing can be done. """ generated_names = set() rows = [] for table in decoder.tables(): for row in table.rows(): if (isinstance(row.action, dgen_core.DecoderAction) and row.action.pattern()): new_row = row.copy_with_action( _install_test_row(row, decoder, values, with_patterns, with_rules)) constraint_tester = dgen_core.neutral_repr(new_row) if constraint_tester not in generated_names: generated_names.add(constraint_tester) rows.append(new_row) return sorted(rows) def _row_filter_interesting_patterns(row): """Builds a copy of the row, removing uninteresting column patterns.""" return row.copy_with_patterns(_interesting_patterns(row.patterns)) def _generate_constraint_testers(decoder, values, out): """Generates the testers needed to implement the constraints associated with each row having a pattern. """ rows = _rows_to_test(decoder, values, with_rules=False) values['constraint_rows'] = _index_neutral_map(rows) for r in rows: _install_row_cases(r, values) row = _row_filter_interesting_patterns(r) action = _install_test_row(row, decoder, values) safety_to_check = _safety_to_check(action.safety()) defs_to_check = action.defs() out.write(CONSTRAINT_TESTER_CLASS_HEADER % values) if row.patterns or action.constraints().restrictions: out.write(CONSTRAINT_TESTER_RESTRICTIONS_HEADER % values); if safety_to_check or defs_to_check: out.write(CONSTRAINT_TESTER_SANITY_HEADER % values) out.write(CONSTRAINT_TESTER_CLASS_CLOSE % values) if row.patterns or action.constraints().restrictions: out.write(CONSTRAINT_TESTER_PARSE_HEADER % values) if row.patterns: out.write(ROW_CONSTRAINTS_HEADER % values); for p in row.patterns: not_p = p.negate() values['comment'] = dgen_output.commented_string(repr(not_p), ' ') values['code'] = not_p.to_bool() out.write(CONSTRAINT_CHECK % values) if action.constraints().restrictions: out.write(PATTERN_CONSTRAINT_RESTRICTIONS_HEADER) for c in action.constraints().restrictions: not_c = c.negate() values['comment'] = dgen_output.commented_string(repr(not_c), ' ') values['code'] = not_c.to_bool() out.write(CONSTRAINT_CHECK % values) out.write(CONSTRAINT_TESTER_CLASS_FOOTER % values) if safety_to_check or defs_to_check: out.write(SAFETY_TESTER_HEADER % values) for check in safety_to_check: values['comment'] = dgen_output.commented_string( repr(check), ' ') values['code'] = check.to_bool() out.write(SAFETY_TESTER_CHECK % values) if defs_to_check: values['comment'] = dgen_output.commented_string( repr(defs_to_check), ' ') values['code'] = defs_to_check.to_register_list() out.write(DEFS_SAFETY_CHECK % values) out.write(SAFETY_TESTER_FOOTER % values) def _generate_rule_testers(decoder, values, out): """Generates the testers that tests the rule associated with each row having a pattern. """ out.write(TESTER_CLASS_HEADER % values) rows = _rows_to_test(decoder, values) values['decoder_rows'] = _index_neutral_map(rows) for r in rows: _install_row_cases(r, values) row = _row_filter_interesting_patterns(r) _install_test_row(row, decoder, values) out.write(TESTER_CLASS % values) def _decoder_restricted_to_tables(decoder, tables): """Returns a copy of the decoder, with only the given table names ( or all tables if no names are specified. """ if not tables: return decoder new_decoder = dgen_core.Decoder() for tbl in [tbl for tbl in decoder.tables() if tbl.name in tables]: new_decoder.add(tbl) new_decoder.set_class_defs(decoder.get_class_defs()) return new_decoder def _generate_test_patterns_with_baseline_tests( decoder, values, out, baseline_test_tables): _generate_test_patterns(decoder, values, out, False) _generate_test_patterns( _decoder_restricted_to_tables(decoder, baseline_test_tables), values, out, True) def _generate_test_patterns(decoder, values, out, add_baseline_tests): """Generates a test function for each row having a pattern associated with the table row. """ rows = _rows_to_test(decoder, values, with_patterns=True) values['test_rows'] = _index_neutral_map(rows) for r in rows: _install_row_cases(r, values) row = _row_filter_interesting_patterns(r) action = _install_test_row(row, decoder, values, with_patterns=True) if add_baseline_tests: if action.find('generated_baseline'): values['gen_decoder'] = action.find('generated_baseline') out.write(TEST_FUNCTION_BASELINE_VS_BASELINE % values) elif action.actual() == action.baseline(): out.write(TEST_FUNCTION_BASELINE % values) else: out.write(TEST_FUNCTION_ACTUAL_VS_BASELINE % values) def _index_neutral_map(values): """Returns a dictionary from each neutral_repr(value) in list values, to its corresponding index. This is done to reduce the number of compares to find the index, speeding up code generation. """ lookup_map = {} index = 0 for v in values: lookup_map[dgen_core.neutral_repr(v)] = index index += 1 return lookup_map<|fim▁end|>
virtual bool ApplySanityChecks(nacl_arm_dec::Instruction inst, const NamedClassDecoder& decoder);"""
<|file_name|>app.component.ts<|end_file_name|><|fim▁begin|>import { Component } from '@angular/core'; import { AuthService } from './users/shared/auth.service'; @Component({ selector: 'app-root', templateUrl: './app.component.html', styleUrls: ['./app.component.css'] })<|fim▁hole|> } }<|fim▁end|>
export class AppComponent { title = 'app works!'; constructor(private authService:AuthService) {
<|file_name|>app.py<|end_file_name|><|fim▁begin|># Copyright (c) 2001-2014, Canal TP and/or its affiliates. All rights reserved. # # This file is part of Navitia, # the software to build cool stuff with public transport. # # Hope you'll enjoy and contribute to this project, # powered by Canal TP (www.canaltp.fr). # Help us simplify mobility and open public transport: # a non ending quest to the responsive locomotion way of traveling! # # LICENCE: This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Stay tuned using # twitter @navitia # IRC #navitia on freenode # https://groups.google.com/d/forum/navitia # www.navitia.io from flask import Flask, request import json import datetime from ConfigParser import ConfigParser import zmq from monitor_kraken import request_pb2 from monitor_kraken import response_pb2 from monitor_kraken import type_pb2 app = Flask(__name__) app.config.from_object('monitor_kraken.default_settings') app.config.from_envvar('MONITOR_CONFIG_FILE', silent=True) context = zmq.Context() @app.route('/') def monitor(): if 'instance' not in request.args: return json.dumps({'error': ['instance invalid']}), 400 instance = request.args['instance'] config_file = '{path}/{instance}/kraken.ini'.format( path=app.config['KRAKEN_DIR'], instance=instance) parser = ConfigParser()<|fim▁hole|> parser.read(config_file) try: uri = parser.get('GENERAL', 'zmq_socket') except: return json.dumps({'error': ['instance invalid']}), 500 uri = uri.replace('*', 'localhost') sock = context.socket(zmq.REQ) # discard messages when socket closed sock.setsockopt(zmq.LINGER, 0) try: sock.connect(uri) req = request_pb2.Request() req.requested_api = type_pb2.STATUS sock.send(req.SerializeToString()) if sock.poll(app.config['TIMEOUT']) < 1: return json.dumps({'status': 'timeout'}), 503 pb = sock.recv() resp = response_pb2.Response() resp.ParseFromString(pb) response = {} return_code = 200 if resp.error and resp.error.message: response['status'] = resp.error.message response['start_production_date'] = resp.status.start_production_date response['end_production_date'] = resp.status.end_production_date response['last_load'] = resp.status.last_load_at response['last_load_status'] = resp.status.last_load_status response['loaded'] = resp.status.loaded response['is_connected_to_rabbitmq'] = resp.status.is_connected_to_rabbitmq response['publication_date'] = resp.status.publication_date if resp.status.last_load_status == False and 'status' not in response: response['status'] = 'last load failed' if 'status' not in response: response['status'] = 'running' else: return_code = 503 return json.dumps(response), return_code finally: sock.close() if __name__ == '__main__': app.run()<|fim▁end|>
<|file_name|>navbar.crtl.js<|end_file_name|><|fim▁begin|><|fim▁hole|> $scope.logout = function () { AuthenticationService.logout(); }; });<|fim▁end|>
'use strict'; angular.module('baka') .controller('NavbarCtrl', function ($scope, AuthenticationService) {
<|file_name|>adc.py<|end_file_name|><|fim▁begin|>from simcore import * comm.setADCValue(1,0,1,400) comm.setADCValue(2,0,1,380)<|fim▁hole|>comm.setADCValue(5,0,1,412) comm.setADCValue(6,0,1,425) comm.setADCValue(7,0,1,445) comm.setADCValue(8,0,1,429)<|fim▁end|>
comm.setADCValue(3,0,1,362) comm.setADCValue(4,0,1,334)
<|file_name|>index.js<|end_file_name|><|fim▁begin|>// Actions export const ADD_NOTIFICATION = 'notifications/ADD_NOTIFICATION' export const DISMISS_NOTIFICATION = 'notifications/DISMISS_NOTIFICATION' export const CLEAR_NOTIFICATIONS = 'notifications/CLEAR_NOTIFICATIONS' // Reducer export const initialState = [] export default function reducer(state = initialState, action) { const { payload, type } = action switch (type) { case ADD_NOTIFICATION: return [...state, payload] case DISMISS_NOTIFICATION: return state.filter(notification => notification.id !== payload) case CLEAR_NOTIFICATIONS: return [state] default: return state } } // Action Creators export function addNotification(notification) { const { id, dismissAfter } = notification if (!id) {<|fim▁hole|> dispatch({ type: ADD_NOTIFICATION, payload: notification }) if (dismissAfter) { setTimeout(() => { const { notifications } = getState() const found = notifications.find(lookup => { return lookup.id === notification.id }) if (found) { dispatch({ type: DISMISS_NOTIFICATION, payload: notification.id }) } }, dismissAfter) } } } export function dismissNotification(id) { return { type: DISMISS_NOTIFICATION, payload: id } } export function clearNotifications(id) { return { type: CLEAR_NOTIFICATIONS, payload: id } }<|fim▁end|>
notification.id = new Date().getTime() } return (dispatch, getState) => {
<|file_name|>model.py<|end_file_name|><|fim▁begin|>import os path = os.path.dirname(os.path.realpath(__file__)) sbmlFilePath = os.path.join(path, 'BIOMD0000000102.xml') with open(sbmlFilePath,'r') as f: sbmlString = f.read() <|fim▁hole|> return False else: return True if module_exists('libsbml'): import libsbml sbml = libsbml.readSBMLFromString(sbmlString)<|fim▁end|>
def module_exists(module_name): try: __import__(module_name) except ImportError:
<|file_name|>DegenerativeVenom.ts<|end_file_name|><|fim▁begin|>import { Trait } from '../../../shared/models/trait'; export class DegenerativeVenom extends Trait { static baseClass = 'Thief';<|fim▁hole|> static traitName = 'DegenerativeVenom'; static description = 'Your Venom spell will also lower the perception of the afflicted target.'; static icon = 'dripping-goo'; static upgrades = [ { cost: 10, capstone: true } ]; }<|fim▁end|>
<|file_name|>new_tab.js<|end_file_name|><|fim▁begin|>// Copyright (c) 2012 The Chromium Authors. All rights reserved. // Use of this source code is governed by a BSD-style license that can be // found in the LICENSE file. /** * @fileoverview New tab page * This is the main code for the new tab page used by touch-enabled Chrome * browsers. For now this is still a prototype. */ // Use an anonymous function to enable strict mode just for this file (which // will be concatenated with other files when embedded in Chrome cr.define('ntp', function() { 'use strict'; /** * NewTabView instance. * @type {!Object|undefined} */ var newTabView; /** * The 'notification-container' element. * @type {!Element|undefined} */ var notificationContainer; /** * If non-null, an info bubble for showing messages to the user. It points at * the Most Visited label, and is used to draw more attention to the * navigation dot UI. * @type {!Element|undefined} */ var promoBubble; /** * If non-null, an bubble confirming that the user has signed into sync. It * points at the login status at the top of the page. * @type {!Element|undefined} */ var loginBubble; /** * true if |loginBubble| should be shown. * @type {boolean} */ var shouldShowLoginBubble = false; /** * The 'other-sessions-menu-button' element. * @type {!Element|undefined} */ var otherSessionsButton; /** * The time when all sections are ready. * @type {number|undefined} * @private */ var startTime; /** * The time in milliseconds for most transitions. This should match what's * in new_tab.css. Unfortunately there's no better way to try to time * something to occur until after a transition has completed. * @type {number} * @const */ var DEFAULT_TRANSITION_TIME = 500; /** * See description for these values in ntp_stats.h. * @enum {number} */ var NtpFollowAction = { CLICKED_TILE: 11, CLICKED_OTHER_NTP_PANE: 12, OTHER: 13 }; /** * Creates a NewTabView object. NewTabView extends PageListView with * new tab UI specific logics. * @constructor * @extends {PageListView} */ function NewTabView() { var pageSwitcherStart = null; var pageSwitcherEnd = null; if (loadTimeData.getValue('showApps')) { pageSwitcherStart = getRequiredElement('page-switcher-start'); pageSwitcherEnd = getRequiredElement('page-switcher-end'); } this.initialize(getRequiredElement('page-list'), getRequiredElement('dot-list'), getRequiredElement('card-slider-frame'), getRequiredElement('trash'), pageSwitcherStart, pageSwitcherEnd); } NewTabView.prototype = { __proto__: ntp.PageListView.prototype, /** @override */ appendTilePage: function(page, title, titleIsEditable, opt_refNode) { ntp.PageListView.prototype.appendTilePage.apply(this, arguments); if (promoBubble) window.setTimeout(promoBubble.reposition.bind(promoBubble), 0); } }; /** * Invoked at startup once the DOM is available to initialize the app. */ function onLoad() { sectionsToWaitFor = 0; if (loadTimeData.getBoolean('showMostvisited')) sectionsToWaitFor++; if (loadTimeData.getBoolean('showApps')) { sectionsToWaitFor++; if (loadTimeData.getBoolean('showAppLauncherPromo')) { $('app-launcher-promo-close-button').addEventListener('click', function() { chrome.send('stopShowingAppLauncherPromo'); }); $('apps-promo-learn-more').addEventListener('click', function() { chrome.send('onLearnMore'); }); } } if (loadTimeData.getBoolean('isDiscoveryInNTPEnabled')) sectionsToWaitFor++; measureNavDots(); // Load the current theme colors. themeChanged(); newTabView = new NewTabView(); notificationContainer = getRequiredElement('notification-container'); notificationContainer.addEventListener( 'webkitTransitionEnd', onNotificationTransitionEnd); if (loadTimeData.getBoolean('showRecentlyClosed')) { cr.ui.decorate($('recently-closed-menu-button'), ntp.RecentMenuButton); chrome.send('getRecentlyClosedTabs'); } else { $('recently-closed-menu-button').hidden = true; } if (loadTimeData.getBoolean('showOtherSessionsMenu')) { otherSessionsButton = getRequiredElement('other-sessions-menu-button'); cr.ui.decorate(otherSessionsButton, ntp.OtherSessionsMenuButton); otherSessionsButton.initialize(loadTimeData.getBoolean('isUserSignedIn')); } else { getRequiredElement('other-sessions-menu-button').hidden = true; } if (loadTimeData.getBoolean('showMostvisited')) { var mostVisited = new ntp.MostVisitedPage(); // Move the footer into the most visited page if we are in "bare minimum" // mode. if (document.body.classList.contains('bare-minimum')) mostVisited.appendFooter(getRequiredElement('footer')); newTabView.appendTilePage(mostVisited, loadTimeData.getString('mostvisited'), false); chrome.send('getMostVisited'); } if (loadTimeData.getBoolean('isDiscoveryInNTPEnabled')) { var suggestionsScript = document.createElement('script'); suggestionsScript.src = 'suggestions_page.js'; suggestionsScript.onload = function() { newTabView.appendTilePage(new ntp.SuggestionsPage(), loadTimeData.getString('suggestions'), false, (newTabView.appsPages.length > 0) ? newTabView.appsPages[0] : null); chrome.send('getSuggestions'); cr.dispatchSimpleEvent(document, 'sectionready', true, true); }; document.querySelector('head').appendChild(suggestionsScript); } if (!loadTimeData.getBoolean('showWebStoreIcon')) { var webStoreIcon = $('chrome-web-store-link'); // Not all versions of the NTP have a footer, so this may not exist. if (webStoreIcon) webStoreIcon.hidden = true; } else { var webStoreLink = loadTimeData.getString('webStoreLink'); var url = appendParam(webStoreLink, 'utm_source', 'chrome-ntp-launcher'); $('chrome-web-store-link').href = url; $('chrome-web-store-link').addEventListener('click', onChromeWebStoreButtonClick); } // We need to wait for all the footer menu setup to be completed before // we can compute its layout. layoutFooter(); if (loadTimeData.getString('login_status_message')) { loginBubble = new cr.ui.Bubble; loginBubble.anchorNode = $('login-container'); loginBubble.arrowLocation = cr.ui.ArrowLocation.TOP_END; loginBubble.bubbleAlignment = cr.ui.BubbleAlignment.BUBBLE_EDGE_TO_ANCHOR_EDGE; loginBubble.deactivateToDismissDelay = 2000; loginBubble.closeButtonVisible = false; $('login-status-advanced').onclick = function() { chrome.send('showAdvancedLoginUI'); }; $('login-status-dismiss').onclick = loginBubble.hide.bind(loginBubble); var bubbleContent = $('login-status-bubble-contents'); loginBubble.content = bubbleContent; // The anchor node won't be updated until updateLogin is called so don't // show the bubble yet. shouldShowLoginBubble = true; } if (loadTimeData.valueExists('bubblePromoText')) { promoBubble = new cr.ui.Bubble; promoBubble.anchorNode = getRequiredElement('promo-bubble-anchor'); promoBubble.arrowLocation = cr.ui.ArrowLocation.BOTTOM_START; promoBubble.bubbleAlignment = cr.ui.BubbleAlignment.ENTIRELY_VISIBLE; promoBubble.deactivateToDismissDelay = 2000; promoBubble.content = parseHtmlSubset( loadTimeData.getString('bubblePromoText'), ['BR']); var bubbleLink = promoBubble.querySelector('a'); if (bubbleLink) { bubbleLink.addEventListener('click', function(e) { chrome.send('bubblePromoLinkClicked'); }); } promoBubble.handleCloseEvent = function() { promoBubble.hide(); chrome.send('bubblePromoClosed'); }; promoBubble.show(); chrome.send('bubblePromoViewed'); } var loginContainer = getRequiredElement('login-container'); loginContainer.addEventListener('click', showSyncLoginUI); if (loadTimeData.getBoolean('shouldShowSyncLogin')) chrome.send('initializeSyncLogin'); doWhenAllSectionsReady(function() { // Tell the slider about the pages. newTabView.updateSliderCards(); // Mark the current page. newTabView.cardSlider.currentCardValue.navigationDot.classList.add( 'selected'); if (loadTimeData.valueExists('notificationPromoText')) { var promoText = loadTimeData.getString('notificationPromoText'); var tags = ['IMG']; var attrs = { src: function(node, value) { return node.tagName == 'IMG' && /^data\:image\/(?:png|gif|jpe?g)/.test(value); }, }; var promo = parseHtmlSubset(promoText, tags, attrs); var promoLink = promo.querySelector('a'); if (promoLink) { promoLink.addEventListener('click', function(e) { chrome.send('notificationPromoLinkClicked'); }); } showNotification(promo, [], function() { chrome.send('notificationPromoClosed'); }, 60000); chrome.send('notificationPromoViewed'); } cr.dispatchSimpleEvent(document, 'ntpLoaded', true, true); document.documentElement.classList.remove('starting-up'); startTime = Date.now(); }); preventDefaultOnPoundLinkClicks(); // From webui/js/util.js. cr.ui.FocusManager.disableMouseFocusOnButtons(); } /** * Launches the chrome web store app with the chrome-ntp-launcher * source. * @param {Event} e The click event. */ function onChromeWebStoreButtonClick(e) { chrome.send('recordAppLaunchByURL', [encodeURIComponent(this.href), ntp.APP_LAUNCH.NTP_WEBSTORE_FOOTER]); } /* * The number of sections to wait on. * @type {number} */ var sectionsToWaitFor = -1; /** * Queued callbacks which lie in wait for all sections to be ready. * @type {array} */ var readyCallbacks = []; /** * Fired as each section of pages becomes ready. * @param {Event} e Each page's synthetic DOM event. */ document.addEventListener('sectionready', function(e) { if (--sectionsToWaitFor <= 0) { while (readyCallbacks.length) { readyCallbacks.shift()(); } } }); /** * This is used to simulate a fire-once event (i.e. $(document).ready() in * jQuery or Y.on('domready') in YUI. If all sections are ready, the callback * is fired right away. If all pages are not ready yet, the function is queued * for later execution. * @param {function} callback The work to be done when ready. */ function doWhenAllSectionsReady(callback) { assert(typeof callback == 'function'); if (sectionsToWaitFor > 0) readyCallbacks.push(callback); else window.setTimeout(callback, 0); // Do soon after, but asynchronously. } /** * Measure the width of a nav dot with a given title. * @param {string} id The loadTimeData ID of the desired title. * @return {number} The width of the nav dot. */ function measureNavDot(id) { var measuringDiv = $('fontMeasuringDiv'); measuringDiv.textContent = loadTimeData.getString(id); // The 4 is for border and padding. return Math.max(measuringDiv.clientWidth * 1.15 + 4, 80); } /** * Fills in an invisible div with the longest dot title string so that * its length may be measured and the nav dots sized accordingly. */ function measureNavDots() { var pxWidth = measureNavDot('appDefaultPageName'); if (loadTimeData.getBoolean('showMostvisited')) pxWidth = Math.max(measureNavDot('mostvisited'), pxWidth); var styleElement = document.createElement('style'); styleElement.type = 'text/css'; // max-width is used because if we run out of space, the nav dots will be // shrunk. styleElement.textContent = '.dot { max-width: ' + pxWidth + 'px; }'; document.querySelector('head').appendChild(styleElement); } /** * Layout the footer so that the nav dots stay centered. */ function layoutFooter() { // We need the image to be loaded. var logo = $('logo-img'); var logoImg = logo.querySelector('img'); if (!logoImg.complete) { logoImg.onload = layoutFooter; return; } var menu = $('footer-menu-container'); if (menu.clientWidth > logoImg.width) logo.style.WebkitFlex = '0 1 ' + menu.clientWidth + 'px'; else menu.style.WebkitFlex = '0 1 ' + logoImg.width + 'px'; } function themeChanged(opt_hasAttribution) { $('themecss').href = 'chrome://theme/css/new_tab_theme.css?' + Date.now(); if (typeof opt_hasAttribution != 'undefined') { document.documentElement.setAttribute('hasattribution', opt_hasAttribution); } updateAttribution(); } function setBookmarkBarAttached(attached) { document.documentElement.setAttribute('bookmarkbarattached', attached); } /** * Attributes the attribution image at the bottom left. */ function updateAttribution() { var attribution = $('attribution'); if (document.documentElement.getAttribute('hasattribution') == 'true') { attribution.hidden = false; } else { attribution.hidden = true; } } /** * Timeout ID. * @type {number} */ var notificationTimeout = 0; /** * Shows the notification bubble. * @param {string|Node} message The notification message or node to use as * message. * @param {Array.<{text: string, action: function()}>} links An array of * records describing the links in the notification. Each record should * have a 'text' attribute (the display string) and an 'action' attribute * (a function to run when the link is activated). * @param {Function} opt_closeHandler The callback invoked if the user * manually dismisses the notification. */ function showNotification(message, links, opt_closeHandler, opt_timeout) { window.clearTimeout(notificationTimeout); var span = document.querySelector('#notification > span'); if (typeof message == 'string') { span.textContent = message; } else { span.textContent = ''; // Remove all children. span.appendChild(message); } var linksBin = $('notificationLinks'); linksBin.textContent = ''; for (var i = 0; i < links.length; i++) { var link = linksBin.ownerDocument.createElement('div'); link.textContent = links[i].text; link.action = links[i].action; link.onclick = function() { this.action(); hideNotification(); }; link.setAttribute('role', 'button'); link.setAttribute('tabindex', 0); link.className = 'link-button'; linksBin.appendChild(link); } function closeFunc(e) { if (opt_closeHandler) opt_closeHandler(); hideNotification(); } document.querySelector('#notification button').onclick = closeFunc; document.addEventListener('dragstart', closeFunc); notificationContainer.hidden = false; showNotificationOnCurrentPage(); newTabView.cardSlider.frame.addEventListener( 'cardSlider:card_change_ended', onCardChangeEnded); var timeout = opt_timeout || 10000; notificationTimeout = window.setTimeout(hideNotification, timeout); } /** * Hide the notification bubble. */ function hideNotification() { notificationContainer.classList.add('inactive'); newTabView.cardSlider.frame.removeEventListener( 'cardSlider:card_change_ended', onCardChangeEnded); } /** * Happens when 1 or more consecutive card changes end. * @param {Event} e The cardSlider:card_change_ended event. */ function onCardChangeEnded(e) { // If we ended on the same page as we started, ignore. if (newTabView.cardSlider.currentCardValue.notification) return; // Hide the notification the old page. notificationContainer.classList.add('card-changed'); showNotificationOnCurrentPage(); } /** * Move and show the notification on the current page. */ function showNotificationOnCurrentPage() { var page = newTabView.cardSlider.currentCardValue; doWhenAllSectionsReady(function() { if (page != newTabView.cardSlider.currentCardValue) return; // NOTE: This moves the notification to inside of the current page. page.notification = notificationContainer; // Reveal the notification and instruct it to hide itself if ignored. notificationContainer.classList.remove('inactive'); // Gives the browser time to apply this rule before we remove it (causing // a transition). window.setTimeout(function() { notificationContainer.classList.remove('card-changed'); }, 0); }); } /** * When done fading out, set hidden to true so the notification can't be * tabbed to or clicked. * @param {Event} e The webkitTransitionEnd event. */ function onNotificationTransitionEnd(e) { if (notificationContainer.classList.contains('inactive')) notificationContainer.hidden = true; } function setRecentlyClosedTabs(dataItems) { $('recently-closed-menu-button').dataItems = dataItems; layoutFooter(); } function setMostVisitedPages(data, hasBlacklistedUrls) { newTabView.mostVisitedPage.data = data; cr.dispatchSimpleEvent(document, 'sectionready', true, true); } function setSuggestionsPages(data, hasBlacklistedUrls) { newTabView.suggestionsPage.data = data; } /** * Set the dominant color for a node. This will be called in response to * getFaviconDominantColor. The node represented by |id| better have a setter * for stripeColor. * @param {string} id The ID of a node. * @param {string} color The color represented as a CSS string. */ function setFaviconDominantColor(id, color) { var node = $(id); if (node) node.stripeColor = color; } /** * Updates the text displayed in the login container. If there is no text then * the login container is hidden. * @param {string} loginHeader The first line of text. * @param {string} loginSubHeader The second line of text. * @param {string} iconURL The url for the login status icon. If this is null then the login status icon is hidden. * @param {boolean} isUserSignedIn Indicates if the user is signed in or not. */ function updateLogin(loginHeader, loginSubHeader, iconURL, isUserSignedIn) { if (loginHeader || loginSubHeader) { $('login-container').hidden = false; $('login-status-header').innerHTML = loginHeader; $('login-status-sub-header').innerHTML = loginSubHeader; $('card-slider-frame').classList.add('showing-login-area'); if (iconURL) { $('login-status-header-container').style.backgroundImage = url(iconURL); $('login-status-header-container').classList.add('login-status-icon'); } else { $('login-status-header-container').style.backgroundImage = 'none'; $('login-status-header-container').classList.remove( 'login-status-icon'); } } else { $('login-container').hidden = true; $('card-slider-frame').classList.remove('showing-login-area'); } if (shouldShowLoginBubble) { window.setTimeout(loginBubble.show.bind(loginBubble), 0); chrome.send('loginMessageSeen'); shouldShowLoginBubble = false; } else if (loginBubble) { loginBubble.reposition(); } if (otherSessionsButton) { otherSessionsButton.updateSignInState(isUserSignedIn); layoutFooter(); } } /** * Show the sync login UI. * @param {Event} e The click event. */ function showSyncLoginUI(e) { var rect = e.currentTarget.getBoundingClientRect(); chrome.send('showSyncLoginUI', [rect.left, rect.top, rect.width, rect.height]); } /** * Logs the time to click for the specified item. * @param {string} item The item to log the time-to-click. */ function logTimeToClick(item) { var timeToClick = Date.now() - startTime; chrome.send('logTimeToClick', ['NewTabPage.TimeToClick' + item, timeToClick]); } /** * Wrappers to forward the callback to corresponding PageListView member. */ function appAdded() { return newTabView.appAdded.apply(newTabView, arguments); } function appMoved() { return newTabView.appMoved.apply(newTabView, arguments);<|fim▁hole|> function appRemoved() { return newTabView.appRemoved.apply(newTabView, arguments); } function appsPrefChangeCallback() { return newTabView.appsPrefChangedCallback.apply(newTabView, arguments); } function appLauncherPromoPrefChangeCallback() { return newTabView.appLauncherPromoPrefChangeCallback.apply(newTabView, arguments); } function appsReordered() { return newTabView.appsReordered.apply(newTabView, arguments); } function enterRearrangeMode() { return newTabView.enterRearrangeMode.apply(newTabView, arguments); } function setForeignSessions(sessionList, isTabSyncEnabled) { if (otherSessionsButton) { otherSessionsButton.setForeignSessions(sessionList, isTabSyncEnabled); layoutFooter(); } } function getAppsCallback() { return newTabView.getAppsCallback.apply(newTabView, arguments); } function getAppsPageIndex() { return newTabView.getAppsPageIndex.apply(newTabView, arguments); } function getCardSlider() { return newTabView.cardSlider; } function leaveRearrangeMode() { return newTabView.leaveRearrangeMode.apply(newTabView, arguments); } function saveAppPageName() { return newTabView.saveAppPageName.apply(newTabView, arguments); } function setAppToBeHighlighted(appId) { newTabView.highlightAppId = appId; } // Return an object with all the exports return { appAdded: appAdded, appMoved: appMoved, appRemoved: appRemoved, appsPrefChangeCallback: appsPrefChangeCallback, appLauncherPromoPrefChangeCallback: appLauncherPromoPrefChangeCallback, enterRearrangeMode: enterRearrangeMode, getAppsCallback: getAppsCallback, getAppsPageIndex: getAppsPageIndex, getCardSlider: getCardSlider, onLoad: onLoad, leaveRearrangeMode: leaveRearrangeMode, logTimeToClick: logTimeToClick, NtpFollowAction: NtpFollowAction, saveAppPageName: saveAppPageName, setAppToBeHighlighted: setAppToBeHighlighted, setBookmarkBarAttached: setBookmarkBarAttached, setForeignSessions: setForeignSessions, setMostVisitedPages: setMostVisitedPages, setSuggestionsPages: setSuggestionsPages, setRecentlyClosedTabs: setRecentlyClosedTabs, setFaviconDominantColor: setFaviconDominantColor, showNotification: showNotification, themeChanged: themeChanged, updateLogin: updateLogin }; }); document.addEventListener('DOMContentLoaded', ntp.onLoad); var toCssPx = cr.ui.toCssPx;<|fim▁end|>
}
<|file_name|>test_meta_functions.py<|end_file_name|><|fim▁begin|>import unittest class UnitParsingTest(unittest.TestCase): def _assert_meters(self, tag_value, expected): from vectordatasource.meta.function import mz_to_float_meters parsed = mz_to_float_meters(tag_value) if parsed is None and expected is not None: self.fail("Failed to parse %r, but expected %r." % (tag_value, expected)) elif parsed is not None and expected is None: self.fail("Parsed %r as %r, but expected parsing to fail." % (tag_value, parsed)) elif parsed != expected and abs(parsed - expected) > 0.001: self.fail("Expected %r from %r, but got %r instead." % (expected, tag_value, parsed)) def test_parse_miles(self): self._assert_meters('1mi', 1609.3440) def test_parse_kilometers(self): self._assert_meters('1km', 1000.0) def test_parse_meters(self): self._assert_meters('1m', 1.0) def test_parse_nautical_miles(self): self._assert_meters('1nmi', 1852.0) def test_parse_feet(self): self._assert_meters('1ft', 0.3048) def test_parse_space_variations(self): self._assert_meters('1.0 m', 1.0) self._assert_meters('10.0m', 10.0) self._assert_meters('1 m', 1.0) self._assert_meters('1m', 1.0) def test_imperial(self): self._assert_meters('1\'', 0.3048) self._assert_meters('1.5\'', 0.3048 * 1.5) self._assert_meters('1\'6"', 0.3048 * 1.5) # this is technically allowed by the regex, so it should be parsed # properly, but doesn't make any sense. self._assert_meters('1.5\'6"', 0.3048 * 2)<|fim▁hole|> def test_numeric(self): # just a number on its own is assumed to be in meters self._assert_meters('1234', 1234.0) def test_junk_units(self): # we shouldn't parse anything that's not a unit that we can convert. self._assert_meters('1nm', None) self._assert_meters('1foo', None) self._assert_meters('1 foo', None) self._assert_meters('not 1', None) self._assert_meters('1mm', None) def test_none(self): # missing tags will be passed through as None, so we have to handle # that by returning None. self._assert_meters(None, None) def test_finite(self): # should return a finite number or None self._assert_meters('NaN', None) self._assert_meters('Inf', None) self._assert_meters('-Inf', None) class ToFloatTest(unittest.TestCase): def test_finite(self): # to_float should return a finite number or None. technically, both # Inf and NaN are valid values for floats, but they do strange things # and may raise unexpected exceptions during arithmetic. in general, # we do not expect to see valid uses of NaN or Inf in input data. from vectordatasource.util import to_float self.assertIsNone(to_float('NaN')) self.assertIsNone(to_float('Inf')) self.assertIsNone(to_float('-Inf'))<|fim▁end|>
<|file_name|>leaflet.browser.print.utils.js<|end_file_name|><|fim▁begin|>/** MIT License http://www.opensource.org/licenses/mit-license.php Author Igor Vladyka <[email protected]> (https://github.com/Igor-Vladyka/leaflet.browser.print) **/ L.Control.BrowserPrint.Utils = { _ignoreArray: [], _cloneFactoryArray: [], _cloneRendererArray: [], _knownRenderers: {}, cloneOptions: function(options) { var utils = this; var retOptions = {}; for (var name in options) { var item = options[name]; if (item && item.clone) { retOptions[name] = item.clone(); } else if (item && item.onAdd) { retOptions[name] = utils.cloneLayer(item); } else { retOptions[name] = item; } } return retOptions; }, cloneBasicOptionsWithoutLayers: function(options) { var retOptions = {}; var optionNames = Object.getOwnPropertyNames(options); if (optionNames.length) { for (var i = 0; i < optionNames.length; i++) { var optName = optionNames[i]; if (optName && optName != "layers") { retOptions[optName] = options[optName]; } } return this.cloneOptions(retOptions); } return retOptions; }, cloneInnerLayers: function (layer) { var utils = this; var layers = []; layer.eachLayer(function (inner) { var l = utils.cloneLayer(inner); if (l) { layers.push(l); } }); return layers; }, initialize: function () {<|fim▁hole|> // Renderers this.registerRenderer(L.SVG, 'L.SVG'); this.registerRenderer(L.Canvas, 'L.Canvas'); this.registerLayer(L.TileLayer.WMS, 'L.TileLayer.WMS', function(layer, utils) { return L.tileLayer.wms(layer._url, utils.cloneOptions(layer.options)); }); this.registerLayer(L.TileLayer, 'L.TileLayer', function(layer, utils) { return L.tileLayer(layer._url, utils.cloneOptions(layer.options)); }); this.registerLayer(L.GridLayer, 'L.GridLayer', function(layer, utils) { return L.gridLayer(utils.cloneOptions(layer.options)); }); this.registerLayer(L.ImageOverlay, 'L.ImageOverlay', function(layer, utils) { return L.imageOverlay(layer._url, layer._bounds, utils.cloneOptions(layer.options)); }); this.registerLayer(L.Marker, 'L.Marker', function(layer, utils) { return L.marker(layer.getLatLng(), utils.cloneOptions(layer.options)); }); this.registerLayer(L.Popup, 'L.Popup', function(layer, utils) { return L.popup(utils.cloneOptions(layer.options)).setLatLng(layer.getLatLng()).setContent(layer.getContent()); }); this.registerLayer(L.Circle, 'L.Circle', function(layer, utils) { return L.circle(layer.getLatLng(), layer.getRadius(), utils.cloneOptions(layer.options)); }); this.registerLayer(L.CircleMarker, 'L.CircleMarker', function(layer, utils) { return L.circleMarker(layer.getLatLng(), utils.cloneOptions(layer.options)); }); this.registerLayer(L.Rectangle, 'L.Rectangle', function(layer, utils) { return L.rectangle(layer.getBounds(), utils.cloneOptions(layer.options)); }); this.registerLayer(L.Polygon, 'L.Polygon', function(layer, utils) { return L.polygon(layer.getLatLngs(), utils.cloneOptions(layer.options)); }); // MultiPolyline is removed in leaflet 1.0.0 this.registerLayer(L.MultiPolyline, 'L.MultiPolyline', function(layer, utils) { return L.polyline(layer.getLatLngs(), utils.cloneOptions(layer.options)); }); // MultiPolygon is removed in leaflet 1.0.0 this.registerLayer(L.MultiPolygon, 'L.MultiPolygon', function(layer, utils) { return L.multiPolygon(layer.getLatLngs(), utils.cloneOptions(layer.options)); }); this.registerLayer(L.Polyline, 'L.Polyline', function(layer, utils) { return L.polyline(layer.getLatLngs(), utils.cloneOptions(layer.options)); }); this.registerLayer(L.GeoJSON, 'L.GeoJSON', function(layer, utils) { return L.geoJson(layer.toGeoJSON(), utils.cloneOptions(layer.options)); }); this.registerIgnoreLayer(L.FeatureGroup, 'L.FeatureGroup'); this.registerIgnoreLayer(L.LayerGroup, 'L.LayerGroup'); // There is no point to clone tooltips here; L.tooltip(options); this.registerLayer(L.Tooltip, 'L.Tooltip', function(){ return null; }); }, _register: function(array, type, identifier, builderFunction) { if (type && !array.filter(function(l){ return l.identifier === identifier; }).length) { array.push({ type: type, identifier: identifier, builder: builderFunction || function (layer) { return new type(layer.options); } }); } }, registerLayer: function(type, identifier, builderFunction) { this._register(this._cloneFactoryArray, type, identifier, builderFunction); }, registerRenderer: function(type, identifier, builderFunction) { this._register(this._cloneRendererArray, type, identifier, builderFunction); }, registerIgnoreLayer: function(type, identifier) { this._register(this._ignoreArray, type, identifier); }, cloneLayer: function(layer) { if (!layer) return null; // First we check if this layer is actual renderer var renderer = this.__getRenderer(layer); if (renderer) { return renderer; } var factoryObject; if (layer._group) { // Exceptional check for L.MarkerClusterGroup factoryObject = this.__getFactoryObject(layer._group, true); } else { factoryObject = this.__getFactoryObject(layer); } // We clone and recreate layer if it's simple overlay if (factoryObject) { factoryObject = factoryObject.builder(layer, this); } return factoryObject; }, getType: function(layer) { if (!layer) return null; var factoryObject = this.__getFactoryObject(layer); if (factoryObject) { factoryObject = factoryObject.identifier; } return factoryObject; }, __getRenderer: function(oldRenderer) { var renderer = this._knownRenderers[oldRenderer._leaflet_id]; if (!renderer) { for (var i = 0; i < this._cloneRendererArray.length; i++) { var factoryObject = this._cloneRendererArray[i]; if (oldRenderer instanceof factoryObject.type) { this._knownRenderers[oldRenderer._leaflet_id] = factoryObject.builder(oldRenderer.options); break; } } renderer = this._knownRenderers[oldRenderer._leaflet_id]; } return renderer; }, __getFactoryObject: function (layer, skipIgnore) { if (!skipIgnore) { for (var i = 0; i < this._ignoreArray.length; i++) { var ignoreObject = this._ignoreArray[i]; if (ignoreObject.type && layer instanceof ignoreObject.type) { return null; } } } for (var i = 0; i < this._cloneFactoryArray.length; i++) { var factoryObject = this._cloneFactoryArray[i]; if (factoryObject.type && layer instanceof factoryObject.type) { return factoryObject; } } for (var i = 0; i < this._cloneRendererArray.length; i++) { var factoryObject = this._cloneRendererArray[i]; if (factoryObject.type && layer instanceof factoryObject.type) { return null; } } this.__unknownLayer__(); return null; }, __unknownLayer__: function(){ console.warn('Unknown layer, cannot clone this layer. Leaflet version: ' + L.version); console.info('For additional information please refer to documentation on: https://github.com/Igor-Vladyka/leaflet.browser.print.'); console.info('-------------------------------------------------------------------------------------------------------------------'); } };<|fim▁end|>
this._knownRenderers = {};
<|file_name|>constants.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2013-2020 Blockstack PBC, a public benefit corporation // Copyright (C) 2020 Stacks Open Internet Foundation<|fim▁hole|>// it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program. If not, see <http://www.gnu.org/licenses/>. pub const AS_CONTRACT_MEMORY: u64 = 1; pub const AT_BLOCK_MEMORY: u64 = 1;<|fim▁end|>
// // This program is free software: you can redistribute it and/or modify
<|file_name|>variadic-ffi.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. extern "stdcall" { fn printf(_: *u8, ...); //~ ERROR: variadic function must have C calling convention } extern { fn foo(f: int, x: u8, ...); } extern "C" fn bar(f: int, x: u8) {} fn main() { unsafe { foo(); //~ ERROR: this function takes at least 2 parameters but 0 parameters were supplied foo(1); //~ ERROR: this function takes at least 2 parameters but 1 parameter was supplied let x: extern "C" unsafe fn(f: int, x: u8) = foo; //~^ ERROR: mismatched types: expected `extern "C" unsafe fn(int, u8)` but found `extern "C" unsafe fn(int, u8, ...)` (expected non-variadic fn but found variadic function) let y: extern "C" unsafe fn(f: int, x: u8, ...) = bar; //~^ ERROR: mismatched types: expected `extern "C" unsafe fn(int, u8, ...)` but found `extern "C" extern fn(int, u8)` (expected variadic fn but found non-variadic function) foo(1, 2, 3f32); //~ ERROR: can't pass an f32 to variadic function, cast to c_double foo(1, 2, true); //~ ERROR: can't pass bool to variadic function, cast to c_int foo(1, 2, 1i8); //~ ERROR: can't pass i8 to variadic function, cast to c_int foo(1, 2, 1u8); //~ ERROR: can't pass u8 to variadic function, cast to c_uint<|fim▁hole|>}<|fim▁end|>
foo(1, 2, 1i16); //~ ERROR: can't pass i16 to variadic function, cast to c_int foo(1, 2, 1u16); //~ ERROR: can't pass u16 to variadic function, cast to c_uint }
<|file_name|>GraphEdgeChangeEvent.java<|end_file_name|><|fim▁begin|>/* ========================================== * JGraphT : a free Java graph-theory library * ========================================== * * Project Info: http://jgrapht.sourceforge.net/ * Project Creator: Barak Naveh (http://sourceforge.net/users/barak_naveh) * * (C) Copyright 2003-2008, by Barak Naveh and Contributors. * * This library is free software; you can redistribute it and/or modify it * under the terms of the GNU Lesser General Public License as published by * the Free Software Foundation; either version 2.1 of the License, or * (at your option) any later version. * * This library is distributed in the hope that it will be useful, but * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public * License for more details. * * You should have received a copy of the GNU Lesser General Public License * along with this library; if not, write to the Free Software Foundation, * Inc., * 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. */ /* ------------------------- * GraphEdgeChangeEvent.java * ------------------------- * (C) Copyright 2003-2008, by Barak Naveh and Contributors. * * Original Author: Barak Naveh * Contributor(s): Christian Hammer * * $Id: GraphEdgeChangeEvent.java 645 2008-09-30 19:44:48Z perfecthash $ * * Changes * ------- * 10-Aug-2003 : Initial revision (BN); * 11-Mar-2004 : Made generic (CH); * */ package edu.nd.nina.event; /** * An event which indicates that a graph edge has changed, or is about to * change. The event can be used either as an indication <i>after</i> the edge * has been added or removed, or <i>before</i> it is added. The type of the * event can be tested using the {@link * edu.nd.nina.event.GraphChangeEvent#getType()} method. * * @author Barak Naveh * @since Aug 10, 2003 */ public class GraphEdgeChangeEvent<V, E> extends GraphChangeEvent { //~ Static fields/initializers --------------------------------------------- private static final long serialVersionUID = 3618134563335844662L; /** * Before edge added event. This event is fired before an edge is added to a * graph. */ public static final int BEFORE_EDGE_ADDED = 21; /** * Before edge removed event. This event is fired before an edge is removed <|fim▁hole|> /** * Edge added event. This event is fired after an edge is added to a graph. */ public static final int EDGE_ADDED = 23; /** * Edge removed event. This event is fired after an edge is removed from a * graph. */ public static final int EDGE_REMOVED = 24; //~ Instance fields -------------------------------------------------------- /** * The edge that this event is related to. */ protected E edge; //~ Constructors ----------------------------------------------------------- /** * Constructor for GraphEdgeChangeEvent. * * @param eventSource the source of this event. * @param type the event type of this event. * @param e the edge that this event is related to. */ public GraphEdgeChangeEvent(Object eventSource, int type, E e) { super(eventSource, type); edge = e; } //~ Methods ---------------------------------------------------------------- /** * Returns the edge that this event is related to. * * @return the edge that this event is related to. */ public E getEdge() { return edge; } } // End GraphEdgeChangeEvent.java<|fim▁end|>
* from a graph. */ public static final int BEFORE_EDGE_REMOVED = 22;
<|file_name|>lightbox.js<|end_file_name|><|fim▁begin|>/*! UIkit 3.6.13 | https://www.getuikit.com | (c) 2014 - 2021 YOOtheme | MIT License */ (function (global, factory) { typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory(require('uikit-util')) : typeof define === 'function' && define.amd ? define('uikitlightbox', ['uikit-util'], factory) : (global = typeof globalThis !== 'undefined' ? globalThis : global || self, global.UIkitLightbox = factory(global.UIkit.util)); }(this, (function (uikitUtil) { 'use strict'; var Animations = { slide: { show: function(dir) { return [ {transform: translate(dir * -100)}, {transform: translate()} ]; }, percent: function(current) { return translated(current); }, translate: function(percent, dir) { return [ {transform: translate(dir * -100 * percent)}, {transform: translate(dir * 100 * (1 - percent))} ]; } } }; function translated(el) { return Math.abs(uikitUtil.css(el, 'transform').split(',')[4] / el.offsetWidth) || 0; } function translate(value, unit) { if ( value === void 0 ) value = 0; if ( unit === void 0 ) unit = '%'; value += value ? unit : ''; return uikitUtil.isIE ? ("translateX(" + value + ")") : ("translate3d(" + value + ", 0, 0)"); // currently not translate3d in IE, translate3d within translate3d does not work while transitioning } function scale3d(value) { return ("scale3d(" + value + ", " + value + ", 1)"); } var Animations$1 = uikitUtil.assign({}, Animations, { fade: { show: function() { return [ {opacity: 0}, {opacity: 1} ]; }, percent: function(current) { return 1 - uikitUtil.css(current, 'opacity'); }, translate: function(percent) { return [ {opacity: 1 - percent}, {opacity: percent} ]; } }, scale: { show: function() { return [ {opacity: 0, transform: scale3d(1 - .2)}, {opacity: 1, transform: scale3d(1)} ]; }, percent: function(current) { return 1 - uikitUtil.css(current, 'opacity'); }, translate: function(percent) { return [ {opacity: 1 - percent, transform: scale3d(1 - .2 * percent)}, {opacity: percent, transform: scale3d(1 - .2 + .2 * percent)} ]; } } }); var Container = { props: { container: Boolean }, data: { container: true }, computed: { container: function(ref) { var container = ref.container; return container === true && this.$container || container && uikitUtil.$(container); } } }; var Class = { connected: function() { !uikitUtil.hasClass(this.$el, this.$name) && uikitUtil.addClass(this.$el, this.$name); } }; var Togglable = { props: { cls: Boolean, animation: 'list', duration: Number, origin: String, transition: String }, data: { cls: false, animation: [false], duration: 200, origin: false, transition: 'linear', clsEnter: 'uk-togglabe-enter', clsLeave: 'uk-togglabe-leave', initProps: { overflow: '', height: '', paddingTop: '', paddingBottom: '', marginTop: '', marginBottom: '' }, hideProps: { overflow: 'hidden', height: 0, paddingTop: 0, paddingBottom: 0, marginTop: 0, marginBottom: 0 } }, computed: { hasAnimation: function(ref) { var animation = ref.animation; return !!animation[0]; }, hasTransition: function(ref) { var animation = ref.animation; return this.hasAnimation && animation[0] === true; } }, methods: { toggleElement: function(targets, toggle, animate) { var this$1 = this; return new uikitUtil.Promise(function (resolve) { return uikitUtil.Promise.all(uikitUtil.toNodes(targets).map(function (el) { var show = uikitUtil.isBoolean(toggle) ? toggle : !this$1.isToggled(el); if (!uikitUtil.trigger(el, ("before" + (show ? 'show' : 'hide')), [this$1])) { return uikitUtil.Promise.reject(); } var promise = ( uikitUtil.isFunction(animate) ? animate : animate === false || !this$1.hasAnimation ? this$1._toggle : this$1.hasTransition ? toggleHeight(this$1) : toggleAnimation(this$1) )(el, show) || uikitUtil.Promise.resolve(); uikitUtil.addClass(el, show ? this$1.clsEnter : this$1.clsLeave); uikitUtil.trigger(el, show ? 'show' : 'hide', [this$1]); promise .catch(uikitUtil.noop) .then(function () { return uikitUtil.removeClass(el, show ? this$1.clsEnter : this$1.clsLeave); }); return promise.then(function () { uikitUtil.removeClass(el, show ? this$1.clsEnter : this$1.clsLeave); uikitUtil.trigger(el, show ? 'shown' : 'hidden', [this$1]); this$1.$update(el); }); })).then(resolve, uikitUtil.noop); } ); }, isToggled: function(el) { if ( el === void 0 ) el = this.$el; return uikitUtil.hasClass(el, this.clsEnter) ? true : uikitUtil.hasClass(el, this.clsLeave) ? false : this.cls ? uikitUtil.hasClass(el, this.cls.split(' ')[0]) : !uikitUtil.hasAttr(el, 'hidden'); }, _toggle: function(el, toggled) { if (!el) { return; } toggled = Boolean(toggled); var changed; if (this.cls) { changed = uikitUtil.includes(this.cls, ' ') || toggled !== uikitUtil.hasClass(el, this.cls); changed && uikitUtil.toggleClass(el, this.cls, uikitUtil.includes(this.cls, ' ') ? undefined : toggled); } else { changed = toggled === el.hidden; changed && (el.hidden = !toggled); } uikitUtil.$$('[autofocus]', el).some(function (el) { return uikitUtil.isVisible(el) ? el.focus() || true : el.blur(); }); if (changed) { uikitUtil.trigger(el, 'toggled', [toggled, this]); this.$update(el); } } } }; function toggleHeight(ref) { var isToggled = ref.isToggled; var duration = ref.duration; var initProps = ref.initProps; var hideProps = ref.hideProps; var transition = ref.transition; var _toggle = ref._toggle; return function (el, show) { var inProgress = uikitUtil.Transition.inProgress(el); var inner = el.hasChildNodes ? uikitUtil.toFloat(uikitUtil.css(el.firstElementChild, 'marginTop')) + uikitUtil.toFloat(uikitUtil.css(el.lastElementChild, 'marginBottom')) : 0; var currentHeight = uikitUtil.isVisible(el) ? uikitUtil.height(el) + (inProgress ? 0 : inner) : 0; uikitUtil.Transition.cancel(el); if (!isToggled(el)) { _toggle(el, true); } uikitUtil.height(el, ''); // Update child components first uikitUtil.fastdom.flush(); var endHeight = uikitUtil.height(el) + (inProgress ? 0 : inner); uikitUtil.height(el, currentHeight); return (show ? uikitUtil.Transition.start(el, uikitUtil.assign({}, initProps, {overflow: 'hidden', height: endHeight}), Math.round(duration * (1 - currentHeight / endHeight)), transition) : uikitUtil.Transition.start(el, hideProps, Math.round(duration * (currentHeight / endHeight)), transition).then(function () { return _toggle(el, false); }) ).then(function () { return uikitUtil.css(el, initProps); }); }; } function toggleAnimation(cmp) { return function (el, show) { uikitUtil.Animation.cancel(el); var animation = cmp.animation; var duration = cmp.duration; var _toggle = cmp._toggle; if (show) { _toggle(el, true); return uikitUtil.Animation.in(el, animation[0], duration, cmp.origin); } return uikitUtil.Animation.out(el, animation[1] || animation[0], duration, cmp.origin).then(function () { return _toggle(el, false); }); }; } var active = []; var Modal = { mixins: [Class, Container, Togglable], props: { selPanel: String, selClose: String, escClose: Boolean, bgClose: Boolean, stack: Boolean }, data: { cls: 'uk-open', escClose: true, bgClose: true, overlay: true, stack: false }, computed: { panel: function(ref, $el) { var selPanel = ref.selPanel; return uikitUtil.$(selPanel, $el); }, transitionElement: function() { return this.panel; }, bgClose: function(ref) { var bgClose = ref.bgClose; return bgClose && this.panel; } }, beforeDisconnect: function() { if (this.isToggled()) { this.toggleElement(this.$el, false, false); } }, events: [ { name: 'click', delegate: function() { return this.selClose; }, handler: function(e) { e.preventDefault(); this.hide(); } }, { name: 'toggle', self: true, handler: function(e) { if (e.defaultPrevented) { return; } e.preventDefault(); if (this.isToggled() === uikitUtil.includes(active, this)) { this.toggle(); } } }, { name: 'beforeshow', self: true, handler: function(e) { if (uikitUtil.includes(active, this)) { return false; } if (!this.stack && active.length) { uikitUtil.Promise.all(active.map(function (modal) { return modal.hide(); })).then(this.show); e.preventDefault(); } else { active.push(this); } } }, { name: 'show', self: true, handler: function() { var this$1 = this; if (uikitUtil.width(window) - uikitUtil.width(document) && this.overlay) { uikitUtil.css(document.body, 'overflowY', 'scroll'); } if (this.stack) { uikitUtil.css(this.$el, 'zIndex', uikitUtil.toFloat(uikitUtil.css(this.$el, 'zIndex')) + active.length); } uikitUtil.addClass(document.documentElement, this.clsPage); if (this.bgClose) { uikitUtil.once(this.$el, 'hide', uikitUtil.on(document, uikitUtil.pointerDown, function (ref) { var target = ref.target; if (uikitUtil.last(active) !== this$1 || this$1.overlay && !uikitUtil.within(target, this$1.$el) || uikitUtil.within(target, this$1.panel)) { return; } uikitUtil.once(document, (uikitUtil.pointerUp + " " + uikitUtil.pointerCancel + " scroll"), function (ref) { var defaultPrevented = ref.defaultPrevented; var type = ref.type; var newTarget = ref.target; if (!defaultPrevented && type === uikitUtil.pointerUp && target === newTarget) { this$1.hide(); } }, true); }), {self: true}); } if (this.escClose) { uikitUtil.once(this.$el, 'hide', uikitUtil.on(document, 'keydown', function (e) { if (e.keyCode === 27 && uikitUtil.last(active) === this$1) { this$1.hide(); } }), {self: true}); } } }, { name: 'hidden', self: true, handler: function() { var this$1 = this; active.splice(active.indexOf(this), 1); if (!active.length) { uikitUtil.css(document.body, 'overflowY', ''); } uikitUtil.css(this.$el, 'zIndex', ''); if (!active.some(function (modal) { return modal.clsPage === this$1.clsPage; })) { uikitUtil.removeClass(document.documentElement, this.clsPage); } } } ], methods: { toggle: function() { return this.isToggled() ? this.hide() : this.show(); }, show: function() { var this$1 = this; if (this.isToggled()) { return uikitUtil.Promise.resolve(); } if (this.container && uikitUtil.parent(this.$el) !== this.container) { uikitUtil.append(this.container, this.$el); return new uikitUtil.Promise(function (resolve) { return requestAnimationFrame(function () { return this$1.show().then(resolve); } ); } ); } return this.toggleElement(this.$el, true, animate(this)); }, hide: function() { if (!this.isToggled()) { return uikitUtil.Promise.resolve(); } return this.toggleElement(this.$el, false, animate(this)); } } }; function animate(ref) { var transitionElement = ref.transitionElement; var _toggle = ref._toggle; return function (el, show) { return new uikitUtil.Promise(function (resolve, reject) { return uikitUtil.once(el, 'show hide', function () { el._reject && el._reject(); el._reject = reject; _toggle(el, show); var off = uikitUtil.once(transitionElement, 'transitionstart', function () { uikitUtil.once(transitionElement, 'transitionend transitioncancel', resolve, {self: true}); clearTimeout(timer); }, {self: true}); var timer = setTimeout(function () { off(); resolve(); }, uikitUtil.toMs(uikitUtil.css(transitionElement, 'transitionDuration'))); }); } ).then(function () { return delete el._reject; }); }; } function Transitioner(prev, next, dir, ref) { var animation = ref.animation; var easing = ref.easing; var percent = animation.percent; var translate = animation.translate; var show = animation.show; if ( show === void 0 ) show = uikitUtil.noop; var props = show(dir); var deferred = new uikitUtil.Deferred(); return { dir: dir, show: function(duration, percent, linear) { var this$1 = this; if ( percent === void 0 ) percent = 0; var timing = linear ? 'linear' : easing; duration -= Math.round(duration * uikitUtil.clamp(percent, -1, 1)); this.translate(percent); triggerUpdate(next, 'itemin', {percent: percent, duration: duration, timing: timing, dir: dir}); triggerUpdate(prev, 'itemout', {percent: 1 - percent, duration: duration, timing: timing, dir: dir}); uikitUtil.Promise.all([ uikitUtil.Transition.start(next, props[1], duration, timing), uikitUtil.Transition.start(prev, props[0], duration, timing) ]).then(function () { this$1.reset(); deferred.resolve(); }, uikitUtil.noop); return deferred.promise; }, cancel: function() { uikitUtil.Transition.cancel([next, prev]); }, reset: function() { for (var prop in props[0]) { uikitUtil.css([next, prev], prop, ''); } }, forward: function(duration, percent) { if ( percent === void 0 ) percent = this.percent(); uikitUtil.Transition.cancel([next, prev]); return this.show(duration, percent, true); }, translate: function(percent) { this.reset(); var props = translate(percent, dir); uikitUtil.css(next, props[1]); uikitUtil.css(prev, props[0]); triggerUpdate(next, 'itemtranslatein', {percent: percent, dir: dir}); triggerUpdate(prev, 'itemtranslateout', {percent: 1 - percent, dir: dir}); }, percent: function() { return percent(prev || next, next, dir); }, getDistance: function() { return prev && prev.offsetWidth; } }; } function triggerUpdate(el, type, data) { uikitUtil.trigger(el, uikitUtil.createEvent(type, false, false, data)); } var SliderAutoplay = { props: { autoplay: Boolean, autoplayInterval: Number, pauseOnHover: Boolean }, data: { autoplay: false, autoplayInterval: 7000, pauseOnHover: true }, connected: function() { this.autoplay && this.startAutoplay(); }, disconnected: function() { this.stopAutoplay(); }, update: function() { uikitUtil.attr(this.slides, 'tabindex', '-1'); }, events: [ { name: 'visibilitychange', el: uikitUtil.inBrowser && document, filter: function() { return this.autoplay; }, handler: function() { if (document.hidden) { this.stopAutoplay(); } else { this.startAutoplay(); } } } ], methods: { startAutoplay: function() { var this$1 = this; this.stopAutoplay(); this.interval = setInterval( function () { return (!this$1.draggable || !uikitUtil.$(':focus', this$1.$el)) && (!this$1.pauseOnHover || !uikitUtil.matches(this$1.$el, ':hover')) && !this$1.stack.length && this$1.show('next'); }, this.autoplayInterval ); }, stopAutoplay: function() { this.interval && clearInterval(this.interval); } } }; var SliderDrag = { props: { draggable: Boolean }, data: { draggable: true, threshold: 10 }, created: function() { var this$1 = this; ['start', 'move', 'end'].forEach(function (key) { var fn = this$1[key]; this$1[key] = function (e) { var pos = uikitUtil.getEventPos(e).x * (uikitUtil.isRtl ? -1 : 1); this$1.prevPos = pos !== this$1.pos ? this$1.pos : this$1.prevPos; this$1.pos = pos; fn(e); }; }); }, events: [ { name: uikitUtil.pointerDown, delegate: function() { return this.selSlides; }, handler: function(e) { if (!this.draggable || !uikitUtil.isTouch(e) && hasTextNodesOnly(e.target) || uikitUtil.closest(e.target, uikitUtil.selInput) || e.button > 0 || this.length < 2 ) { return; } this.start(e); } }, { name: 'dragstart', handler: function(e) { e.preventDefault(); } } ], methods: { start: function() { this.drag = this.pos; if (this._transitioner) { this.percent = this._transitioner.percent(); this.drag += this._transitioner.getDistance() * this.percent * this.dir; this._transitioner.cancel(); this._transitioner.translate(this.percent); this.dragging = true; this.stack = []; } else { this.prevIndex = this.index; } // Workaround for iOS's inert scrolling preventing pointerdown event // https://developer.mozilla.org/en-US/docs/Web/CSS/touch-action uikitUtil.on(this.list, 'touchmove', this.move, {passive: false}); uikitUtil.on(document, uikitUtil.pointerMove, this.move, {passive: false}); uikitUtil.on(document, (uikitUtil.pointerUp + " " + uikitUtil.pointerCancel), this.end, true); uikitUtil.css(this.list, 'userSelect', 'none'); }, move: function(e) { var this$1 = this; var distance = this.pos - this.drag; if (distance === 0 || this.prevPos === this.pos || !this.dragging && Math.abs(distance) < this.threshold) { return; } e.cancelable && e.preventDefault(); this.dragging = true; this.dir = (distance < 0 ? 1 : -1); var ref = this; var slides = ref.slides; var ref$1 = this; var prevIndex = ref$1.prevIndex; var dis = Math.abs(distance); var nextIndex = this.getIndex(prevIndex + this.dir, prevIndex); var width = this._getDistance(prevIndex, nextIndex) || slides[prevIndex].offsetWidth; while (nextIndex !== prevIndex && dis > width) { this.drag -= width * this.dir; prevIndex = nextIndex; dis -= width; nextIndex = this.getIndex(prevIndex + this.dir, prevIndex); width = this._getDistance(prevIndex, nextIndex) || slides[prevIndex].offsetWidth; } this.percent = dis / width; var prev = slides[prevIndex]; var next = slides[nextIndex]; var changed = this.index !== nextIndex; var edge = prevIndex === nextIndex; var itemShown; [this.index, this.prevIndex].filter(function (i) { return !uikitUtil.includes([nextIndex, prevIndex], i); }).forEach(function (i) { uikitUtil.trigger(slides[i], 'itemhidden', [this$1]); if (edge) { itemShown = true; this$1.prevIndex = prevIndex; } }); if (this.index === prevIndex && this.prevIndex !== prevIndex || itemShown) { uikitUtil.trigger(slides[this.index], 'itemshown', [this]); } if (changed) { this.prevIndex = prevIndex; this.index = nextIndex; !edge && uikitUtil.trigger(prev, 'beforeitemhide', [this]); uikitUtil.trigger(next, 'beforeitemshow', [this]); } this._transitioner = this._translate(Math.abs(this.percent), prev, !edge && next); if (changed) { !edge && uikitUtil.trigger(prev, 'itemhide', [this]); uikitUtil.trigger(next, 'itemshow', [this]); } }, end: function() { uikitUtil.off(this.list, 'touchmove', this.move, {passive: false}); uikitUtil.off(document, uikitUtil.pointerMove, this.move, {passive: false}); uikitUtil.off(document, (uikitUtil.pointerUp + " " + uikitUtil.pointerCancel), this.end, true); if (this.dragging) { this.dragging = null; if (this.index === this.prevIndex) { this.percent = 1 - this.percent; this.dir *= -1; this._show(false, this.index, true); this._transitioner = null; } else { var dirChange = (uikitUtil.isRtl ? this.dir * (uikitUtil.isRtl ? 1 : -1) : this.dir) < 0 === this.prevPos > this.pos; this.index = dirChange ? this.index : this.prevIndex; if (dirChange) { this.percent = 1 - this.percent; } this.show(this.dir > 0 && !dirChange || this.dir < 0 && dirChange ? 'next' : 'previous', true); } } uikitUtil.css(this.list, {userSelect: '', pointerEvents: ''}); this.drag = this.percent = null; } } }; function hasTextNodesOnly(el) { return !el.children.length && el.childNodes.length; } var SliderNav = { data: { selNav: false }, computed: { nav: function(ref, $el) { var selNav = ref.selNav; return uikitUtil.$(selNav, $el); }, selNavItem: function(ref) { var attrItem = ref.attrItem; return ("[" + attrItem + "],[data-" + attrItem + "]"); }, navItems: function(_, $el) { return uikitUtil.$$(this.selNavItem, $el); } }, update: { write: function() { var this$1 = this; if (this.nav && this.length !== this.nav.children.length) { uikitUtil.html(this.nav, this.slides.map(function (_, i) { return ("<li " + (this$1.attrItem) + "=\"" + i + "\"><a href></a></li>"); }).join('')); } this.navItems.concat(this.nav).forEach(function (el) { return el && (el.hidden = !this$1.maxIndex); }); this.updateNav(); }, events: ['resize'] }, events: [ { name: 'click', delegate: function() { return this.selNavItem; }, handler: function(e) { e.preventDefault(); this.show(uikitUtil.data(e.current, this.attrItem)); } }, { name: 'itemshow', handler: 'updateNav' } ], methods: { updateNav: function() { var this$1 = this; var i = this.getValidIndex(); this.navItems.forEach(function (el) { var cmd = uikitUtil.data(el, this$1.attrItem); uikitUtil.toggleClass(el, this$1.clsActive, uikitUtil.toNumber(cmd) === i); uikitUtil.toggleClass(el, 'uk-invisible', this$1.finite && (cmd === 'previous' && i === 0 || cmd === 'next' && i >= this$1.maxIndex)); }); } } }; var Slider = { mixins: [SliderAutoplay, SliderDrag, SliderNav], props: { clsActivated: Boolean, easing: String, index: Number, finite: Boolean, velocity: Number, selSlides: String }, data: function () { return ({ easing: 'ease', finite: false, velocity: 1, index: 0, prevIndex: -1, stack: [], percent: 0, clsActive: 'uk-active', clsActivated: false, Transitioner: false, transitionOptions: {} }); }, connected: function() { this.prevIndex = -1; this.index = this.getValidIndex(this.$props.index); this.stack = []; }, disconnected: function() { uikitUtil.removeClass(this.slides, this.clsActive); }, computed: { duration: function(ref, $el) { var velocity = ref.velocity; return speedUp($el.offsetWidth / velocity); }, list: function(ref, $el) { var selList = ref.selList; return uikitUtil.$(selList, $el); }, maxIndex: function() { return this.length - 1; }, selSlides: function(ref) { var selList = ref.selList; var selSlides = ref.selSlides; <|fim▁hole|> }, slides: { get: function() { return uikitUtil.$$(this.selSlides, this.$el); }, watch: function() { this.$reset(); } }, length: function() { return this.slides.length; } }, events: { itemshown: function() { this.$update(this.list); } }, methods: { show: function(index, force) { var this$1 = this; if ( force === void 0 ) force = false; if (this.dragging || !this.length) { return; } var ref = this; var stack = ref.stack; var queueIndex = force ? 0 : stack.length; var reset = function () { stack.splice(queueIndex, 1); if (stack.length) { this$1.show(stack.shift(), true); } }; stack[force ? 'unshift' : 'push'](index); if (!force && stack.length > 1) { if (stack.length === 2) { this._transitioner.forward(Math.min(this.duration, 200)); } return; } var prevIndex = this.getIndex(this.index); var prev = uikitUtil.hasClass(this.slides, this.clsActive) && this.slides[prevIndex]; var nextIndex = this.getIndex(index, this.index); var next = this.slides[nextIndex]; if (prev === next) { reset(); return; } this.dir = getDirection(index, prevIndex); this.prevIndex = prevIndex; this.index = nextIndex; if (prev && !uikitUtil.trigger(prev, 'beforeitemhide', [this]) || !uikitUtil.trigger(next, 'beforeitemshow', [this, prev]) ) { this.index = this.prevIndex; reset(); return; } var promise = this._show(prev, next, force).then(function () { prev && uikitUtil.trigger(prev, 'itemhidden', [this$1]); uikitUtil.trigger(next, 'itemshown', [this$1]); return new uikitUtil.Promise(function (resolve) { uikitUtil.fastdom.write(function () { stack.shift(); if (stack.length) { this$1.show(stack.shift(), true); } else { this$1._transitioner = null; } resolve(); }); }); }); prev && uikitUtil.trigger(prev, 'itemhide', [this]); uikitUtil.trigger(next, 'itemshow', [this]); return promise; }, getIndex: function(index, prev) { if ( index === void 0 ) index = this.index; if ( prev === void 0 ) prev = this.index; return uikitUtil.clamp(uikitUtil.getIndex(index, this.slides, prev, this.finite), 0, this.maxIndex); }, getValidIndex: function(index, prevIndex) { if ( index === void 0 ) index = this.index; if ( prevIndex === void 0 ) prevIndex = this.prevIndex; return this.getIndex(index, prevIndex); }, _show: function(prev, next, force) { this._transitioner = this._getTransitioner( prev, next, this.dir, uikitUtil.assign({ easing: force ? next.offsetWidth < 600 ? 'cubic-bezier(0.25, 0.46, 0.45, 0.94)' /* easeOutQuad */ : 'cubic-bezier(0.165, 0.84, 0.44, 1)' /* easeOutQuart */ : this.easing }, this.transitionOptions) ); if (!force && !prev) { this._translate(1); return uikitUtil.Promise.resolve(); } var ref = this.stack; var length = ref.length; return this._transitioner[length > 1 ? 'forward' : 'show'](length > 1 ? Math.min(this.duration, 75 + 75 / (length - 1)) : this.duration, this.percent); }, _getDistance: function(prev, next) { return this._getTransitioner(prev, prev !== next && next).getDistance(); }, _translate: function(percent, prev, next) { if ( prev === void 0 ) prev = this.prevIndex; if ( next === void 0 ) next = this.index; var transitioner = this._getTransitioner(prev !== next ? prev : false, next); transitioner.translate(percent); return transitioner; }, _getTransitioner: function(prev, next, dir, options) { if ( prev === void 0 ) prev = this.prevIndex; if ( next === void 0 ) next = this.index; if ( dir === void 0 ) dir = this.dir || 1; if ( options === void 0 ) options = this.transitionOptions; return new this.Transitioner( uikitUtil.isNumber(prev) ? this.slides[prev] : prev, uikitUtil.isNumber(next) ? this.slides[next] : next, dir * (uikitUtil.isRtl ? -1 : 1), options ); } } }; function getDirection(index, prevIndex) { return index === 'next' ? 1 : index === 'previous' ? -1 : index < prevIndex ? -1 : 1; } function speedUp(x) { return .5 * x + 300; // parabola through (400,500; 600,600; 1800,1200) } var Slideshow = { mixins: [Slider], props: { animation: String }, data: { animation: 'slide', clsActivated: 'uk-transition-active', Animations: Animations, Transitioner: Transitioner }, computed: { animation: function(ref) { var animation = ref.animation; var Animations = ref.Animations; return uikitUtil.assign(Animations[animation] || Animations.slide, {name: animation}); }, transitionOptions: function() { return {animation: this.animation}; } }, events: { 'itemshow itemhide itemshown itemhidden': function(ref) { var target = ref.target; this.$update(target); }, beforeitemshow: function(ref) { var target = ref.target; uikitUtil.addClass(target, this.clsActive); }, itemshown: function(ref) { var target = ref.target; uikitUtil.addClass(target, this.clsActivated); }, itemhidden: function(ref) { var target = ref.target; uikitUtil.removeClass(target, this.clsActive, this.clsActivated); } } }; var LightboxPanel = { mixins: [Container, Modal, Togglable, Slideshow], functional: true, props: { delayControls: Number, preload: Number, videoAutoplay: Boolean, template: String }, data: function () { return ({ preload: 1, videoAutoplay: false, delayControls: 3000, items: [], cls: 'uk-open', clsPage: 'uk-lightbox-page', selList: '.uk-lightbox-items', attrItem: 'uk-lightbox-item', selClose: '.uk-close-large', selCaption: '.uk-lightbox-caption', pauseOnHover: false, velocity: 2, Animations: Animations$1, template: "<div class=\"uk-lightbox uk-overflow-hidden\"> <ul class=\"uk-lightbox-items\"></ul> <div class=\"uk-lightbox-toolbar uk-position-top uk-text-right uk-transition-slide-top uk-transition-opaque\"> <button class=\"uk-lightbox-toolbar-icon uk-close-large\" type=\"button\" uk-close></button> </div> <a class=\"uk-lightbox-button uk-position-center-left uk-position-medium uk-transition-fade\" href uk-slidenav-previous uk-lightbox-item=\"previous\"></a> <a class=\"uk-lightbox-button uk-position-center-right uk-position-medium uk-transition-fade\" href uk-slidenav-next uk-lightbox-item=\"next\"></a> <div class=\"uk-lightbox-toolbar uk-lightbox-caption uk-position-bottom uk-text-center uk-transition-slide-bottom uk-transition-opaque\"></div> </div>" }); }, created: function() { var $el = uikitUtil.$(this.template); var list = uikitUtil.$(this.selList, $el); this.items.forEach(function () { return uikitUtil.append(list, '<li>'); }); this.$mount(uikitUtil.append(this.container, $el)); }, computed: { caption: function(ref, $el) { var selCaption = ref.selCaption; return uikitUtil.$('.uk-lightbox-caption', $el); } }, events: [ { name: (uikitUtil.pointerMove + " " + uikitUtil.pointerDown + " keydown"), handler: 'showControls' }, { name: 'click', self: true, delegate: function() { return this.selSlides; }, handler: function(e) { if (e.defaultPrevented) { return; } this.hide(); } }, { name: 'shown', self: true, handler: function() { this.showControls(); } }, { name: 'hide', self: true, handler: function() { this.hideControls(); uikitUtil.removeClass(this.slides, this.clsActive); uikitUtil.Transition.stop(this.slides); } }, { name: 'hidden', self: true, handler: function() { this.$destroy(true); } }, { name: 'keyup', el: uikitUtil.inBrowser && document, handler: function(e) { if (!this.isToggled(this.$el) || !this.draggable) { return; } switch (e.keyCode) { case 37: this.show('previous'); break; case 39: this.show('next'); break; } } }, { name: 'beforeitemshow', handler: function(e) { if (this.isToggled()) { return; } this.draggable = false; e.preventDefault(); this.toggleElement(this.$el, true, false); this.animation = Animations$1['scale']; uikitUtil.removeClass(e.target, this.clsActive); this.stack.splice(1, 0, this.index); } }, { name: 'itemshow', handler: function() { uikitUtil.html(this.caption, this.getItem().caption || ''); for (var j = -this.preload; j <= this.preload; j++) { this.loadItem(this.index + j); } } }, { name: 'itemshown', handler: function() { this.draggable = this.$props.draggable; } }, { name: 'itemload', handler: function(_, item) { var this$1 = this; var src = item.source; var type = item.type; var alt = item.alt; if ( alt === void 0 ) alt = ''; var poster = item.poster; var attrs = item.attrs; if ( attrs === void 0 ) attrs = {}; this.setItem(item, '<span uk-spinner></span>'); if (!src) { return; } var matches; var iframeAttrs = { frameborder: '0', allow: 'autoplay', allowfullscreen: '', style: 'max-width: 100%; box-sizing: border-box;', 'uk-responsive': '', 'uk-video': ("" + (this.videoAutoplay)) }; // Image if (type === 'image' || src.match(/\.(jpe?g|png|gif|svg|webp)($|\?)/i)) { uikitUtil.getImage(src, attrs.srcset, attrs.size).then( function (ref) { var width = ref.width; var height = ref.height; return this$1.setItem(item, createEl('img', uikitUtil.assign({src: src, width: width, height: height, alt: alt}, attrs))); }, function () { return this$1.setError(item); } ); // Video } else if (type === 'video' || src.match(/\.(mp4|webm|ogv)($|\?)/i)) { var video = createEl('video', uikitUtil.assign({ src: src, poster: poster, controls: '', playsinline: '', 'uk-video': ("" + (this.videoAutoplay)) }, attrs)); uikitUtil.on(video, 'loadedmetadata', function () { uikitUtil.attr(video, {width: video.videoWidth, height: video.videoHeight}); this$1.setItem(item, video); }); uikitUtil.on(video, 'error', function () { return this$1.setError(item); }); // Iframe } else if (type === 'iframe' || src.match(/\.(html|php)($|\?)/i)) { this.setItem(item, createEl('iframe', uikitUtil.assign({ src: src, frameborder: '0', allowfullscreen: '', class: 'uk-lightbox-iframe' }, attrs))); // YouTube } else if ((matches = src.match(/\/\/(?:.*?youtube(-nocookie)?\..*?[?&]v=|youtu\.be\/)([\w-]{11})[&?]?(.*)?/))) { this.setItem(item, createEl('iframe', uikitUtil.assign({ src: ("https://www.youtube" + (matches[1] || '') + ".com/embed/" + (matches[2]) + (matches[3] ? ("?" + (matches[3])) : '')), width: 1920, height: 1080 }, iframeAttrs, attrs))); // Vimeo } else if ((matches = src.match(/\/\/.*?vimeo\.[a-z]+\/(\d+)[&?]?(.*)?/))) { uikitUtil.ajax(("https://vimeo.com/api/oembed.json?maxwidth=1920&url=" + (encodeURI(src))), { responseType: 'json', withCredentials: false }).then( function (ref) { var ref_response = ref.response; var height = ref_response.height; var width = ref_response.width; return this$1.setItem(item, createEl('iframe', uikitUtil.assign({ src: ("https://player.vimeo.com/video/" + (matches[1]) + (matches[2] ? ("?" + (matches[2])) : '')), width: width, height: height }, iframeAttrs, attrs))); }, function () { return this$1.setError(item); } ); } } } ], methods: { loadItem: function(index) { if ( index === void 0 ) index = this.index; var item = this.getItem(index); if (!this.getSlide(item).childElementCount) { uikitUtil.trigger(this.$el, 'itemload', [item]); } }, getItem: function(index) { if ( index === void 0 ) index = this.index; return this.items[uikitUtil.getIndex(index, this.slides)]; }, setItem: function(item, content) { uikitUtil.trigger(this.$el, 'itemloaded', [this, uikitUtil.html(this.getSlide(item), content) ]); }, getSlide: function(item) { return this.slides[this.items.indexOf(item)]; }, setError: function(item) { this.setItem(item, '<span uk-icon="icon: bolt; ratio: 2"></span>'); }, showControls: function() { clearTimeout(this.controlsTimer); this.controlsTimer = setTimeout(this.hideControls, this.delayControls); uikitUtil.addClass(this.$el, 'uk-active', 'uk-transition-active'); }, hideControls: function() { uikitUtil.removeClass(this.$el, 'uk-active', 'uk-transition-active'); } } }; function createEl(tag, attrs) { var el = uikitUtil.fragment(("<" + tag + ">")); uikitUtil.attr(el, attrs); return el; } var Component = { install: install, props: {toggle: String}, data: {toggle: 'a'}, computed: { toggles: { get: function(ref, $el) { var toggle = ref.toggle; return uikitUtil.$$(toggle, $el); }, watch: function() { this.hide(); } } }, disconnected: function() { this.hide(); }, events: [ { name: 'click', delegate: function() { return ((this.toggle) + ":not(.uk-disabled)"); }, handler: function(e) { e.preventDefault(); this.show(e.current); } } ], methods: { show: function(index) { var this$1 = this; var items = uikitUtil.uniqueBy(this.toggles.map(toItem), 'source'); if (uikitUtil.isElement(index)) { var ref = toItem(index); var source = ref.source; index = uikitUtil.findIndex(items, function (ref) { var src = ref.source; return source === src; }); } this.panel = this.panel || this.$create('lightboxPanel', uikitUtil.assign({}, this.$props, {items: items})); uikitUtil.on(this.panel.$el, 'hidden', function () { return this$1.panel = false; }); return this.panel.show(index); }, hide: function() { return this.panel && this.panel.hide(); } } }; function install(UIkit, Lightbox) { if (!UIkit.lightboxPanel) { UIkit.component('lightboxPanel', LightboxPanel); } uikitUtil.assign( Lightbox.props, UIkit.component('lightboxPanel').options.props ); } function toItem(el) { var item = {}; ['href', 'caption', 'type', 'poster', 'alt', 'attrs'].forEach(function (attr) { item[attr === 'href' ? 'source' : attr] = uikitUtil.data(el, attr); }); item.attrs = uikitUtil.parseOptions(item.attrs); return item; } if (typeof window !== 'undefined' && window.UIkit) { window.UIkit.component('lightbox', Component); } return Component; })));<|fim▁end|>
return (selList + " " + (selSlides || '> *'));
<|file_name|>myNode.py<|end_file_name|><|fim▁begin|>from lib.flowchart.nodes.generalNode import NodeWithCtrlWidget class myNode(NodeWithCtrlWidget): '''This is test docstring''' nodeName = 'myTestNode' uiTemplate = [{'name': 'HNO3', 'type': 'list', 'value': 'Closest Time'}, {'name': 'C2H5OH', 'type': 'bool', 'value': 0}, {'name': 'H20', 'type': 'str', 'value': '?/?'}] def __init__(self, name, **kwargs): super(myNode, self).__init__(name, terminals={'In': {'io': 'in'}, 'Out': {'io': 'out'}}, **kwargs) <|fim▁hole|><|fim▁end|>
def process(self, In): print ('processing')
<|file_name|>test_treecontroller.py<|end_file_name|><|fim▁begin|>import unittest from robot.parsing.model import TestCase, TestCaseFile from robot.utils.asserts import assert_equals from robotide.controller.commands import ChangeTag from robotide.controller.filecontrollers import TestCaseFileController from robotide.controller.macrocontrollers import TestCaseController from robotide.controller.tablecontrollers import TestCaseTableController from robotide.controller.tags import Tag from robotide.controller.ui.treecontroller import TreeController, _History, \ TestSelectionController class ActionRegistererMock(object): def register_actions(self, action_collections): self.action_collections = action_collections def register_action(self, action): pass class TestTreeController(unittest.TestCase): def test_register_tree_actions(self): mocked_ar = ActionRegistererMock() TreeController(None, mocked_ar, None, None).register_tree_actions() self.assertEquals( ["Go &Back", "Go &Forward"], [a.name for a in mocked_ar.action_collections]) <|fim▁hole|>class _BaseTreeControllerTest(object): def setUp(self): self.history = _History() self.controller = TreeController( self._tree_mock(), None, None, None, history=self.history) self.controller.add_to_history("Top Suite") def _tree_mock(self): tree_mock = lambda: 0 self._tree_mock_items = [] tree_mock.SelectItem = lambda i: self._tree_mock_items.append(i) return tree_mock def _select_node(self, value): self.controller.add_to_history(value) def _go_back_and_return_selection(self): self.controller.OnGoBack(None) return self._tree_mock_items[-1] def _go_forward_and_return_selection(self): self.controller.OnGoForward(None) return self._tree_mock_items[-1] class TestNavigationHistory(_BaseTreeControllerTest, unittest.TestCase): def test_go_back_one_level(self): self._select_node('Top Suite Fake UK 2') self.assertEquals('Top Suite', self._go_back_and_return_selection()) def test_go_back_two_levels(self): nodes = ['Top Suite Fake UK 1', 'Sub Suite 1', 'Sub Suite 1 Fake UK 0'] for name in nodes: self._select_node(name) nodes.reverse() for name in nodes[1:]: self.assertEquals(name, self._go_back_and_return_selection()) def test_it_is_not_possible_to_go_back_farther_than_history(self): nodes = ['Top Suite Fake UK 1', 'Sub Suite 1', 'Sub Suite 1 Fake UK 0'] for name in nodes: self._select_node(name) nodes.reverse() for name in nodes[1:] + ['Top Suite']: self._go_back_and_assert_selection(name) self._go_back_and_assert_selection('Top Suite') def test_go_back_with_selecting_in_between(self): nodes = ['Top Suite Fake UK 1', 'Sub Suite 1', 'Sub Suite 1 Fake UK 0'] for name in nodes: self._select_node(name) self._go_back_and_assert_selection('Sub Suite 1') self._select_node('Sub Suite 2 Fake UK 0') self._go_back_and_assert_selection('Sub Suite 1') def test_go_forward(self): nodes = ['Top Suite Fake UK 1', 'Sub Suite 1', 'Sub Suite 1 Fake UK 0'] for name in nodes: self._select_node(name) for _ in range(3): self.controller.OnGoBack(None) for name in nodes: self._go_forward_and_assert_selection(name) def test_go_back_and_forward_between_suite_and_resource(self): nodes = ['Top Suite Fake UK 0', 'Resource Keyword', 'Sub Suite 0 Fake UK 2'] for name in nodes: self._select_node(name) self._go_back_and_assert_selection('Resource Keyword') self._go_back_and_assert_selection('Top Suite Fake UK 0') self._go_forward_and_assert_selection('Resource Keyword') self._go_forward_and_assert_selection('Sub Suite 0 Fake UK 2') def _go_back_and_assert_selection(self, expected_selection): assert_equals(self._go_back_and_return_selection(), expected_selection) def _go_forward_and_assert_selection(self, expected_selection): assert_equals( self._go_forward_and_return_selection(), expected_selection) class TestTestSelectionController(unittest.TestCase): def setUp(self): self._tsc = TestSelectionController() def test_test_selection_is_empty_by_default(self): self.assertTrue(self._tsc.is_empty()) def test_test_selection_is_not_empty_when_it_contains_a_test(self): self._tsc.select(self._create_test()) self.assertFalse(self._tsc.is_empty()) def test_test_selection_is_empty_after_removing_same_test_from_there_even_when_it_is_not_the_same_object(self): self._tsc.select(self._create_test()) self._tsc.select(self._create_test(), False) self.assertTrue(self._tsc.is_empty()) def test_adding_tag_to_selected_tests(self): tests = [self._create_test('test%d' % i) for i in range(10)] for t in tests: self._tsc.select(t) self._tsc.add_tag('foo') for t in tests: self.assertEqual([tag.name for tag in t.tags], ['foo']) def test_adding_a_tag_to_test_with_a_default_tag(self): test = self._create_test() test.datafile_controller.default_tags.execute( ChangeTag(Tag(None), 'default')) assert_equals([t.name for t in test.tags], ['default']) self._tsc.select(test) self._tsc.add_tag('custom') self.assertEqual([t.name for t in test.tags], ['default', 'custom']) def _create_test(self, name='test'): suite = TestCaseFile(source='suite') suite_controller = TestCaseFileController(suite) parent = TestCaseTableController( suite_controller, suite.testcase_table) test = TestCase(parent=lambda: 0, name=name) return TestCaseController(parent, test)<|fim▁end|>
<|file_name|>renew_service.py<|end_file_name|><|fim▁begin|># Copyright (c) 2014 by Ecreall under licence AGPL terms # available on http://www.gnu.org/licenses/agpl.html # licence: AGPL # author: Amen Souissi from pyramid.view import view_config from dace.processinstance.core import DEFAULTMAPPING_ACTIONS_VIEWS from pontus.view import BasicView from pontus.form import FormView from pontus.default_behavior import Cancel from pontus.view_operation import MultipleView from pontus.schema import select from lac.content.processes.services_processes.behaviors import ( RenewSellingTicketsService) from lac.content.service import ( SellingTicketsServiceSchema, SellingTicketsService) from lac import _ class RenewSellingTicketsServiceViewStudyReport(BasicView): title = 'Alert for renew' name = 'alertforrenew'<|fim▁hole|> result = {} values = {'context': self.context} body = self.content(args=values, template=self.template)['body'] item = self.adapt_item(body, self.viewid) result['coordinates'] = {self.coordinates: [item]} return result class RenewSellingTicketsServiceView(FormView): title = _('Renew the sellingtickets service') schema = select(SellingTicketsServiceSchema(factory=SellingTicketsService, editable=True), ['title']) behaviors = [RenewSellingTicketsService, Cancel] formid = 'formrenewsellingticketsservice' name = 'renewsellingticketsservice' validate_behaviors = False def default_data(self): return self.context @view_config( name='renewsellingticketsservice', context=SellingTicketsService, renderer='pontus:templates/views_templates/grid.pt', ) class RenewSellingTicketsServiceViewMultipleView(MultipleView): title = _('Renew the sellingtickets service') name = 'renewsellingticketsservice' viewid = 'renewsellingticketsservice' template = 'daceui:templates/simple_mergedmultipleview.pt' views = (RenewSellingTicketsServiceViewStudyReport, RenewSellingTicketsServiceView) validators = [RenewSellingTicketsService.get_validator()] DEFAULTMAPPING_ACTIONS_VIEWS.update( {RenewSellingTicketsService: RenewSellingTicketsServiceViewMultipleView})<|fim▁end|>
template = 'lac:views/services_processes/selling_tickets_service/templates/alert_renew.pt' def update(self):
<|file_name|>AtbashSpec.ts<|end_file_name|><|fim▁begin|>import { Atbash } from "../../../main/decryptor/converters/Atbash"; import { Converter } from "../../../main/decryptor/converters/Converter"; describe("Atbash", () => { describe("convert", () => { it("converts empty string to empty string", () => { expect(new Atbash().convert("")).toBe(""); }); it("keeps white-space only string", () => { expect(new Atbash().convert(" \n\r\t")).toBe(" \n\r\t"); }); it("keeps non-alphabet characters", () => { expect(new Atbash().convert("1ö_<")).toBe("1ö_<"); }); it("converts normal lower-case characters", () => { expect(new Atbash().convert("foobar")).toBe("ullyzi"); }); it("converts normal upper-case characters", () => { expect(new Atbash().convert("FOOBAR")).toBe("ULLYZI"); }); it("converts normal mixed-case characters", () => { expect(new Atbash().convert("FooBar")).toBe("UllYzi"); });<|fim▁hole|> }); }); describe("toJSON", () => { it("serializes the converter", () => { expect(new Atbash().toJSON()).toEqual({ "type": "atbash" }); }); }); describe("fromJSON", () => { it("deserializes a converter", () => { const converter = Converter.fromJSON<Atbash>({ "type": "atbash" }); expect(converter).toEqual(jasmine.any(Atbash)); }); }); });<|fim▁end|>
it("converts only normal characters in mixed string", () => { expect(new Atbash().convert("#12FooBar!")).toBe("#12UllYzi!");
<|file_name|>schema_test.go<|end_file_name|><|fim▁begin|>/* Copyright 2011 Google Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package schema import ( "encoding/json" "io" "io/ioutil" "os" "path/filepath" "reflect" "strings" "testing"<|fim▁hole|> "camlistore.org/pkg/blob" "camlistore.org/pkg/osutil" . "camlistore.org/pkg/test/asserts" ) const kExpectedHeader = `{"camliVersion"` func TestJSON(t *testing.T) { fileName := "schema_test.go" fi, _ := os.Lstat(fileName) m := NewCommonFileMap(fileName, fi) json, err := m.JSON() if err != nil { t.Fatalf("Unexpected error: %v", err) } t.Logf("Got json: [%s]\n", json) // TODO: test it parses back if !strings.HasPrefix(json, kExpectedHeader) { t.Errorf("JSON does't start with expected header.") } } func TestRegularFile(t *testing.T) { fileName := "schema_test.go" fi, err := os.Lstat(fileName) AssertNil(t, err, "schema_test.go stat") m := NewCommonFileMap("schema_test.go", fi) json, err := m.JSON() if err != nil { t.Fatalf("Unexpected error: %v", err) } t.Logf("Got json for regular file: [%s]\n", json) } func TestSymlink(t *testing.T) { td, err := ioutil.TempDir("", "") if err != nil { t.Fatal(err) } defer os.RemoveAll(td) symFile := filepath.Join(td, "test-symlink") if err := os.Symlink("test-target", symFile); err != nil { t.Fatal(err) } // Shouldn't be accessed: if err := ioutil.WriteFile(filepath.Join(td, "test-target"), []byte("foo bar"), 0644); err != nil { t.Fatal(err) } fi, err := os.Lstat(symFile) if err != nil { t.Fatal(err) } m := NewCommonFileMap(symFile, fi) json, err := m.JSON() if err != nil { t.Fatalf("Unexpected error: %v", err) } if strings.Contains(string(json), "unixPermission") { t.Errorf("JSON unexpectedly contains unixPermission: [%s]\n", json) } } func TestUtf8StrLen(t *testing.T) { tests := []struct { in string want int }{ {"", 0}, {"a", 1}, {"foo", 3}, {"Здравствуйте!", 25}, {"foo\x80", 3}, {"\x80foo", 0}, } for _, tt := range tests { got := utf8StrLen(tt.in) if got != tt.want { t.Errorf("utf8StrLen(%q) = %v; want %v", tt.in, got, tt.want) } } } func TestMixedArrayFromString(t *testing.T) { b80 := byte('\x80') tests := []struct { in string want []interface{} }{ {"foo", []interface{}{"foo"}}, {"\x80foo", []interface{}{b80, "foo"}}, {"foo\x80foo", []interface{}{"foo", b80, "foo"}}, {"foo\x80", []interface{}{"foo", b80}}, {"\x80", []interface{}{b80}}, {"\x80\x80", []interface{}{b80, b80}}, } for _, tt := range tests { got := mixedArrayFromString(tt.in) if !reflect.DeepEqual(got, tt.want) { t.Errorf("mixedArrayFromString(%q) = %#v; want %#v", tt.in, got, tt.want) } } } type mixPartsTest struct { json, expected string } func TestStringFromMixedArray(t *testing.T) { tests := []mixPartsTest{ {`["brad"]`, "brad"}, {`["brad", 32, 70]`, "brad F"}, {`["brad", "fitz"]`, "bradfitz"}, {`["Am", 233, "lie.jpg"]`, "Am\xe9lie.jpg"}, } for idx, test := range tests { var v []interface{} if err := json.Unmarshal([]byte(test.json), &v); err != nil { t.Fatalf("invalid JSON in test %d", idx) } got := stringFromMixedArray(v) if got != test.expected { t.Errorf("test %d got %q; expected %q", idx, got, test.expected) } } } func TestParseInLocation_UnknownLocation(t *testing.T) { // Example of parsing a time from an API (e.g. Flickr) that // doesn't know its timezone. const format = "2006-01-02 15:04:05" const when = "2010-11-12 13:14:15" tm, err := time.ParseInLocation(format, when, UnknownLocation) if err != nil { t.Fatal(err) } got, want := RFC3339FromTime(tm), "2010-11-12T13:14:15-00:01" if got != want { t.Errorf("parsed %v to %s; want %s", tm, got, want) } } func TestIsZoneKnown(t *testing.T) { if !IsZoneKnown(time.Now()) { t.Errorf("should know Now's zone") } if !IsZoneKnown(time.Now().UTC()) { t.Errorf("UTC should be known") } if IsZoneKnown(time.Now().In(UnknownLocation)) { t.Errorf("with explicit unknown location, should be false") } if IsZoneKnown(time.Now().In(time.FixedZone("xx", -60))) { t.Errorf("with other fixed zone at -60, should be false") } } func TestRFC3339(t *testing.T) { tests := []string{ "2012-05-13T15:02:47Z", "2012-05-13T15:02:47.1234Z", "2012-05-13T15:02:47.123456789Z", "2012-05-13T15:02:47-00:01", } for _, in := range tests { tm, err := time.Parse(time.RFC3339, in) if err != nil { t.Errorf("error parsing %q", in) continue } knownZone := IsZoneKnown(tm) out := RFC3339FromTime(tm) if in != out { t.Errorf("RFC3339FromTime(%q) = %q; want %q", in, out, in) } sub := "Z" if !knownZone { sub = "-00:01" } if !strings.Contains(out, sub) { t.Errorf("expected substring %q in %q", sub, out) } } } func TestBlobFromReader(t *testing.T) { br := blob.MustParse("sha1-f1d2d2f924e986ac86fdf7b36c94bcdf32beec15") blob, err := BlobFromReader(br, strings.NewReader(`{"camliVersion": 1, "camliType": "foo"} `)) if err != nil { t.Error(err) } else if blob.Type() != "foo" { t.Errorf("got type %q; want foo", blob.Type()) } blob, err = BlobFromReader(br, strings.NewReader(`{"camliVersion": 1, "camliType": "foo"} X `)) if err == nil { // TODO(bradfitz): fix this somehow. Currently encoding/json's // decoder over-reads. // See: https://code.google.com/p/go/issues/detail?id=1955 , // which was "fixed", but not really. t.Logf("TODO(bradfitz): make sure bogus non-whitespace after the JSON object causes an error.") } } func TestAttribute(t *testing.T) { tm := time.Unix(123, 456) br := blob.MustParse("xxx-1234") tests := []struct { bb *Builder want string }{ { bb: NewSetAttributeClaim(br, "attr1", "val1"), want: `{"camliVersion": 1, "attribute": "attr1", "camliType": "claim", "claimDate": "1970-01-01T00:02:03.000000456Z", "claimType": "set-attribute", "permaNode": "xxx-1234", "value": "val1" }`, }, { bb: NewAddAttributeClaim(br, "tag", "funny"), want: `{"camliVersion": 1, "attribute": "tag", "camliType": "claim", "claimDate": "1970-01-01T00:02:03.000000456Z", "claimType": "add-attribute", "permaNode": "xxx-1234", "value": "funny" }`, }, { bb: NewDelAttributeClaim(br, "attr1", "val1"), want: `{"camliVersion": 1, "attribute": "attr1", "camliType": "claim", "claimDate": "1970-01-01T00:02:03.000000456Z", "claimType": "del-attribute", "permaNode": "xxx-1234", "value": "val1" }`, }, { bb: NewDelAttributeClaim(br, "attr2", ""), want: `{"camliVersion": 1, "attribute": "attr2", "camliType": "claim", "claimDate": "1970-01-01T00:02:03.000000456Z", "claimType": "del-attribute", "permaNode": "xxx-1234" }`, }, { bb: newClaim(&claimParam{ permanode: br, claimType: SetAttributeClaim, attribute: "foo", value: "bar", }, &claimParam{ permanode: br, claimType: DelAttributeClaim, attribute: "foo", value: "specific-del", }, &claimParam{ permanode: br, claimType: DelAttributeClaim, attribute: "foo", }), want: `{"camliVersion": 1, "camliType": "claim", "claimDate": "1970-01-01T00:02:03.000000456Z", "claimType": "multi", "claims": [ { "attribute": "foo", "claimType": "set-attribute", "permaNode": "xxx-1234", "value": "bar" }, { "attribute": "foo", "claimType": "del-attribute", "permaNode": "xxx-1234", "value": "specific-del" }, { "attribute": "foo", "claimType": "del-attribute", "permaNode": "xxx-1234" } ] }`, }, } for i, tt := range tests { tt.bb.SetClaimDate(tm) got, err := tt.bb.JSON() if err != nil { t.Errorf("%d. JSON error = %v", i, err) continue } if got != tt.want { t.Errorf("%d.\t got:\n%s\n\twant:q\n%s", i, got, tt.want) } } } func TestDeleteClaim(t *testing.T) { tm := time.Unix(123, 456) br := blob.MustParse("xxx-1234") delTest := struct { bb *Builder want string }{ bb: NewDeleteClaim(br), want: `{"camliVersion": 1, "camliType": "claim", "claimDate": "1970-01-01T00:02:03.000000456Z", "claimType": "delete", "target": "xxx-1234" }`, } delTest.bb.SetClaimDate(tm) got, err := delTest.bb.JSON() if err != nil { t.Fatalf("JSON error = %v", err) } if got != delTest.want { t.Fatalf("got:\n%s\n\twant:q\n%s", got, delTest.want) } } func TestAsClaimAndAsShare(t *testing.T) { br := blob.MustParse("xxx-1234") signer := blob.MustParse("yyy-5678") bb := NewSetAttributeClaim(br, "title", "Test Title") bb = bb.SetSigner(signer) bb = bb.SetClaimDate(time.Now()) c1 := bb.Blob() c1.ss.Sig = "non-null-sig" // required by AsShare bb = NewShareRef(ShareHaveRef, br, true) bb = bb.SetSigner(signer) bb = bb.SetClaimDate(time.Now()) c2 := bb.Blob() c2.ss.Sig = "non-null-sig" // required by AsShare if !br.Valid() { t.Error("Blobref not valid") } _, ok := c1.AsClaim() if !ok { t.Error("Claim 1 not returned as claim") } _, ok = c2.AsClaim() if !ok { t.Error("Claim 2 not returned as claim") } s, ok := c1.AsShare() if ok { t.Error("Title claim returned share", s) } s, ok = c2.AsShare() if !ok { t.Error("Share claim failed to return share") } } func TestShareExpiration(t *testing.T) { defer func() { clockNow = time.Now }() b, err := BlobFromReader( blob.MustParse("sha1-64ffa72fa9bcb2f825e7ed40b9451e5cadca4c2c"), strings.NewReader(`{"camliVersion": 1, "authType": "haveref", "camliSigner": "sha1-f2b0b7da718b97ce8c31591d8ed4645c777f3ef4", "camliType": "claim", "claimDate": "2013-09-08T23:58:53.656549677Z", "claimType": "share", "expires": "2013-09-09T23:58:53.65658012Z", "target": "sha1-f1d2d2f924e986ac86fdf7b36c94bcdf32beec15", "transitive": false ,"camliSig":"wsBcBAABCAAQBQJSLQ89CRApMaZ8JvWr2gAAcuEIABRQolhn+yKksfaBx6oLo18NWvWQ+aYweF+5Gu0TH0Ixur7t1o5HFtFSSfFISyggSZDJSjsxoxaawhWrvCe9dZuU2s/zgRpgUtd2xmBt82tLOn9JidnUavsNGFXbfCwdUBSkzN0vDYLmgXW0VtiybB354uIKfOInZor2j8Mq0p6pkWzK3qq9W0dku7iE96YFaTb4W7eOikqoSC6VpjC1/4MQWOYRHLcPcIEY6xJ8es2sYMMSNXuVaR9nMupz8ZcTygP4jh+lPR1OH61q/FSjpRp7GKt4wZ1PknYjMbnpIzVjiSz0MkYd65bpZwuPOwZh/h2kHW7wvHNQZfWUJHEsOAI==J2ID"}`), ) if err != nil { t.Fatal(err) } s, ok := b.AsShare() if !ok { t.Fatal("expected share") } clockNow = func() time.Time { return time.Unix(100, 0) } if s.IsExpired() { t.Error("expected not expired") } clockNow = func() time.Time { return time.Unix(1378687181+2*86400, 0) } if !s.IsExpired() { t.Error("expected expired") } // And without an expiration time: b, err = BlobFromReader( blob.MustParse("sha1-931875ec6b8d917b7aae9f672f4f92de1ffaeeb1"), strings.NewReader(`{"camliVersion": 1, "authType": "haveref", "camliSigner": "sha1-f2b0b7da718b97ce8c31591d8ed4645c777f3ef4", "camliType": "claim", "claimDate": "2013-09-09T01:01:09.907842963Z", "claimType": "share", "target": "sha1-64ffa72fa9bcb2f825e7ed40b9451e5cadca4c2c", "transitive": false ,"camliSig":"wsBcBAABCAAQBQJSLR3VCRApMaZ8JvWr2gAA14kIAKmi5rCI5JTBvHbBuAu7wPVA87BLXm/BaD6zjqOENB4U8B+6KxyuT6KXe9P591IDXdZmJTP5tesbLtKw0iAWiRf2ea0Y7Ms3K77nLnSZM5QIOzb4aQKd1668p/5KqU3VfNayoHt69YkXyKBkqyEPjHINzC03QuLz5NIEBMYJaNqKKtEtSgh4gG8BBYq5qQzdKFg/Hx7VhkhW1y/1wwGSFJjaiPFMIJsF4d/gaO01Ip7XLro63ccyCy81tqKHnVjv0uULmZdbpgd3RHGGSnW3c9BfqkGvc3Wl11UQKzqc9OT+WTAWp8TXg6bLES9sQNzerx2wUfjKB9J4Yrk14iBfjl8==AynO"}`), ) if err != nil { t.Fatal(err) } s, ok = b.AsShare() if !ok { t.Fatal("expected share") } clockNow = func() time.Time { return time.Unix(100, 0) } if s.IsExpired() { t.Error("expected not expired") } clockNow = func() time.Time { return time.Unix(1378687181+2*86400, 0) } if s.IsExpired() { t.Error("expected not expired") } } // camlistore.org/issue/305 func TestIssue305(t *testing.T) { var in = `{"camliVersion": 1, "camliType": "file", "fileName": "2012-03-10 15.03.18.m4v", "parts": [ { "bytesRef": "sha1-c76d8b17b887c207875e61a77b7eccc60289e61c", "size": 20032564 } ] }` var ss superset if err := json.NewDecoder(strings.NewReader(in)).Decode(&ss); err != nil { t.Fatal(err) } inref := blob.SHA1FromString(in) blob, err := BlobFromReader(inref, strings.NewReader(in)) if err != nil { t.Fatal(err) } if blob.BlobRef() != inref { t.Errorf("original ref = %s; want %s", blob.BlobRef(), inref) } bb := blob.Builder() jback, err := bb.JSON() if err != nil { t.Fatal(err) } if jback != in { t.Errorf("JSON doesn't match:\n got: %q\nwant: %q\n", jback, in) } out := bb.Blob() if got := out.BlobRef(); got != inref { t.Errorf("cloned ref = %v; want %v", got, inref) } } func TestStaticFileAndStaticSymlink(t *testing.T) { // TODO (marete): Split this into two test functions. fd, err := ioutil.TempFile("", "schema-test-") if err != nil { t.Fatalf("io.TempFile(): %v", err) } defer os.Remove(fd.Name()) defer fd.Close() fi, err := os.Lstat(fd.Name()) if err != nil { t.Fatalf("os.Lstat(): %v", err) } bb := NewCommonFileMap(fd.Name(), fi) bb.SetType("file") bb.SetFileName(fd.Name()) blob := bb.Blob() sf, ok := blob.AsStaticFile() if !ok { t.Fatalf("Blob.AsStaticFile(): Unexpected return value: false") } if want, got := filepath.Base(fd.Name()), sf.FileName(); want != got { t.Fatalf("StaticFile.FileName(): Expected %s, got %s", want, got) } _, ok = sf.AsStaticSymlink() if ok { t.Fatalf("StaticFile.AsStaticSymlink(): Unexpected return value: true") } dir, err := ioutil.TempDir("", "schema-test-") if err != nil { t.Fatalf("ioutil.TempDir(): %v", err) } defer os.RemoveAll(dir) target := "bar" src := filepath.Join(dir, "foo") err = os.Symlink(target, src) fi, err = os.Lstat(src) if err != nil { t.Fatalf("os.Lstat(): %v", err) } bb = NewCommonFileMap(src, fi) bb.SetType("symlink") bb.SetFileName(src) bb.SetSymlinkTarget(target) blob = bb.Blob() sf, ok = blob.AsStaticFile() if !ok { t.Fatalf("Blob.AsStaticFile(): Unexpected return value: false") } sl, ok := sf.AsStaticSymlink() if !ok { t.Fatalf("StaticFile.AsStaticSymlink(): Unexpected return value: false") } if want, got := filepath.Base(src), sl.FileName(); want != got { t.Fatalf("StaticSymlink.FileName(): Expected %s, got %s", want, got) } if want, got := target, sl.SymlinkTargetString(); got != want { t.Fatalf("StaticSymlink.SymlinkTargetString(): Expected %s, got %s", want, got) } } func TestStaticFIFO(t *testing.T) { tdir, err := ioutil.TempDir("", "schema-test-") if err != nil { t.Fatalf("ioutil.TempDir(): %v", err) } defer os.RemoveAll(tdir) fifoPath := filepath.Join(tdir, "fifo") err = osutil.Mkfifo(fifoPath, 0660) if err == osutil.ErrNotSupported { t.SkipNow() } if err != nil { t.Fatalf("osutil.Mkfifo(): %v", err) } fi, err := os.Lstat(fifoPath) if err != nil { t.Fatalf("os.Lstat(): %v", err) } bb := NewCommonFileMap(fifoPath, fi) bb.SetType("fifo") bb.SetFileName(fifoPath) blob := bb.Blob() t.Logf("Got JSON for fifo: %s\n", blob.JSON()) sf, ok := blob.AsStaticFile() if !ok { t.Fatalf("Blob.AsStaticFile(): Expected true, got false") } _, ok = sf.AsStaticFIFO() if !ok { t.Fatalf("StaticFile.AsStaticFIFO(): Expected true, got false") } } func TestStaticSocket(t *testing.T) { tdir, err := ioutil.TempDir("", "schema-test-") if err != nil { t.Fatalf("ioutil.TempDir(): %v", err) } defer os.RemoveAll(tdir) sockPath := filepath.Join(tdir, "socket") err = osutil.Mksocket(sockPath) if err == osutil.ErrNotSupported { t.SkipNow() } if err != nil { t.Fatalf("osutil.Mksocket(): %v", err) } fi, err := os.Lstat(sockPath) if err != nil { t.Fatalf("os.Lstat(): %v", err) } bb := NewCommonFileMap(sockPath, fi) bb.SetType("socket") bb.SetFileName(sockPath) blob := bb.Blob() t.Logf("Got JSON for socket: %s\n", blob.JSON()) sf, ok := blob.AsStaticFile() if !ok { t.Fatalf("Blob.AsStaticFile(): Expected true, got false") } _, ok = sf.AsStaticSocket() if !ok { t.Fatalf("StaticFile.AsStaticSocket(): Expected true, got false") } } func TestTimezoneEXIFCorrection(t *testing.T) { // Test that we get UTC times for photos taken in two // different timezones. // Both only have local time + GPS in the exif. tests := []struct { file, want, wantUTC string }{ {"coffee-sf.jpg", "2014-07-11 08:44:34 -0700 PDT", "2014-07-11 15:44:34 +0000 UTC"}, {"gocon-tokyo.jpg", "2014-05-31 13:34:04 +0900 JST", "2014-05-31 04:34:04 +0000 UTC"}, } for _, tt := range tests { f, err := os.Open("testdata/" + tt.file) if err != nil { t.Fatal(err) } // Hide *os.File type from FileTime, so it can't use modtime: tm, err := FileTime(struct{ io.ReaderAt }{f}) f.Close() if err != nil { t.Errorf("%s: %v", tt.file, err) continue } if got := tm.String(); got != tt.want { t.Errorf("%s: time = %q; want %q", tt.file, got, tt.want) } if got := tm.UTC().String(); got != tt.wantUTC { t.Errorf("%s: utc time = %q; want %q", tt.file, got, tt.wantUTC) } } }<|fim▁end|>
"time"
<|file_name|>dns_remove_reverse_record.py<|end_file_name|><|fim▁begin|>import subprocess, os, sys from reverseZone_naming import reverseZone_name from netaddr import * zone_files_path="/etc/bind/zones" def remove_reverse_record(): host_name_to_be_removed= sys.argv[1] reverse_zone_file_name,reverse_zone_name=reverseZone_name() os.chdir(zone_files_path) readFiles = open(reverse_zone_file_name,'r') reverse_zone_file_content = readFiles.read() readFiles.close() readFiles = open(reverse_zone_file_name,'r') lines = readFiles.readlines() readFiles.close() if host_name_to_be_removed in reverse_zone_file_content: file_content = open(reverse_zone_file_name,'w') for line in lines: if not host_name_to_be_removed in line: file_content.write(line) file_content.close() print "\nThe reverse record that you entered has been removed!\n" else: print "\nThe record you wanted to remove is already absent in the database!\n"<|fim▁hole|> def main(): remove_reverse_record() main()<|fim▁end|>
<|file_name|>mobilenetv2.py<|end_file_name|><|fim▁begin|>'''MobileNetV2 in PyTorch. See the paper "Inverted Residuals and Linear Bottlenecks: Mobile Networks for Classification, Detection and Segmentation" for more details. ''' import torch import torch.nn as nn import torch.nn.functional as F class Block(nn.Module): '''expand + depthwise + pointwise''' def __init__(self, in_planes, out_planes, expansion, stride): super(Block, self).__init__() self.stride = stride planes = expansion * in_planes self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, stride=1, padding=0, bias=False) self.bn1 = nn.BatchNorm2d(planes) self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, groups=planes, bias=False) self.bn2 = nn.BatchNorm2d(planes) self.conv3 = nn.Conv2d(planes, out_planes, kernel_size=1, stride=1, padding=0, bias=False) self.bn3 = nn.BatchNorm2d(out_planes) self.shortcut = nn.Sequential() if stride == 1 and in_planes != out_planes: self.shortcut = nn.Sequential( nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=1, padding=0, bias=False), nn.BatchNorm2d(out_planes), ) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = F.relu(self.bn2(self.conv2(out))) out = self.bn3(self.conv3(out)) out = out + self.shortcut(x) if self.stride==1 else out return out class MobileNetV2(nn.Module): # (expansion, out_planes, num_blocks, stride)<|fim▁hole|> (6, 64, 4, 2), (6, 96, 3, 1), (6, 160, 3, 2), (6, 320, 1, 1)] def __init__(self, num_classes=10): super(MobileNetV2, self).__init__() # NOTE: change conv1 stride 2 -> 1 for CIFAR10 self.conv1 = nn.Conv2d(3, 32, kernel_size=3, stride=1, padding=1, bias=False) self.bn1 = nn.BatchNorm2d(32) self.layers = self._make_layers(in_planes=32) self.conv2 = nn.Conv2d(320, 1280, kernel_size=1, stride=1, padding=0, bias=False) self.bn2 = nn.BatchNorm2d(1280) self.linear = nn.Linear(1280, num_classes) def _make_layers(self, in_planes): layers = [] for expansion, out_planes, num_blocks, stride in self.cfg: strides = [stride] + [1]*(num_blocks-1) for stride in strides: layers.append(Block(in_planes, out_planes, expansion, stride)) in_planes = out_planes return nn.Sequential(*layers) def forward(self, x): out = F.relu(self.bn1(self.conv1(x))) out = self.layers(out) out = F.relu(self.bn2(self.conv2(out))) # NOTE: change pooling kernel_size 7 -> 4 for CIFAR10 out = F.avg_pool2d(out, 4) out = out.view(out.size(0), -1) out = self.linear(out) return out def test(): net = MobileNetV2() x = torch.randn(2,3,32,32) y = net(x) print(y.size()) # test()<|fim▁end|>
cfg = [(1, 16, 1, 1), (6, 24, 2, 1), # NOTE: change stride 2 -> 1 for CIFAR10 (6, 32, 3, 2),
<|file_name|>wxr_feed.py<|end_file_name|><|fim▁begin|>import datetime from django import template from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.contrib.sites.models import Site from django.contrib.syndication.views import Feed, add_domain from django.utils import feedgenerator, tzinfo from django.utils.encoding import iri_to_uri try: from django.utils.encoding import force_text except ImportError: # Django < 1.5 from django.utils.encoding import force_unicode as force_text USE_SINGLE_SIGNON = getattr(settings, "DISQUS_USE_SINGLE_SIGNON", False) class WxrFeedType(feedgenerator.Rss201rev2Feed): def rss_attributes(self): return { 'version': self._version, 'xmlns:content': 'http://purl.org/rss/1.0/modules/content/', 'xmlns:dsq': 'http://www.disqus.com/', 'xmlns:dc': 'http://purl.org/dc/elements/1.1/', 'xmlns:wp': 'http://wordpress.org/export/1.0/', } def format_date(self, date): return date.strftime('%Y-%m-%d %H:%M:%S') def add_item(self, title, link, description, author_email=None, author_name=None, author_link=None, pubdate=None, comments=None, unique_id=None, enclosure=None, categories=(), item_copyright=None, ttl=None, **kwargs): """ Adds an item to the feed. All args are expected to be Python Unicode objects except pubdate, which is a datetime.datetime object, and enclosure, which is an instance of the Enclosure class. """ to_unicode = lambda s: force_text(s, strings_only=True) if categories: categories = [to_unicode(c) for c in categories] if ttl is not None: # Force ints to unicode ttl = force_text(ttl) item = { 'title': to_unicode(title), 'link': iri_to_uri(link), 'description': to_unicode(description), 'author_email': to_unicode(author_email), 'author_name': to_unicode(author_name), 'author_link': iri_to_uri(author_link), 'pubdate': pubdate, 'comments': comments, 'unique_id': to_unicode(unique_id), 'enclosure': enclosure, 'categories': categories or (), 'item_copyright': to_unicode(item_copyright), 'ttl': ttl, } item.update(kwargs) self.items.append(item) def add_root_elements(self, handler): pass def add_item_elements(self, handler, item): if item['comments'] is None: return handler.addQuickElement('title', item['title']) handler.addQuickElement('link', item['link']) handler.addQuickElement('content:encoded', item['description']) handler.addQuickElement('dsq:thread_identifier', item['unique_id']) handler.addQuickElement('wp:post_date_gmt', self.format_date(item['pubdate']).decode('utf-8')) handler.addQuickElement('wp:comment_status', item['comment_status']) self.write_comments(handler, item['comments']) def add_comment_elements(self, handler, comment): if USE_SINGLE_SIGNON: handler.startElement('dsq:remote', {}) handler.addQuickElement('dsq:id', comment['user_id']) handler.addQuickElement('dsq:avatar', comment['avatar']) handler.endElement('dsq:remote') handler.addQuickElement('wp:comment_id', comment['id']) handler.addQuickElement('wp:comment_author', comment['user_name']) handler.addQuickElement('wp:comment_author_email', comment['user_email']) handler.addQuickElement('wp:comment_author_url', comment['user_url']) handler.addQuickElement('wp:comment_author_IP', comment['ip_address']) handler.addQuickElement('wp:comment_date_gmt', self.format_date(comment['submit_date']).decode('utf-8')) handler.addQuickElement('wp:comment_content', comment['comment']) handler.addQuickElement('wp:comment_approved', comment['is_approved']) if comment['parent'] is not None: handler.addQuickElement('wp:comment_parent', comment['parent']) def write_comments(self, handler, comments): for comment in comments: handler.startElement('wp:comment', {}) self.add_comment_elements(handler, comment) handler.endElement('wp:comment') class BaseWxrFeed(Feed): feed_type = WxrFeedType def get_feed(self, obj, request): current_site = Site.objects.get_current() link = self._Feed__get_dynamic_attr('link', obj) link = add_domain(current_site.domain, link) feed = self.feed_type( title = self._Feed__get_dynamic_attr('title', obj), link = link, description = self._Feed__get_dynamic_attr('description', obj), ) title_tmp = None if self.title_template is not None: try: title_tmp = template.loader.get_template(self.title_template) except template.TemplateDoesNotExist: pass description_tmp = None if self.description_template is not None: try: description_tmp = template.loader.get_template(self.description_template) except template.TemplateDoesNotExist: pass for item in self._Feed__get_dynamic_attr('items', obj): if title_tmp is not None: title = title_tmp.render( template.RequestContext(request, { 'obj': item, 'site': current_site })) else: title = self._Feed__get_dynamic_attr('item_title', item) if description_tmp is not None: description = description_tmp.render( template.RequestContext(request, { 'obj': item, 'site': current_site })) else: description = self._Feed__get_dynamic_attr('item_description', item) link = add_domain( current_site.domain, self._Feed__get_dynamic_attr('item_link', item), ) pubdate = self._Feed__get_dynamic_attr('item_pubdate', item) if pubdate and not hasattr(pubdate, 'tzinfo'): ltz = tzinfo.LocalTimezone(pubdate) pubdate = pubdate.replace(tzinfo=ltz) feed.add_item( title = title, link = link, description = description, unique_id = self._Feed__get_dynamic_attr('item_guid', item, link), pubdate = pubdate, comment_status = self._Feed__get_dynamic_attr('item_comment_status', item, 'open'), comments = self._get_comments(item) ) return feed def _get_comments(self, item): cmts = self._Feed__get_dynamic_attr('item_comments', item) output = [] for comment in cmts: output.append({ 'user_id': self._Feed__get_dynamic_attr('comment_user_id', comment), 'avatar': self._Feed__get_dynamic_attr('comment_avatar', comment), 'id': str(self._Feed__get_dynamic_attr('comment_id', comment)), 'user_name': self._Feed__get_dynamic_attr('comment_user_name', comment), 'user_email': self._Feed__get_dynamic_attr('comment_user_email', comment), 'user_url': self._Feed__get_dynamic_attr('comment_user_url', comment), 'ip_address': self._Feed__get_dynamic_attr('comment_ip_address', comment), 'submit_date': self._Feed__get_dynamic_attr('comment_submit_date', comment), 'comment': self._Feed__get_dynamic_attr('comment_comment', comment), 'is_approved': str(self._Feed__get_dynamic_attr('comment_is_approved', comment)), 'parent': str(self._Feed__get_dynamic_attr('comment_parent', comment)), }) return output class ContribCommentsWxrFeed(BaseWxrFeed): link = "/" def item_comments(self, item): from django.contrib.comments.models import Comment ctype = ContentType.objects.get_for_model(item) return Comment.objects.filter(content_type=ctype, object_pk=item.pk) def item_guid(self, item): ctype = ContentType.objects.get_for_model(item) return "%s_%s" % (ctype.name, item.pk)<|fim▁hole|> def comment_id(self, comment): return comment.pk def comment_user_id(self, comment): return force_text(comment.user_id) def comment_user_name(self, comment): return force_text(comment.user_name) def comment_user_email(self, comment): return force_text(comment.user_email) def comment_user_url(self, comment): return force_text(comment.user_url) def comment_ip_address(self, comment): return force_text(comment.ip_address) def comment_submit_date(self, comment): return comment.submit_date def comment_comment(self, comment): return comment.comment def comment_is_approved(self, comment): return int(comment.is_public) comment_parent = 0<|fim▁end|>
<|file_name|>alt-pattern-lit.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT.<|fim▁hole|>// option. This file may not be copied, modified, or distributed // except according to those terms. fn altlit(f: int) -> int { match f { 10 => { debug!("case 10"); return 20; } 11 => { debug!("case 11"); return 22; } _ => fail!(~"the impossible happened") } } pub fn main() { assert!((altlit(10) == 20)); assert!((altlit(11) == 22)); }<|fim▁end|>
// // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
<|file_name|>city-info.tsx<|end_file_name|><|fim▁begin|>import * as React from 'react'; import ICityInfoProps from './i-city-info-props'; import ICityInfoState from './i-city-info-state'; // import { UISref } from 'ui-router-react'; import * as moment from 'moment'; import { splitGrades } from '../../../utils'; import './city-info.scss'; class CityInfo extends React.Component<ICityInfoProps, ICityInfoState> { protected timer: number; constructor(props: ICityInfoProps) { super(props); let time = props.timeZone ? moment().utcOffset(props.timeZone) : null; this.state = { time }; } componentWillReceiveProps(props: ICityInfoProps) { let time = props.timeZone ? moment().utcOffset(props.timeZone) : null; this.setState({ time }); } componentDidMount() { this.timer = setInterval(this.tick.bind(this), 1000); } componentWillUnmount() { clearInterval(this.timer); } protected tick() { if (!this.props.timeZone) return; let time = this.state.time.add(1, 's');<|fim▁hole|> this.setState({ time }); } protected renderProps() { let props = []; if (this.props.timeZone) { const time = this.state.time.format('HH:mm'); props.push(<li key='timeZone' className='city-info-props-item'>Текущее время: {time}</li>); } if (this.props.inception) { const inceptionYear = this.props.inception.getUTCFullYear(); props.push(<li key='inception' className='city-info-props-item'>Основание: {inceptionYear} г.</li>); } if (this.props.area) { const area = splitGrades(this.props.area); props.push(<li key='area' className='city-info-props-item'>Площадь: {area} км²</li>); } if (this.props.population) { const population = splitGrades(this.props.population); props.push(<li key='population' className='city-info-props-item'>Население: {population} чел.</li>); } if (props.length < 1) return null; return ( <ul className='city-info-props text-muted'> {props} </ul> ); } render() { return ( <div className='city-info' style={{backgroundImage: this.props.photoUrl ? `url(${this.props.photoUrl}?width=640)` : null }}> {this.props.children ? <div className='city-info-header'>{this.props.children}</div> : null} <div className='city-info-content'> <div className='city-info-title'> <h1>{this.props.cityName}</h1> <h5 className='text-muted'>{this.props.fullName}</h5> </div> {this.renderProps()} </div> </div> ); } } export default CityInfo;<|fim▁end|>
<|file_name|>voter_star_on_save_doc.py<|end_file_name|><|fim▁begin|># apis_v1/documentation_source/voter_star_on_save_doc.py # Brought to you by We Vote. Be good. # -*- coding: UTF-8 -*- def voter_star_on_save_doc_template_values(url_root): """ Show documentation about voterStarOnSave """ required_query_parameter_list = [ { 'name': 'api_key', 'value': 'string (from post, cookie, or get (in that order))', # boolean, integer, long, string 'description': 'The unique key provided to any organization using the WeVoteServer APIs', }, { 'name': 'voter_device_id', 'value': 'string', # boolean, integer, long, string 'description': 'An 88 character unique identifier linked to a voter record on the server', }, { 'name': 'kind_of_ballot_item', 'value': 'string', # boolean, integer, long, string 'description': 'What is the type of ballot item for which we are saving the \'on\' status? ' '(kind_of_ballot_item is either "OFFICE", "CANDIDATE", "POLITICIAN" or "MEASURE")', }, { 'name': 'ballot_item_id', 'value': 'integer', # boolean, integer, long, string 'description': 'The unique internal identifier for this ballot_item ' '(either ballot_item_id OR ballot_item_we_vote_id required -- not both. ' 'If it exists, ballot_item_id is used instead of ballot_item_we_vote_id)', }, { 'name': 'ballot_item_we_vote_id', 'value': 'string', # boolean, integer, long, string 'description': 'The unique identifier for this ballot_item across all networks ' '(either ballot_item_id OR ballot_item_we_vote_id required -- not both. ' 'NOTE: In the future we might support other identifiers used in the industry.', }, ] optional_query_parameter_list = [ ] <|fim▁hole|> potential_status_codes_list = [ { 'code': 'VALID_VOTER_DEVICE_ID_MISSING', 'description': 'Cannot proceed. A valid voter_device_id parameter was not included.', }, { 'code': 'VALID_VOTER_ID_MISSING', 'description': 'Cannot proceed. Missing voter_id while trying to save.', }, { 'code': 'STAR_ON_OFFICE CREATE/UPDATE ITEM_STARRED', 'description': '', }, { 'code': 'STAR_ON_CANDIDATE CREATE/UPDATE ITEM_STARRED', 'description': '', }, { 'code': 'STAR_ON_MEASURE CREATE/UPDATE ITEM_STARRED', 'description': '', }, ] try_now_link_variables_dict = { 'kind_of_ballot_item': 'CANDIDATE', 'ballot_item_id': '5655', } api_response = '{\n' \ ' "status": string (description of what happened),\n' \ ' "success": boolean (did the save happen?),\n' \ ' "ballot_item_id": integer,\n' \ ' "ballot_item_we_vote_id": string,\n' \ ' "kind_of_ballot_item": string (CANDIDATE, MEASURE),\n' \ '}' template_values = { 'api_name': 'voterStarOnSave', 'api_slug': 'voterStarOnSave', 'api_introduction': "Save or create private 'star on' state for the current voter for a measure, an office or candidate.", 'try_now_link': 'apis_v1:voterStarOnSaveView', 'try_now_link_variables_dict': try_now_link_variables_dict, 'url_root': url_root, 'get_or_post': 'GET', 'required_query_parameter_list': required_query_parameter_list, 'optional_query_parameter_list': optional_query_parameter_list, 'api_response': api_response, 'api_response_notes': "", 'potential_status_codes_list': potential_status_codes_list, } return template_values<|fim▁end|>
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
pub mod fixed_property;
<|file_name|>signup.js<|end_file_name|><|fim▁begin|>/*Copyright (c) Shelloid Systems LLP. All rights reserved. The use and distribution terms for this software are covered by the GNU Affero General Public License 3.0 (http://www.gnu.org/licenses/agpl-3.0.html) which can be found in the file LICENSE at the root of this distribution. By using this software in any fashion, you are agreeing to be bound by the terms of this license. You must not remove this notice, or any other, from this software. */<|fim▁hole|> * Created by Harikrishnan on 21/5/14. */ exports.index = function (req, res) { easydb(dbPool) .query(function () { return { query: "SELECT email, name, gen_code FROM pending_registrations WHERE email = ? and gen_code = ? and name = ?", params: [decodeURIComponent(req.body.email), decodeURIComponent(req.body.id), decodeURIComponent(req.body.name)] }; }) .success(function (rows) { if (rows.length > 0) { res.send({status: 200, name: rows[0].name}); } else { throw new Error("Validation failed."); } }). query(function () { return { query: "INSERT INTO users (`email`, `md5_password`, `salt`, `name`, `last_login_time`) VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP)", params: [req.body.email, req.body.md5_secret, req.body.salt, req.body.name] }; }). query(function () { return { query: "DELETE FROM pending_registrations WHERE email = ?", params: [decodeURIComponent(req.body.email)] }; }). success(function (rows) { res.send({status: 200}); }). error(function (err) { console.log(err); res.send({msg: err, status: 500}); }).execute({transaction: true}); };<|fim▁end|>
/**
<|file_name|>package.js<|end_file_name|><|fim▁begin|>Package.describe({ summary: "Github OAuth flow", version: "1.1.4-plugins.0" }); Package.onUse(function(api) { api.use('oauth2', ['client', 'server']); api.use('oauth', ['client', 'server']); api.use('http', ['server']); api.use('underscore', 'client'); api.use('templating', 'client'); api.use('random', 'client'); api.use('service-configuration', ['client', 'server']); api.export('Github'); api.addFiles(<|fim▁hole|> api.addFiles('github_server.js', 'server'); api.addFiles('github_client.js', 'client'); });<|fim▁end|>
['github_configure.html', 'github_configure.js'], 'client');
<|file_name|>testbench.go<|end_file_name|><|fim▁begin|>// Copyright 2020 The gVisor Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Package testbench has utilities to send and receive packets, and also command // the DUT to run POSIX functions. It is the packetimpact test API. package testbench import ( "encoding/json" "flag" "fmt" "math/rand" "net" "testing" "time" ) var ( // Native indicates that the test is being run natively. Native = false // RPCKeepalive is the gRPC keepalive. RPCKeepalive = 10 * time.Second // dutInfosJSON is the json string that describes information about all the // duts available to use. dutInfosJSON string // dutInfo is the pool among which the testbench can choose a DUT to work // with. dutInfo chan *DUTInfo ) // DUTInfo has both network and uname information about the DUT. type DUTInfo struct { Uname *DUTUname Net *DUTTestNet } // DUTUname contains information about the DUT from uname. type DUTUname struct { Machine string KernelName string KernelRelease string KernelVersion string OperatingSystem string } // IsLinux returns true if the DUT is running Linux. func (n *DUTUname) IsLinux() bool { return Native && n.OperatingSystem == "GNU/Linux" } // IsGvisor returns true if the DUT is running gVisor. func (*DUTUname) IsGvisor() bool { return !Native }<|fim▁hole|>} // DUTTestNet describes the test network setup on dut and how the testbench // should connect with an existing DUT. type DUTTestNet struct { // LocalMAC is the local MAC address on the test network. LocalMAC net.HardwareAddr // RemoteMAC is the DUT's MAC address on the test network. RemoteMAC net.HardwareAddr // LocalIPv4 is the local IPv4 address on the test network. LocalIPv4 net.IP // RemoteIPv4 is the DUT's IPv4 address on the test network. RemoteIPv4 net.IP // IPv4PrefixLength is the network prefix length of the IPv4 test network. IPv4PrefixLength int // LocalIPv6 is the local IPv6 address on the test network. LocalIPv6 net.IP // RemoteIPv6 is the DUT's IPv6 address on the test network. RemoteIPv6 net.IP // LocalDevID is the ID of the local interface on the test network. LocalDevID uint32 // RemoteDevID is the ID of the remote interface on the test network. RemoteDevID uint32 // LocalDevName is the device that testbench uses to inject traffic. LocalDevName string // RemoteDevName is the device name on the DUT, individual tests can // use the name to construct tests. RemoteDevName string // The following two fields on actually on the control network instead // of the test network, including them for convenience. // POSIXServerIP is the POSIX server's IP address on the control network. POSIXServerIP net.IP // POSIXServerPort is the UDP port the POSIX server is bound to on the // control network. POSIXServerPort uint16 } // SubnetBroadcast returns the test network's subnet broadcast address. func (n *DUTTestNet) SubnetBroadcast() net.IP { addr := append([]byte(nil), n.RemoteIPv4...) mask := net.CIDRMask(n.IPv4PrefixLength, net.IPv4len*8) for i := range addr { addr[i] |= ^mask[i] } return addr } // registerFlags defines flags and associates them with the package-level // exported variables above. It should be called by tests in their init // functions. func registerFlags(fs *flag.FlagSet) { fs.BoolVar(&Native, "native", Native, "whether the test is running natively") fs.DurationVar(&RPCKeepalive, "rpc_keepalive", RPCKeepalive, "gRPC keepalive") fs.StringVar(&dutInfosJSON, "dut_infos_json", dutInfosJSON, "json that describes the DUTs") } // Initialize initializes the testbench, it parse the flags and sets up the // pool of test networks for testbench's later use. func Initialize(fs *flag.FlagSet) { testing.Init() registerFlags(fs) flag.Parse() if err := loadDUTInfos(); err != nil { panic(err) } } // loadDUTInfos loads available DUT test infos from the json file, it // must be called after flag.Parse(). func loadDUTInfos() error { var dutInfos []DUTInfo if err := json.Unmarshal([]byte(dutInfosJSON), &dutInfos); err != nil { return fmt.Errorf("failed to unmarshal JSON: %w", err) } if got, want := len(dutInfos), 1; got < want { return fmt.Errorf("got %d DUTs, the test requires at least %d DUTs", got, want) } // Using a buffered channel as semaphore dutInfo = make(chan *DUTInfo, len(dutInfos)) for i := range dutInfos { dutInfos[i].Net.LocalIPv4 = dutInfos[i].Net.LocalIPv4.To4() dutInfos[i].Net.RemoteIPv4 = dutInfos[i].Net.RemoteIPv4.To4() dutInfo <- &dutInfos[i] } return nil } // GenerateRandomPayload generates a random byte slice of the specified length, // causing a fatal test failure if it is unable to do so. func GenerateRandomPayload(t *testing.T, n int) []byte { t.Helper() buf := make([]byte, n) if _, err := rand.Read(buf); err != nil { t.Fatalf("rand.Read(buf) failed: %s", err) } return buf } // getDUTInfo returns information about an available DUT from the pool. If no // DUT is readily available, getDUTInfo blocks until one becomes available. func getDUTInfo() *DUTInfo { return <-dutInfo } // release returns the DUTInfo back to the pool. func (info *DUTInfo) release() { dutInfo <- info }<|fim▁end|>
// IsFuchsia returns true if the DUT is running Fuchsia. func (n *DUTUname) IsFuchsia() bool { return Native && n.OperatingSystem == "Fuchsia"
<|file_name|>baseclass.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ############################################################################## # # diffpy.pyfullprof by DANSE Diffraction group # Simon J. L. Billinge # (c) 2010 Trustees of the Columbia University # in the City of New York. All rights reserved. # # File coded by: Jiwu Liu, Wenduo Zhou and Peng Tian<|fim▁hole|>############################################################################## __id__ = "$Id: baseclass.py 6843 2013-01-09 22:14:20Z juhas $" from diffpy.pyfullprof.containerclass import * from diffpy.pyfullprof.exception import * class BaseClass: """BaseClass defines the basic parameters and objects(i.e., subclasses in the SubClassDict and ObjectListDict). The definition can be used for initializing and configuring. Data member: parent -- the reference to the owner """ ParamDict = {} ParamListDict = {} ObjectDict = {} ObjectListDict = {} def __init__(self, parent=None): """ Initialization. parent -- the reference to the owner object. """ # parent and key record the location of the object self.parent = parent for name,info in self.ParamDict.items(): self.__dict__[name] = info.default for name,info in self.ParamListDict.items(): self.__dict__[name] = ParamList(self, info.minsize, info.maxsize, name) for name in self.ObjectDict.keys(): self.__dict__[name] = None for name,info in self.ObjectListDict.items(): self.__dict__[name] = ObjectList(self, info.minsize, info.maxsize, name) return def __str__(self): """Form a string representation. return: a string object """ from diffpy.pyfullprof.infoclass import EnumInfo s = "object of class %-10s: \n"%(self.__class__.__name__) for name in sorted(self.ParamDict.keys()): s += "%-15s: "%(name) val = self.__dict__[name] info = self.ParamDict[name] if isinstance(info, EnumInfo): s += "%-20s %-5s\n"%(str(info.getValueStr(val)), str(val)) else: s += "%-20s\n"%(str(self.__dict__[name])) for name in sorted(self.ParamListDict.keys()): s += name + ":\n" subcontainer = self.__dict__[name] s += str(subcontainer) + "\n" for name in sorted(self.ObjectDict.keys()): s += name + ":\n" subobject = self.__dict__[name] s += str(subobject) + "\n" for name in sorted(self.ObjectListDict.keys()): s += name + ":\n" subcontainer = self.__dict__[name] s += str(subcontainer) + "\n" return s def clear(self): """Clear myself completely. """ for v in self.ObjectDict.keys(): self.__dict__[v].clear() for v in self.ObjectListDict.keys(): self.__dict__[v].clear() return def delete(self, name, id=None): """Delete a parameter(s) or an object(s). name -- the key name in ParamDict/ParamListDic/ObjectDict/ObjectListDict id -- additional object id to delete it from the ObjectListDict """ if self.ParamDict.has_key(name): self.__dict__[name].clear() elif self.ParamListDict.has_key(name): self.__dict__[name].delete(id) elif self.ObjectDict.has_key(name): self.__dict__[name].clear() elif self.ObjectListDict.has_key(name): self.__dict__[name].delete(id) return def duplicate(self): """Make a deep copy of this BaseClass instance and return the copy return: BaseClass instance """ errmsg = "BaseClass.duplicate is virtual" raise NotImplementedError(errmsg) return @property def path(self): """Get the full path of the object return: Dot separated string. """ name = self.name if self.parent: path = self.parent.path if path: return path +'.'+name else: return name return name @property def name(self): """Get the full name of the constraint, with index return: a string """ # an object with empty key has no name. if not self.key: return '' if self.key in self.parent.ObjectDict: return self.key if self.key in self.parent.ObjectListDict: index = getattr(self.parent, self.key)._list.index(self) return '%s[%i]'%(self.key, index) # else an internal bug raise RietError("'%s' is not a valid object of '%s'."%(self.key, self.parent.path), 'Internal Error') return def getByPath(self, path): """Get a value by path path -- a full path, e.g., x.y.z[i].a return: the value/object corresponding to this address """ # In the case a None or an empty string is passed in if not path: return self # If the name has hierarchy, keep breaking it to the end if path.count('.') > 0: try: objpath,paramname = path.rsplit('.',1) except ValueError: raise RietError('Invalid format for a parameter name: ' + path) # The code below check if the return is a list or a single object # and handle it accordingly. objects = self.getByPath(objpath) if isinstance(objects, list): results = [] for object in objects: result = object.getByPath(paramname) if isinstance(result, list): results.extend(result) else: results.append(result) return results # else it is a single object return objects.getByPath(paramname) # check if the path contains [], i.e., for ObjectListDict or ParamListDict name, index = self._parseIndex(path) return self.get(name, index) def setByPath(self, path, value): """Set a value by path path -- a full path, e.g., x.y.z[i].a value -- the value/object corresponding to this address """ if not path: raise RietError("Path is empty") if path.count('.') > 0: try: objpath,paramname = path.rsplit('.',1) except: raise RietError('Invalid format for a parameter name: ' + path) objects = self.getByPath(objpath) if isinstance(objects, list): for object in objects: object.setByPath(paramname, value) else: objects.setByPath(paramname, value) return # check if the path contains [], i.e., for ObjectListDict or ParamListDict name, index = self._parseIndex(path) self.set(name, value, index) return def _parseIndex(self, path): """Parse a path having a form as ABC[1], without '.' path -- the name return: name and index """ if path.count('[')==1 and path.count(']')==1: import re res = re.search(r'([^][]+)\[([0-9:]+)\]',path) if res and len(res.groups()) == 2: name,index= res.groups() # The code below build either a slice or an int from the string if index.count(':') > 0: # try to make a slice index = slice(*[{True: lambda n: None, False: int}[x == ''](x) for x in (index.split(':') + ['', '', ''])[:3]]) else: index = int(index) return name,index else: raise RietError('Invalid format for a parameter name: ' + name) return path, None def _rangeParam(self, name, index): """Generate a range of indices for the parameter list name -- the name in the ParamListDict index -- a slice object return: a range of slices """ if name not in self.ParamListDict: raise RietError('The parameter "%s" is not a list.'%name) n = len(getattr(self, name)) start, stop, step = index.indices(n) return range(start, stop, step) def get(self, name, index=None): """Get a value name -- a key in ParamDict, ParamListDict, ObjectDict or ObjectListDict index -- only for ObjectListDict object, to give the location of the object return: 1. ParamDict: return the value 2. ObjectDict: return the RietveldClass object 3. ObjectListDict: return the RietveldClass object(s) """ if self.ParamDict.has_key(name): if index is not None: raise RietError('The parameter "%s" is not a list.'%name) value = self.__dict__[name] elif self.ParamListDict.has_key(name): value = self.__dict__[name].get(index) elif self.ObjectDict.has_key(name): if index is not None: raise RietError('The object "%s" is not a list.'%name) value = self.__dict__[name] elif self.ObjectListDict.has_key(name): value = self.__dict__[name].get(index) else: errmsg = "Class '%-15s' does not have '%-15s'"%\ (self.__class__.__name__, str(name)) raise RietError(errmsg) return value def set(self, name, value, index=None): """Set the value for a member. name -- a key in ParamDict, ParamListDict, ObjectDict or ObjectListDict value -- the value/object to be set index -- only for ObjectListDict object, to give the location of the object """ if name in self.ParamDict: if index is not None: raise RietError('The parameter "%s" is not a list.'%name) setattr(self, name, self.ParamDict[name].convert(value)) elif name in self.ParamListDict: getattr(self, name).set(self.ParamListDict[name].convert(value),index) elif name in self.ObjectDict: if index is not None: raise RietError('The object "%s" is not a list.'%name) self.ObjectDict[name].validate(value) object = getattr(self, name) if object is not None: object.clear() setattr(self, name, value) value.parent = self value.key = name _param_indices = getattr(self.getRoot(), '_param_indices', None) if _param_indices is not None: value.updateParamIndices(_param_indices) elif name in self.ObjectListDict: self.ObjectListDict[name].validate(value) getattr(self, name).set(value, index) value.parent = self value.key = name _param_indices = getattr(self.getRoot(), '_param_indices', None) if _param_indices is not None: value.updateParamIndices(_param_indices) else: raise RietError("%s does not have the parameter '%s'\n" % \ (self.__class__.__name__, name)) return def validate(self): """Check if the object are valid. return: True for valid, otherwise False. """ rvalue = True # 1. check subclass for name in self.ObjectDict.keys(): obj = self.__dict__[name] if obj is None: rvalue = False wmsg = "Warning! Class %-20s: UniObjectList %-20s Not Set-Up"%\ (self.__class__.__name__, name) print wmsg else: if not obj.validate(): rvalue = False # 2. check container for name in self.ObjectListDict.keys(): containerobj = self.__dict__[name] objlen = len(containerobj) minlen = self.ObjectListDict[name].minsize maxlen = self.ObjectListDict[name].maxsize if (objlen < minlen): print "class " + self.__class__.__name__ + ":\tcontainer " + name + "\t not set-up\n" rvalue = False for obj in containerobj.get(): if not obj.validate(): rvalue = False return rvalue def getRoot(self): '''Get the root object. return: the root BaseClass object ''' root = self while root.parent is not None: root = root.parent return root def isDescendant(self, object): '''Check if it is a descendant of the object, or is the object. object: a baseclass object return: True or False ''' node = self while node is not object: node = node.parent if node is None: return False return True def updateParamIndices(self, indices): '''Update the global index dictionary to incorporate my parameters. indices -- an indexing dictionary ''' # obtain an index dictionary # update the root index dictionary with child for name in self.ParamDict: try: indices[name.lower()].append((self, name)) except: indices[name.lower()] = [(self, name)] for name in self.ParamListDict: try: indices[name.lower()].append((self, name)) except: indices[name.lower()] = [(self, name)] for name in self.ObjectDict: o = getattr(self, name) if o: o.updateParamIndices(indices) for name in self.ObjectListDict: for p in getattr(self, name)._list: p.updateParamIndices(indices) return def listParameters(self, prefix=''): """List the paths to all the Rietveld parameters. prefix -- a prefix string to be appended return: list of strings """ from diffpy.pyfullprof.refine import Refine pathlist = [] for name in sorted(self.ParamDict.keys()): pathlist.append(prefix+name) for name in sorted(self.ParamListDict.keys()): paramlist = self.__dict__[name].listParameters(prefix) pathlist.extend(paramlist) for name in sorted(self.ObjectDict.keys()): if isinstance(self.__dict__[name], Refine): continue paramlist = self.__dict__[name].listParameters(prefix+name+'.') pathlist.extend(paramlist) for name in sorted(self.ObjectListDict.keys()): paramlist = self.__dict__[name].listParameters(prefix) pathlist.extend(paramlist) return pathlist def locateParameter(self, name): """Find a parameter under this object with the given name. name -- the parameter name return: 1. (None, name) if the name is not found 2. (owner, key) where key is the strict name 3. (owners, keys) where owners is a list of owner and key is a list of keys """ index = getattr(self.getRoot(), '_param_indices', None) if index is not None: try: values = index[name.lower()] except KeyError: return None, name if self.parent is None: # all the values should be under self results = values else: # also check if the results belong to self results = [] for object, name in values: if object.isDescendant(self): results.append((object, name)) if len(results) < 1: return None, name elif len(results) == 1: return results[0] else: return ([result[0] for result in results], [result[1] for result in results]) # when there is no global index parameters = self.listParameters() for parameter in parameters: if parameter.count('.') == 0: # it is a parameter under fit parpath, parname = '', parameter else: parpath, parname = parameter.rsplit('.', 1) if parname.lower() == name.lower(): return self.getByPath(parpath), parname return None, name # EOF<|fim▁end|>
# # See AUTHORS.txt for a list of people who contributed. # See LICENSE.txt for license information. #
<|file_name|>webpack.config.build.ci.js<|end_file_name|><|fim▁begin|>/* Configures webpack to build only assets required for integration environments. */ const webpack = require('webpack'); const merge = require('webpack-merge'); const { source, sourceAll } = require('../lib/path-helpers'); const ciBuildWorkflow = require('./workflow/build.ci'); const { entries } = require(source('fc-config')); // eslint-disable-line // remove the styleguide dll references const modify = (config) => {<|fim▁hole|> return config; }; module.exports = modify(merge(ciBuildWorkflow, { entry: sourceAll(entries.ci), plugins: [ new webpack.DefinePlugin({ 'process.env.CI_MODE': true }) ] }));<|fim▁end|>
config.plugins = config.plugins.slice(1);
<|file_name|>light.py<|end_file_name|><|fim▁begin|>"""Support for Nanoleaf Lights.""" import logging import voluptuous as vol from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_COLOR_TEMP, ATTR_EFFECT, ATTR_HS_COLOR, ATTR_TRANSITION, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, SUPPORT_COLOR, SUPPORT_COLOR_TEMP, SUPPORT_EFFECT, SUPPORT_TRANSITION, Light) from homeassistant.const import CONF_HOST, CONF_NAME, CONF_TOKEN import homeassistant.helpers.config_validation as cv from homeassistant.util import color as color_util<|fim▁hole|>from homeassistant.util.json import load_json, save_json _LOGGER = logging.getLogger(__name__) DEFAULT_NAME = 'Nanoleaf' DATA_NANOLEAF = 'nanoleaf' CONFIG_FILE = '.nanoleaf.conf' ICON = 'mdi:triangle-outline' SUPPORT_NANOLEAF = (SUPPORT_BRIGHTNESS | SUPPORT_COLOR_TEMP | SUPPORT_EFFECT | SUPPORT_COLOR | SUPPORT_TRANSITION) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Required(CONF_TOKEN): cv.string, vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up the Nanoleaf light.""" from pynanoleaf import Nanoleaf, Unavailable if DATA_NANOLEAF not in hass.data: hass.data[DATA_NANOLEAF] = dict() token = '' if discovery_info is not None: host = discovery_info['host'] name = discovery_info['hostname'] # if device already exists via config, skip discovery setup if host in hass.data[DATA_NANOLEAF]: return _LOGGER.info("Discovered a new Nanoleaf: %s", discovery_info) conf = load_json(hass.config.path(CONFIG_FILE)) if conf.get(host, {}).get('token'): token = conf[host]['token'] else: host = config[CONF_HOST] name = config[CONF_NAME] token = config[CONF_TOKEN] nanoleaf_light = Nanoleaf(host) if not token: token = nanoleaf_light.request_token() if not token: _LOGGER.error("Could not generate the auth token, did you press " "and hold the power button on %s" "for 5-7 seconds?", name) return conf = load_json(hass.config.path(CONFIG_FILE)) conf[host] = {'token': token} save_json(hass.config.path(CONFIG_FILE), conf) nanoleaf_light.token = token try: nanoleaf_light.available except Unavailable: _LOGGER.error( "Could not connect to Nanoleaf Light: %s on %s", name, host) return hass.data[DATA_NANOLEAF][host] = nanoleaf_light add_entities([NanoleafLight(nanoleaf_light, name)], True) class NanoleafLight(Light): """Representation of a Nanoleaf Light.""" def __init__(self, light, name): """Initialize an Nanoleaf light.""" self._available = True self._brightness = None self._color_temp = None self._effect = None self._effects_list = None self._light = light self._name = name self._hs_color = None self._state = None @property def available(self): """Return availability.""" return self._available @property def brightness(self): """Return the brightness of the light.""" if self._brightness is not None: return int(self._brightness * 2.55) return None @property def color_temp(self): """Return the current color temperature.""" if self._color_temp is not None: return color_util.color_temperature_kelvin_to_mired( self._color_temp) return None @property def effect(self): """Return the current effect.""" return self._effect @property def effect_list(self): """Return the list of supported effects.""" return self._effects_list @property def min_mireds(self): """Return the coldest color_temp that this light supports.""" return 154 @property def max_mireds(self): """Return the warmest color_temp that this light supports.""" return 833 @property def name(self): """Return the display name of this light.""" return self._name @property def icon(self): """Return the icon to use in the frontend, if any.""" return ICON @property def is_on(self): """Return true if light is on.""" return self._state @property def hs_color(self): """Return the color in HS.""" return self._hs_color @property def supported_features(self): """Flag supported features.""" return SUPPORT_NANOLEAF def turn_on(self, **kwargs): """Instruct the light to turn on.""" brightness = kwargs.get(ATTR_BRIGHTNESS) hs_color = kwargs.get(ATTR_HS_COLOR) color_temp_mired = kwargs.get(ATTR_COLOR_TEMP) effect = kwargs.get(ATTR_EFFECT) transition = kwargs.get(ATTR_TRANSITION) if hs_color: hue, saturation = hs_color self._light.hue = int(hue) self._light.saturation = int(saturation) if color_temp_mired: self._light.color_temperature = mired_to_kelvin(color_temp_mired) if transition: if brightness: # tune to the required brightness in n seconds self._light.brightness_transition( int(brightness / 2.55), int(transition)) else: # If brightness is not specified, assume full brightness self._light.brightness_transition(100, int(transition)) else: # If no transition is occurring, turn on the light self._light.on = True if brightness: self._light.brightness = int(brightness / 2.55) if effect: self._light.effect = effect def turn_off(self, **kwargs): """Instruct the light to turn off.""" transition = kwargs.get(ATTR_TRANSITION) if transition: self._light.brightness_transition(0, int(transition)) else: self._light.on = False def update(self): """Fetch new state data for this light.""" from pynanoleaf import Unavailable try: self._available = self._light.available self._brightness = self._light.brightness self._color_temp = self._light.color_temperature self._effect = self._light.effect self._effects_list = self._light.effects self._hs_color = self._light.hue, self._light.saturation self._state = self._light.on except Unavailable as err: _LOGGER.error("Could not update status for %s (%s)", self.name, err) self._available = False<|fim▁end|>
from homeassistant.util.color import \ color_temperature_mired_to_kelvin as mired_to_kelvin
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import email.utils import collections import time import cgi from .core import py3 if py3: def parse_return(content): if isinstance(content, str): content = content.encode('utf-8', 'xmlcharrefreplace') if isinstance(content, bytes): return (content,) elif isinstance(content, collections.Iterable): return (i.encode('utf-8', 'xmlcharrefreplace') for i in content) else: return '' else: def parse_return(content): if isinstance(content, unicode): content = content.encode('utf-8', 'xmlcharrefreplace') if isinstance(content, str): return (content,) elif isinstance(content, collections.Iterable): return (i.encode('utf-8', 'xmlcharrefreplace') for i in content) else: return '' def parse_date(ims): """Adapted from Bottle""" try: ts = email.utils.parsedate_tz(ims) return time.mktime(ts[:8] + (0,)) - (ts[9] or 0) - time.timezone except (TypeError, ValueError, IndexError, OverflowError): return None def parse_range_header(header, maxlen=0): """Adapted from Bottle""" if not header or header[:6] != 'bytes=': return ranges = [r.split('-', 1) for r in header[6:].split(',') if '-' in r] for start, end in ranges: try: if not start: # bytes=-100 -> last 100 bytes start, end = max(0, maxlen-int(end)), maxlen elif not end: # bytes=100- -> all but the first 99 bytes start, end = int(start), maxlen else: # bytes=100-200 -> bytes 100-200 (inclusive) start, end = int(start), min(int(end)+1, maxlen) if 0 <= start < end <= maxlen: yield start, end except ValueError: pass def file_iter_range(fp, offset, bytes_, maxread=1024*1024): """Adapted from Bottle""" fp.seek(offset) while bytes_ > 0: part = fp.read(min(bytes_, maxread)) if not part: break bytes_ -= len(part) yield part class FieldStorage(cgi.FieldStorage): def get(self, key): try:<|fim▁hole|> return None<|fim▁end|>
return self[key].value except KeyError:
<|file_name|>aaf2xml.py<|end_file_name|><|fim▁begin|>import aaf<|fim▁hole|>(options, args) = parser.parse_args() if not args: parser.error("not enough argements") path = args[0] name, ext = os.path.splitext(path) f = aaf.open(path, 'r') f.save(name + ".xml") f.close()<|fim▁end|>
import os from optparse import OptionParser parser = OptionParser()
<|file_name|>dupefilter.py<|end_file_name|><|fim▁begin|>import logging import time from scrapy.dupefilters import BaseDupeFilter from scrapy.utils.request import request_fingerprint from . import defaults from .connection import get_redis_from_settings logger = logging.getLogger(__name__) # TODO: Rename class to RedisDupeFilter. class RFPDupeFilter(BaseDupeFilter): """Redis-based request duplicates filter. This class can also be used with default Scrapy's scheduler. """ logger = logger def __init__(self, server, key, debug=False): """Initialize the duplicates filter. Parameters ---------- server : redis.StrictRedis The redis server instance. key : str Redis key Where to store fingerprints. debug : bool, optional Whether to log filtered requests. """ self.server = server self.key = key self.debug = debug self.logdupes = True <|fim▁hole|> This uses by default the key ``dupefilter:<timestamp>``. When using the ``scrapy_redis.scheduler.Scheduler`` class, this method is not used as it needs to pass the spider name in the key. Parameters ---------- settings : scrapy.settings.Settings Returns ------- RFPDupeFilter A RFPDupeFilter instance. """ server = get_redis_from_settings(settings) # XXX: This creates one-time key. needed to support to use this # class as standalone dupefilter with scrapy's default scheduler # if scrapy passes spider on open() method this wouldn't be needed # TODO: Use SCRAPY_JOB env as default and fallback to timestamp. key = defaults.DUPEFILTER_KEY % {'timestamp': int(time.time())} debug = settings.getbool('DUPEFILTER_DEBUG') return cls(server, key=key, debug=debug) @classmethod def from_crawler(cls, crawler): """Returns instance from crawler. Parameters ---------- crawler : scrapy.crawler.Crawler Returns ------- RFPDupeFilter Instance of RFPDupeFilter. """ return cls.from_settings(crawler.settings) def request_seen(self, request): """Returns True if request was already seen. Parameters ---------- request : scrapy.http.Request Returns ------- bool """ fp = self.request_fingerprint(request) # This returns the number of values added, zero if already exists. added = self.server.sadd(self.key, fp) return added == 0 def request_fingerprint(self, request): """Returns a fingerprint for a given request. Parameters ---------- request : scrapy.http.Request Returns ------- str """ return request_fingerprint(request) def close(self, reason=''): """Delete data on close. Called by Scrapy's scheduler. Parameters ---------- reason : str, optional """ self.clear() def clear(self): """Clears fingerprints data.""" self.server.delete(self.key) def log(self, request, spider): """Logs given request. Parameters ---------- request : scrapy.http.Request spider : scrapy.spiders.Spider """ if self.debug: msg = "Filtered duplicate request: %(request)s" self.logger.debug(msg, {'request': request}, extra={'spider': spider}) elif self.logdupes: msg = ("Filtered duplicate request %(request)s" " - no more duplicates will be shown" " (see DUPEFILTER_DEBUG to show all duplicates)") self.logger.debug(msg, {'request': request}, extra={'spider': spider}) self.logdupes = False<|fim▁end|>
@classmethod def from_settings(cls, settings): """Returns an instance from given settings.
<|file_name|>bench.rs<|end_file_name|><|fim▁begin|>use cargo::core::Workspace; use cargo::ops; use cargo::util::{CliResult, CliError, Human, Config, human}; use cargo::util::important_paths::{find_root_manifest_for_wd}; #[derive(RustcDecodable)] pub struct Options { flag_no_run: bool, flag_package: Vec<String>, flag_jobs: Option<u32>, flag_features: Vec<String>, flag_no_default_features: bool, flag_target: Option<String>, flag_manifest_path: Option<String>, flag_verbose: u32, flag_quiet: Option<bool>, flag_color: Option<String>, flag_lib: bool, flag_bin: Vec<String>, flag_example: Vec<String>, flag_test: Vec<String>, flag_bench: Vec<String>, flag_frozen: bool, flag_locked: bool, arg_args: Vec<String>, } pub const USAGE: &'static str = " Execute all benchmarks of a local package Usage: cargo bench [options] [--] [<args>...] Options: -h, --help Print this message --lib Benchmark only this package's library --bin NAME Benchmark only the specified binary --example NAME Benchmark only the specified example --test NAME Benchmark only the specified test target --bench NAME Benchmark only the specified bench target --no-run Compile, but don't run benchmarks -p SPEC, --package SPEC ... Package to run benchmarks for -j N, --jobs N Number of parallel jobs, defaults to # of CPUs --features FEATURES Space-separated list of features to also build --no-default-features Do not build the `default` feature --target TRIPLE Build for the target triple --manifest-path PATH Path to the manifest to build benchmarks for -v, --verbose ... Use verbose output -q, --quiet No output printed to stdout --color WHEN Coloring: auto, always, never --frozen Require Cargo.lock and cache are up to date --locked Require Cargo.lock is up to date All of the trailing arguments are passed to the benchmark binaries generated for filtering benchmarks and generally providing options configuring how they run. If the --package argument is given, then SPEC is a package id specification which indicates which package should be benchmarked. If it is not given, then the current package is benchmarked. For more information on SPEC and its format, see the `cargo help pkgid` command. The --jobs argument affects the building of the benchmark executable but does not affect how many jobs are used when running the benchmarks. Compilation can be customized with the `bench` profile in the manifest. "; pub fn execute(options: Options, config: &Config) -> CliResult<Option<()>> { let root = try!(find_root_manifest_for_wd(options.flag_manifest_path, config.cwd())); try!(config.configure(options.flag_verbose, options.flag_quiet, &options.flag_color, options.flag_frozen, options.flag_locked)); let ops = ops::TestOptions { no_run: options.flag_no_run, no_fail_fast: false, only_doc: false, compile_opts: ops::CompileOptions { config: config, jobs: options.flag_jobs, target: options.flag_target.as_ref().map(|s| &s[..]), features: &options.flag_features, no_default_features: options.flag_no_default_features, spec: &options.flag_package, exec_engine: None, release: true, mode: ops::CompileMode::Bench, filter: ops::CompileFilter::new(options.flag_lib, &options.flag_bin, &options.flag_test, &options.flag_example, &options.flag_bench), target_rustdoc_args: None, target_rustc_args: None, }, }; let ws = try!(Workspace::new(&root, config)); let err = try!(ops::run_benches(&ws, &ops, &options.arg_args)); match err { None => Ok(None), Some(err) => {<|fim▁hole|> }) } } }<|fim▁end|>
Err(match err.exit.as_ref().and_then(|e| e.code()) { Some(i) => CliError::new(human("bench failed"), i), None => CliError::new(Box::new(Human(err)), 101)
<|file_name|>core.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core';<|fim▁hole|> @NgModule() export class CoreModule { }<|fim▁end|>
<|file_name|>stickers.py<|end_file_name|><|fim▁begin|>LIKES={ 'l': '369239383222810', 'm': '369239343222814', 's': '369239263222822'<|fim▁hole|>LIKES['medium'] =LIKES['m'] LIKES['small'] = LIKES['s']<|fim▁end|>
} LIKES['large'] = LIKES['l']
<|file_name|>configment.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python<|fim▁hole|># Copyright 2014 Simone Campagna # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Configment interface >>> class TestCfg(Configment): ... CONFIGSPEC_SOURCE = ''' ... [abc] ... x = integer(default=3) ... ''' >>> cfg = TestCfg() >>> cfg["abc"]["x"] 3 >>> """ import os import validate import six from .configobj_wrap import ConfigObjWrap from .meta_configment import MetaConfigment from .configment_validator import ConfigmentValidator from .pathname import Pathname from .environment import load_configspec __author__ = "Simone Campagna" __all__ = [ 'create_configment_class', 'Configment', 'ConfigmentValidateError', ] class ConfigmentValidateError(validate.ValidateError): def __str__(self): return "validation failed: {}".format(self.args[0]) class BaseConfigment(ConfigObjWrap): CONFIGSPEC = None DEFAULT_MODE_HIDE = "hide" DEFAULT_MODE_SHOW = "show" DEFAULT_MODES = [DEFAULT_MODE_HIDE, DEFAULT_MODE_SHOW] DEFAULT_MODE = DEFAULT_MODE_HIDE def __init__(self, filename=None, default_mode=None): super(BaseConfigment, self).__init__( infile=None, configspec=self.__class__.CONFIGSPEC, unrepr=True, interpolation=False, indent_type=" ", stringify=True, ) if default_mode is None: default_mode = self.DEFAULT_MODE self.default_mode = default_mode self.set_filename(filename) if self.filename is not None: self.load_file(filename, throw_on_errors=True) else: self.initialize(throw_on_errors=False) def set_filename(self, filename=None): super(BaseConfigment, self).set_filename(filename) if self.filename is None: self._base_dir = os.getcwd() else: self._base_dir = os.path.dirname(os.path.abspath(filename)) def do_validation(self, base_dir=None, reset=False, throw_on_errors=False): if base_dir is None: base_dir = self._base_dir validator = ConfigmentValidator() copy = self.default_mode == self.DEFAULT_MODE_SHOW result = super(BaseConfigment, self).validate(validator, preserve_errors=True, copy=copy) result = self.filter_validation_result(result) self.set_paths(base_dir, reset=reset) if throw_on_errors and result: raise ConfigmentValidateError(result) c_result = ConfigObjWrap( infile=result, stringify=True, unrepr=True, indent_type=' ', ) return c_result @six.add_metaclass(MetaConfigment) class Configment(BaseConfigment): def __init__(self, filename=None, default_mode=None): super(Configment, self).__init__( filename=filename, default_mode=default_mode, ) def impl_initialize(self, throw_on_errors=False): try: return self.do_validation(reset=False, throw_on_errors=throw_on_errors) except: # pylint: disable=bare-except return False def impl_load_file(self, filename, throw_on_errors=False): default_base_dir = Pathname.get_default_base_dir() Pathname.set_default_base_dir(self._base_dir) self.set_filename(filename) self.reload() try: result = self.do_validation(base_dir=self._base_dir, reset=True, throw_on_errors=throw_on_errors) finally: Pathname.set_default_base_dir(default_base_dir) return result def impl_dump_s(self, stream=None, filename=None, throw_on_errors=False): default_base_dir = Pathname.get_default_base_dir() try: if filename is not None: base_dir = os.path.dirname(os.path.normpath(os.path.abspath(filename))) else: base_dir = self._base_dir Pathname.set_default_base_dir(base_dir) self.do_validation(base_dir=base_dir, reset=False, throw_on_errors=throw_on_errors) self.write(stream) finally: Pathname.set_default_base_dir(default_base_dir) def create_configment_class(configspec_filename, class_name=None, dir_list=None): if class_name is None: class_name = os.path.splitext(os.path.basename(configspec_filename))[0] class_bases = (Configment, ) class_dict = { 'CONFIGSPEC_SOURCE': load_configspec(configspec_filename, dir_list=dir_list), } return MetaConfigment(class_name, class_bases, class_dict)<|fim▁end|>
#
<|file_name|>app.js<|end_file_name|><|fim▁begin|>/*** * Copyright (c) 2013 John Krauss. * * This file is part of Crashmapper. * * Crashmapper is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * Crashmapper is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with Crashmapper. If not, see <http://www.gnu.org/licenses/>. * ***/ /*jslint browser: true, nomen: true, sloppy: true*/ /*globals Backbone, Crashmapper */ /** * @param {Object} options * @constructor * @extends Backbone.View */ Crashmapper.AppView = Backbone.View.extend({ id: 'app', /** * @this {Crashmapper.AppView} */ initialize: function () { this.about = new Crashmapper.AboutView({}).render();<|fim▁hole|> }, /** * @this {Crashmapper.AppView} */ render: function () { return this; } });<|fim▁end|>
this.about.$el.appendTo(this.$el).hide(); this.map = new Crashmapper.MapView({}); this.map.$el.appendTo(this.$el);
<|file_name|>guitester.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- encoding: utf-8; py-indent-offset: 4 -*- # +------------------------------------------------------------------+ # | ____ _ _ __ __ _ __ | # | / ___| |__ ___ ___| | __ | \/ | |/ / | # | | | | '_ \ / _ \/ __| |/ / | |\/| | ' / | # | | |___| | | | __/ (__| < | | | | . \ | # | \____|_| |_|\___|\___|_|\_\___|_| |_|_|\_\ | # | | # | Copyright Mathias Kettner 2014 [email protected] | # +------------------------------------------------------------------+ # # This file is part of Check_MK. # The official homepage is at http://mathias-kettner.de/check_mk. # # check_mk is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation in version 2. check_mk is distributed # in the hope that it will be useful, but WITHOUT ANY WARRANTY; with- # out even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. See the GNU General Public License for more de- # ails. You should have received a copy of the GNU General Public # License along with GNU Make; see the file COPYING. If not, write # to the Free Software Foundation, Inc., 51 Franklin St, Fifth Floor, # Boston, MA 02110-1301 USA. import re import defaults from lib import * class MKGuitestFailed(MKException): def __init__(self, errors): self.errors = errors MKException.__init__(self, _("GUI Test failed")) class GUITester: def __init__(self): self.guitest = None self.replayed_guitest_step = None self.guitest_repair_step = None def init_guitests(self): if self.myfile == "guitest": self.replay_guitest() elif self.guitest_recording_active(): self.begin_guitest_recording() def begin_guitest_recording(self): self.guitest = { "variables" : self.vars.copy(), "filename" : self.myfile, "output" : {}, } # Fix transaction ID: We are just interested in whether it is valid or not if "_transid" in self.vars: if self.transaction_valid(): self.guitest["variables"]["_transid"] = "valid" else: self.guitest["variables"]["_transid"] = "invalid" self.add_status_icon("guitest", _("GUI test recording is active")) def end_guitest_recording(self): if self.guitest != None: self.guitest["user"] = self.user self.guitest["elapsed_time"] = time.time() - self.start_time self.save_guitest_step(self.guitest) def save_guitest_step(self, step): path = defaults.var_dir + "/guitests/RECORD" if not os.path.exists(path): test_steps = [] else: test_steps = eval(file(path).read()) if self.guitest_repair_step != None: mod_step = test_steps[self.guitest_repair_step] mod_step["output"] = step["output"] mod_step["user"] = step["user"] mod_step["elapsed_time"] = step["elapsed_time"] else: test_steps.append(step) file(path, "w").write("%s\n" % pprint.pformat(test_steps)) def load_guitest(self, name): path = defaults.var_dir + "/guitests/" + name + ".mk" try: return eval(file(path).read()) except IOError, e: raise MKGeneralException(_("Cannot load GUI test file %s: %s") % (self.attrencode(path), e)) def replay_guitest(self): test_name = self.var("test") if not test_name: raise MKGuitestFailed([_("Missing the name of the GUI test to run (URL variable 'test')")]) guitest = self.load_guitest(test_name) step_nr_text = self.var("step") try: step_nr = int(step_nr_text) except: raise MKGuitestFailed([_("Invalid or missing test step number (URL variable 'step')")]) if step_nr >= len(guitest) or step_nr < 0: raise MKGuitestFailed([_("Invalid test step number %d (only 0...%d)") % (step_nr, len(guitest)-1)]) repair = self.var("repair") == "1" if repair: self.guitest_repair_step = step_nr self.begin_guitest_recording() self.replayed_guitest_step = guitest[step_nr] self.replayed_guitest_step["replay"] = {} self.myfile = self.replayed_guitest_step["filename"] self.guitest_fake_login(self.replayed_guitest_step["user"]) self.vars = self.replayed_guitest_step["variables"] if "_transid" in self.vars and self.vars["_transid"] == "valid": self.vars["_transid"] = self.get_transid() self.store_new_transids() <|fim▁hole|> self.myfile in self.guitest_ignored_pages() def guitest_ignored_pages(self): return [ "run_cron", "index", "side", "sidebar_snapin", "dashboard", "dashboard_dashlet", "login" ] def guitest_record_output(self, key, value): if self.guitest: self.guitest["output"].setdefault(key, []).append(value) elif self.replayed_guitest_step: self.replayed_guitest_step["replay"].setdefault(key, []).append(value) def finalize_guitests(self): if self.guitest: self.end_guitest_recording() if self.replayed_guitest_step: try: self.end_guitest_replay() except MKGuitestFailed, e: self.write("\n[[[GUITEST FAILED]]]\n%s" % ("\n".join(e.errors))) def end_guitest_replay(self): if self.replayed_guitest_step and self.guitest_repair_step == None: errors = [] for varname in self.replayed_guitest_step["output"].keys(): method = self.guitest_test_method(varname) errors += [ "%s: %s" % (varname, error) for error in method( self.replayed_guitest_step["output"][varname], self.replayed_guitest_step["replay"].get(varname, [])) ] if errors: raise MKGuitestFailed(errors) def guitest_test_method(self, varname): if varname == "data_tables": return guitest_check_datatables elif varname == "page_title": return guitest_check_single_value else: return guitest_check_element_list def guitest_check_single_value(reference, reality): if len(reference) > 1: errors.append("More than one reference value: %s" % ", ".join(reference)) if len(reality) > 1: errors.append("More than one value: %s" % ", ".join(reality)) diff_text = guitest_check_text(reference[0], reality[0]) if diff_text: return [ diff_text ] else: return [] def guitest_check_element_list(reference, reality): errors = [] one_missing = False for entry in reference: if not guitest_entry_in_reference_list(entry, reality): errors.append("missing entry %r" % (entry,)) one_missing = True if one_missing: for entry in reality: if not guitest_entry_in_reference_list(entry, reference): errors.append("exceeding entry %r" % (entry,)) return errors def guitest_entry_in_reference_list(entry, ref_list): for ref_entry in ref_list: if guitest_entries_match(ref_entry, entry): return True return False def guitest_entries_match(ref, real): if type(ref) in (list, tuple): return len(ref) == len(real) and \ map(guitest_drop_dynamic_ids, ref) == map(guitest_drop_dynamic_ids, real) else: return guitest_drop_dynamic_ids(ref) == guitest_drop_dynamic_ids(real) def guitest_check_datatables(reference, reality): if len(reference) != len(reality): return [ _("Expected %d data tables, but got %d") % (len(reference), len(reality)) ] errors = [] for ref_table, real_table in zip(reference, reality): errors += guitest_check_datatable(ref_table, real_table) return errors def guitest_check_datatable(ref_table, real_table): if ref_table["id"] != real_table["id"]: return [ "Table id %s expected, but got %s" % (ref_table["id"], real_table["id"]) ] if len(ref_table["rows"]) != len(real_table["rows"]): return [ "Table %s: expected %d rows, but got %d" % ( ref_table["id"], len(ref_table["rows"]), len(real_table["rows"])) ] for row_nr, (ref_row, real_row) in enumerate(zip(ref_table["rows"], real_table["rows"])): if len(ref_row) != len(real_row): return [ "Table %s, row %d: expected %d columns, but got %d" % ( ref_table["id"], row_nr+1, len(ref_row), len(real_row)) ] # Note: Rows are tuples. The first component is the list of cells for cell_nr, (ref_cell, real_cell) in enumerate(zip(ref_row[0], real_row[0])): # Note: cell is a triple. The first component contains the text diff_text = guitest_check_text(ref_cell[0], real_cell[0]) if diff_text: return [ "Row %d, Column %d: %s" % (row_nr, cell_nr, diff_text) ] return [] def guitest_check_text(ref, real): ref_clean = guitest_drop_dynamic_ids(ref) real_clean = guitest_drop_dynamic_ids(real) if ref_clean == real_clean: return "" prefix, ref_rest, real_rest = find_common_prefix(ref_clean, real_clean) return "expected %s[[[%s]]], got %s[[[%s]]]" % (prefix, ref_rest, prefix, real_rest) def find_common_prefix(a, b): if len(a) > len(b) and a.startswith(b): return b, a[:len(b)], "" if len(b) > len(a) and b.startswith(a): return a, "", b[:len(a)] for i in range(min(len(a), len(b))): if a[i] != b[i]: return a[:i], a[i:], b[i:] return a, "", "" def guitest_drop_dynamic_ids(text): return re.sub("selection(%3d|=)[a-f0-9---]{36}", "selection=*", re.sub("_transid=1[4-6][0-9]{8}/[0-9]+", "_transid=TRANSID", text))<|fim▁end|>
def guitest_recording_active(self): # Activated by symoblic link pointing to recording file return os.path.lexists(defaults.var_dir + "/guitests/RECORD") and not \
<|file_name|>Class3_Sub28_Sub2.java<|end_file_name|><|fim▁begin|>final class Class3_Sub28_Sub2 extends Class3_Sub28 { private static Class94 aClass94_3541 = Class3_Sub4.buildString("yellow:"); static int anInt3542; private static Class94 aClass94_3543 = Class3_Sub4.buildString("Loading config )2 "); static Class94 aClass94_3544 = aClass94_3541; Class140_Sub2 aClass140_Sub2_3545; static Class94 aClass94_3546 = aClass94_3543; static Class94 aClass94_3547 = Class3_Sub4.buildString("Speicher wird zugewiesen)3"); static Class94 aClass94_3548 = aClass94_3541; public static void method534(int var0) { try { aClass94_3546 = null; aClass94_3548 = null; aClass94_3543 = null; int var1 = 101 % ((-29 - var0) / 45); aClass94_3544 = null; aClass94_3547 = null; aClass94_3541 = null; } catch (RuntimeException var2) { throw Class44.method1067(var2, "bk.B(" + var0 + ')'); } } static final void method535(byte var0, int var1) { try { Class151.aFloatArray1934[0] = (float)Class3_Sub28_Sub15.method633(255, var1 >> 16) / 255.0F; Class151.aFloatArray1934[1] = (float)Class3_Sub28_Sub15.method633(var1 >> 8, 255) / 255.0F; Class151.aFloatArray1934[2] = (float)Class3_Sub28_Sub15.method633(255, var1) / 255.0F; Class3_Sub18.method383(-32584, 3); Class3_Sub18.method383(-32584, 4); if(var0 != 56) { method535((byte)127, 99); } } catch (RuntimeException var3) { throw Class44.method1067(var3, "bk.A(" + var0 + ',' + var1 + ')'); } } static final Class75_Sub3 method536(byte var0, Class3_Sub30 var1) { try { if(var0 != 54) { method534(117); } return new Class75_Sub3(var1.method787((byte)25), var1.method787((byte)73), var1.method787((byte)114), var1.method787((byte)33), var1.method787((byte)78), var1.method787((byte)91), var1.method787((byte)120), var1.method787((byte)113), var1.method794((byte)115), var1.method803((byte)-64)); } catch (RuntimeException var3) { throw Class44.method1067(var3, "bk.C(" + var0 + ',' + (var1 != null?"{...}":"null") + ')'); } } Class3_Sub28_Sub2(Class140_Sub2 var1) { try { this.aClass140_Sub2_3545 = var1; <|fim▁hole|> throw Class44.method1067(var3, "bk.<init>(" + (var1 != null?"{...}":"null") + ')'); } } }<|fim▁end|>
} catch (RuntimeException var3) {
<|file_name|>serve-signups-with-flask.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import subprocess import praw from hashlib import sha1 from flask import Flask from flask import Response from flask import request from cStringIO import StringIO from base64 import b64encode from base64 import b64decode from ConfigParser import ConfigParser import OAuth2Util import os import markdown import bleach # encoding=utf8 import sys from participantCollection import ParticipantCollection reload(sys) sys.setdefaultencoding('utf8') # Edit Me! # Each day after you post a signup post, copy its 6-character ID to this array. signupPageSubmissionIds = [ '7zrrj1', '7zxkpq', '8055hn', '80ddrf', '80nbm1', '80waq3' ] flaskport = 8993 app = Flask(__name__) app.debug = True commentHashesAndComments = {} def loginAndReturnRedditSession(): config = ConfigParser() config.read("../reddit-password-credentials.cfg") user = config.get("Reddit", "user") password = config.get("Reddit", "password") # TODO: password auth is going away, and we will soon need to do oauth. redditSession = praw.Reddit(user_agent='Test Script by /u/foobarbazblarg') redditSession.login(user, password, disable_warning=True) # submissions = redditSession.get_subreddit('pornfree').get_hot(limit=5) # print [str(x) for x in submissions] return redditSession def loginOAuthAndReturnRedditSession(): redditSession = praw.Reddit(user_agent='Test Script by /u/foobarbazblarg') # New version of praw does not require explicit use of the OAuth2Util object. Presumably because reddit now REQUIRES oauth. # o = OAuth2Util.OAuth2Util(redditSession, print_log=True, configfile="../reddit-oauth-credentials.cfg") # TODO: Testing comment of refresh. We authenticate fresh every time, so presumably no need to do o.refresh(). # o.refresh(force=True) return redditSession def getSubmissionsForRedditSession(redditSession): # submissions = [redditSession.get_submission(submission_id=submissionId) for submissionId in signupPageSubmissionIds] submissions = [redditSession.submission(id=submissionId) for submissionId in signupPageSubmissionIds] for submission in submissions: submission.comments.replace_more(limit=None) # submission.replace_more_comments(limit=None, threshold=0) return submissions def getCommentsForSubmissions(submissions): comments = [] for submission in submissions: commentForest = submission.comments comments += [comment for comment in commentForest.list() if comment.__class__ == praw.models.Comment] return comments def retireCommentHash(commentHash): with open("retiredcommenthashes.txt", "a") as commentHashFile: commentHashFile.write(commentHash + '\n') def retiredCommentHashes(): with open("retiredcommenthashes.txt", "r") as commentHashFile: # return commentHashFile.readlines() return commentHashFile.read().splitlines() @app.route('/moderatesignups.html') def moderatesignups(): global commentHashesAndComments commentHashesAndComments = {} stringio = StringIO() stringio.write('<html>\n<head>\n</head>\n\n') # redditSession = loginAndReturnRedditSession() redditSession = loginOAuthAndReturnRedditSession() submissions = getSubmissionsForRedditSession(redditSession) flat_comments = getCommentsForSubmissions(submissions) retiredHashes = retiredCommentHashes() i = 1 stringio.write('<iframe name="invisibleiframe" style="display:none;"></iframe>\n') stringio.write("<h3>") stringio.write(os.getcwd()) stringio.write("<br>\n") for submission in submissions: stringio.write(submission.title) stringio.write("<br>\n") stringio.write("</h3>\n\n") stringio.write('<form action="copydisplayduringsignuptoclipboard.html" method="post" target="invisibleiframe">') stringio.write('<input type="submit" value="Copy display-during-signup.py stdout to clipboard">') stringio.write('</form>') for comment in flat_comments: # print comment.is_root # print comment.score i += 1 commentHash = sha1() commentHash.update(comment.fullname) commentHash.update(comment.body.encode('utf-8')) commentHash = commentHash.hexdigest() if commentHash not in retiredHashes: commentHashesAndComments[commentHash] = comment authorName = str(comment.author) # can be None if author was deleted. So check for that and skip if it's None. stringio.write("<hr>\n") stringio.write('<font color="blue"><b>') stringio.write(authorName) # can be None if author was deleted. So check for that and skip if it's None. stringio.write('</b></font><br>') if ParticipantCollection().hasParticipantNamed(authorName): stringio.write(' <small><font color="green">(member)</font></small>') # if ParticipantCollection().participantNamed(authorName).isStillIn: # stringio.write(' <small><font color="green">(in)</font></small>') # else: # stringio.write(' <small><font color="red">(out)</font></small>') else: stringio.write(' <small><font color="red">(not a member)</font></small>') stringio.write('<form action="takeaction.html" method="post" target="invisibleiframe">') stringio.write('<input type="submit" name="actiontotake" value="Signup" style="color:white;background-color:green">') # stringio.write('<input type="submit" name="actiontotake" value="Signup and checkin">') # stringio.write('<input type="submit" name="actiontotake" value="Relapse">') # stringio.write('<input type="submit" name="actiontotake" value="Reinstate">') stringio.write('<input type="submit" name="actiontotake" value="Skip comment">') stringio.write('<input type="submit" name="actiontotake" value="Skip comment and don\'t upvote">') stringio.write('<input type="hidden" name="username" value="' + b64encode(authorName) + '">') stringio.write('<input type="hidden" name="commenthash" value="' + commentHash + '">') # stringio.write('<input type="hidden" name="commentpermalink" value="' + comment.permalink + '">') stringio.write('</form>') stringio.write(bleach.clean(markdown.markdown(comment.body.encode('utf-8')), tags=['p'])) stringio.write("\n<br><br>\n\n") stringio.write('</html>') pageString = stringio.getvalue() stringio.close() return Response(pageString, mimetype='text/html') @app.route('/takeaction.html', methods=["POST"]) def takeaction(): username = b64decode(request.form["username"]) commentHash = str(request.form["commenthash"]) # commentPermalink = request.form["commentpermalink"] actionToTake = request.form["actiontotake"] # print commentHashesAndComments comment = commentHashesAndComments[commentHash] # print "comment: " + str(comment) if actionToTake == 'Signup': print "signup - " + username subprocess.call(['./signup.py', username]) comment.upvote()<|fim▁hole|> # print "signup and checkin - " + username # subprocess.call(['./signup-and-checkin.sh', username]) # comment.upvote() # retireCommentHash(commentHash) # elif actionToTake == 'Relapse': # print "relapse - " + username # subprocess.call(['./relapse.py', username]) # comment.upvote() # retireCommentHash(commentHash) # elif actionToTake == 'Reinstate': # print "reinstate - " + username # subprocess.call(['./reinstate.py', username]) # comment.upvote() # retireCommentHash(commentHash) elif actionToTake == 'Skip comment': print "Skip comment - " + username comment.upvote() retireCommentHash(commentHash) elif actionToTake == "Skip comment and don't upvote": print "Skip comment and don't upvote - " + username retireCommentHash(commentHash) return Response("hello", mimetype='text/html') @app.route('/copydisplayduringsignuptoclipboard.html', methods=["POST"]) def copydisplayduringsignuptoclipboard(): print "TODO: Copy display to clipboard" subprocess.call(['./display-during-signup.py']) return Response("hello", mimetype='text/html') if __name__ == '__main__': app.run(host='127.0.0.1', port=flaskport)<|fim▁end|>
retireCommentHash(commentHash) # if actionToTake == 'Signup and checkin':
<|file_name|>types.ts<|end_file_name|><|fim▁begin|>// TYPESCRIPT TODO: move this to a larger shared types file, preferably within otp-ui export interface StopData { bikeRental: BikeRental fetchStatus: number id: string lat: number locationType: number lon: number name: string nearbyStops: string[] parkAndRideLocations: any[] routes: Route[] stopTimes: StopTime[]<|fim▁hole|> vehicleType: number vehicleTypeSet: boolean wheelchairBoarding: number } export interface BikeRental { stations: any[] } export interface Route { agencyId: string agencyName: string id: string longName: string mode: string sortOrder: number } export interface StopTime { pattern: Pattern times: Time[] } export interface Pattern { desc: string headsign: string id: string } export interface Time { arrivalDelay: number continuousDropOff: number continuousPickup: number departureDelay: number headsign: string realtime: boolean realtimeArrival: number realtimeDeparture: number realtimeState: string scheduledArrival: number scheduledDeparture: number serviceAreaRadius: number serviceDay: number stopCount: number stopId: string stopIndex: number timepoint: boolean tripId: string } export interface VehicleRental { errorsByNetwork: { [key: string]: { message?: string; severity?: string } } systemInformationDataByNetwork: { [key: string]: { message?: string; severity?: string } } }<|fim▁end|>
stopTimesLastUpdated: number vehicleRental: VehicleRental
<|file_name|>add_loss_correctness_test.py<|end_file_name|><|fim▁begin|># Copyright 2019 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests add_loss API correctness.""" import numpy as np from tensorflow.python.eager import backprop from tensorflow.python.eager import context from tensorflow.python.eager import def_function from tensorflow.python.keras import Input from tensorflow.python.keras import keras_parameterized from tensorflow.python.keras import layers from tensorflow.python.keras import losses from tensorflow.python.keras import Model from tensorflow.python.keras import optimizer_v2 from tensorflow.python.keras import Sequential from tensorflow.python.keras import testing_utils from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.platform import test from tensorflow.python.platform import tf_logging as logging from tensorflow.python.training.rmsprop import RMSPropOptimizer MAE = losses.MeanAbsoluteError mae = losses.mean_absolute_error def get_ctl_train_step(model): optimizer = optimizer_v2.gradient_descent.SGD(0.05) def train_step(x, y, w=None): with backprop.GradientTape() as tape: if w is not None: model([x, y, w]) else: model([x, y]) loss = math_ops.reduce_sum(model.losses) gradients = tape.gradient(loss, model.trainable_weights) optimizer.apply_gradients(zip(gradients, model.trainable_weights)) return loss return train_step # TODO(psv): Add tests cases where a model is used in loss function but is # not part of the training model. class TestAddLossCorrectness(keras_parameterized.TestCase): def setUp(self): super(TestAddLossCorrectness, self).setUp() self.x = np.array([[0.], [1.], [2.]], dtype='float32') self.y = np.array([[0.5], [2.], [3.5]], dtype='float32') self.w = np.array([[1.25], [0.5], [1.25]], dtype='float32') @keras_parameterized.run_all_keras_modes def test_loss_on_model_fit(self): inputs = Input(shape=(1,)) targets = Input(shape=(1,)) outputs = testing_utils.Bias()(inputs) model = Model([inputs, targets], outputs) model.add_loss(MAE()(targets, outputs)) model.add_loss(math_ops.reduce_mean(mae(targets, outputs))) model.compile( optimizer_v2.gradient_descent.SGD(0.05), run_eagerly=testing_utils.should_run_eagerly()) history = model.fit([self.x, self.y], batch_size=3, epochs=5) self.assertAllClose(history.history['loss'], [2., 1.8, 1.6, 1.4, 1.2], 1e-3) @keras_parameterized.run_with_all_model_types(exclude_models=['sequential']) @keras_parameterized.run_all_keras_modes(always_skip_v1=True) def test_loss_callable_on_model_fit(self): model = testing_utils.get_model_from_layers([testing_utils.Bias()], input_shape=(1,)) def callable_loss(): return math_ops.reduce_sum(model.weights) model.add_loss(callable_loss) model.compile( optimizer_v2.gradient_descent.SGD(0.1), run_eagerly=testing_utils.should_run_eagerly()) history = model.fit(self.x, batch_size=3, epochs=5) self.assertAllClose(history.history['loss'], [0., -.1, -.2, -.3, -.4], 1e-3) @keras_parameterized.run_all_keras_modes(always_skip_v1=True) def test_loss_on_model_ctl(self): def get_model_and_train_step(): inputs = Input(shape=(1,)) targets = Input(shape=(1,)) outputs = testing_utils.Bias()(inputs) model = Model([inputs, targets], outputs) model.add_loss(MAE()(targets, outputs)) model.add_loss(math_ops.reduce_mean(mae(targets, outputs))) return get_ctl_train_step(model) train_step = get_model_and_train_step() loss = [train_step(self.x, self.y) for _ in range(5)] self.assertAllClose(loss, [2., 1.8, 1.6, 1.4, 1.2], 1e-3) train_step = def_function.function(get_model_and_train_step()) loss = [train_step(self.x, self.y) for _ in range(5)] self.assertAllClose(loss, [2., 1.8, 1.6, 1.4, 1.2], 1e-3) @keras_parameterized.run_all_keras_modes(always_skip_v1=True) def test_loss_callable_on_model_ctl(self): def get_model_and_train_step(): inputs = Input(shape=(1,)) targets = Input(shape=(1,)) outputs = testing_utils.Bias()(inputs) model = Model([inputs, targets], outputs) def callable_loss(): return math_ops.reduce_sum(model.weights) model.add_loss(callable_loss) return get_ctl_train_step(model) train_step = get_model_and_train_step() loss = [train_step(self.x, self.y) for _ in range(5)] self.assertAllClose(loss, [0., -0.05, -0.1, -0.15, -0.2], 1e-3) train_step = def_function.function(get_model_and_train_step()) loss = [train_step(self.x, self.y) for _ in range(5)] self.assertAllClose(loss, [0., -0.05, -0.1, -0.15, -0.2], 1e-3) @keras_parameterized.run_all_keras_modes def test_loss_with_sample_weight_on_model_fit(self): inputs = Input(shape=(1,)) targets = Input(shape=(1,)) sw = Input(shape=(1,)) outputs = testing_utils.Bias()(inputs) model = Model([inputs, targets, sw], outputs) model.add_loss(MAE()(targets, outputs, sw)) model.add_loss(3 * math_ops.reduce_mean(sw * mae(targets, outputs))) model.compile( optimizer_v2.gradient_descent.SGD(0.025), run_eagerly=testing_utils.should_run_eagerly()) history = model.fit([self.x, self.y, self.w], batch_size=3, epochs=5) self.assertAllClose(history.history['loss'], [4., 3.6, 3.2, 2.8, 2.4], 1e-3) @keras_parameterized.run_all_keras_modes(always_skip_v1=True) def test_loss_with_sample_weight_on_model_ctl(self): def get_model_and_train_step(): inputs = Input(shape=(1,)) targets = Input(shape=(1,)) sw = Input(shape=(1,)) outputs = testing_utils.Bias()(inputs) model = Model([inputs, targets, sw], outputs) model.add_loss(MAE()(targets, outputs, sw)) model.add_loss(math_ops.reduce_mean(sw * mae(targets, outputs))) return get_ctl_train_step(model) train_step = get_model_and_train_step() loss = [train_step(self.x, self.y, self.w) for _ in range(5)] self.assertAllClose(loss, [2., 1.8, 1.6, 1.4, 1.2], 1e-3) train_step = def_function.function(get_model_and_train_step()) loss = [train_step(self.x, self.y, self.w) for _ in range(5)] self.assertAllClose(loss, [2., 1.8, 1.6, 1.4, 1.2], 1e-3) @keras_parameterized.run_all_keras_modes def test_loss_with_sample_weight_in_model_call(self): class MyModel(Model): def __init__(self): super(MyModel, self).__init__() self.bias = testing_utils.Bias() def call(self, inputs): outputs = self.bias(inputs[0]) self.add_loss(MAE()(inputs[1], outputs, inputs[2])) self.add_loss(math_ops.reduce_mean(inputs[2] * mae(inputs[1], outputs))) return outputs model = MyModel() model.predict([self.x, self.y, self.w]) model.compile( optimizer_v2.gradient_descent.SGD(0.05), run_eagerly=testing_utils.should_run_eagerly()) history = model.fit([self.x, self.y, self.w], batch_size=3, epochs=5) self.assertEqual(len(model.losses), 2) self.assertAllClose(history.history['loss'], [2., 1.8, 1.6, 1.4, 1.2], 1e-3) eval_out = model.evaluate([self.x, self.y, self.w]) self.assertAlmostEqual(eval_out, 1.0, 3) @keras_parameterized.run_all_keras_modes def test_loss_with_sample_weight_in_layer_call(self): class MyLayer(layers.Layer): def __init__(self): super(MyLayer, self).__init__() self.bias = testing_utils.Bias() def call(self, inputs): out = self.bias(inputs[0]) self.add_loss(MAE()(inputs[1], out, inputs[2])) self.add_loss(math_ops.reduce_mean(inputs[2] * mae(inputs[1], out))) return out inputs = Input(shape=(1,)) targets = Input(shape=(1,)) sw = Input(shape=(1,))<|fim▁hole|> model.compile( optimizer_v2.gradient_descent.SGD(0.05), run_eagerly=testing_utils.should_run_eagerly()) history = model.fit([self.x, self.y, self.w], batch_size=3, epochs=5) self.assertAllClose(history.history['loss'], [2., 1.8, 1.6, 1.4, 1.2], 1e-3) output = model.evaluate([self.x, self.y, self.w]) self.assertAlmostEqual(output, 1.0, 3) output = model.test_on_batch([self.x, self.y, self.w]) self.assertAlmostEqual(output, 1.0, 3) @keras_parameterized.run_all_keras_modes def test_loss_on_layer(self): class MyLayer(layers.Layer): def call(self, inputs): self.add_loss(math_ops.reduce_sum(inputs)) return inputs inputs = Input((3,)) layer = MyLayer() outputs = layer(inputs) model = Model(inputs, outputs) self.assertEqual(len(model.losses), 1) model.compile( 'sgd', 'mse', run_eagerly=testing_utils.should_run_eagerly()) loss = model.train_on_batch(np.ones((2, 3)), np.ones((2, 3))) self.assertEqual(loss, 2 * 3) @keras_parameterized.run_all_keras_modes @keras_parameterized.run_with_all_model_types def test_activity_regularizer(self): loss = {} for reg in [None, 'l2']: model_layers = [ layers.Dense( 10, activation='relu', activity_regularizer=reg, kernel_initializer='ones', use_bias=False), layers.Dense( 1, activation='sigmoid', kernel_initializer='ones', use_bias=False), ] model = testing_utils.get_model_from_layers( model_layers, input_shape=(10,)) x = np.ones((10, 10), 'float32') y = np.zeros((10, 1), 'float32') optimizer = RMSPropOptimizer(learning_rate=0.001) model.compile( optimizer, 'binary_crossentropy', run_eagerly=testing_utils.should_run_eagerly()) model.fit(x, y, batch_size=2, epochs=5) loss[reg] = model.evaluate(x, y) self.assertLess(loss[None], loss['l2']) @keras_parameterized.run_all_keras_modes @keras_parameterized.run_with_all_model_types def test_activity_regularizer_loss_value(self): layer = layers.Dense( 1, kernel_initializer='zeros', bias_initializer='ones', activity_regularizer='l2') model = testing_utils.get_model_from_layers([layer], input_shape=(10,)) x = np.ones((10, 10), 'float32') optimizer = RMSPropOptimizer(learning_rate=0.001) model.compile( optimizer, run_eagerly=testing_utils.should_run_eagerly()) loss = model.test_on_batch(x) self.assertAlmostEqual(0.01, loss, places=4) @keras_parameterized.run_all_keras_modes def test_activity_regularizer_batch_independent(self): inputs = layers.Input(shape=(10,)) x = layers.Dense(10, activation='relu', activity_regularizer='l2')(inputs) outputs = layers.Dense(1, activation='sigmoid')(x) model = Model(inputs, outputs) optimizer = RMSPropOptimizer(learning_rate=0.001) model.compile( optimizer, run_eagerly=testing_utils.should_run_eagerly()) loss_small_batch = model.test_on_batch(np.ones((10, 10), 'float32')) loss_big_batch = model.test_on_batch(np.ones((20, 10), 'float32')) self.assertAlmostEqual(loss_small_batch, loss_big_batch, places=4) @keras_parameterized.run_all_keras_modes def test_with_shared_layer(self): class LayerWithLoss(layers.Layer): def call(self, inputs): self.add_loss(math_ops.reduce_sum(inputs), inputs=inputs) return inputs * 2 shared_layer = LayerWithLoss() m = Sequential([shared_layer]) m2 = Sequential([shared_layer, m]) m2(array_ops.constant([1, 2, 3])) self.assertEqual(len(m2.losses), 2) self.assertAllClose(m2.losses, [6, 12]) @keras_parameterized.run_all_keras_modes def test_with_shared_nested_layer(self): class LayerWithLoss(layers.Layer): def call(self, inputs): self.add_loss(math_ops.reduce_sum(inputs), inputs=inputs) return inputs * 2 class LayerWithNestedLayerWithLoss(layers.Layer): def __init__(self): super(LayerWithNestedLayerWithLoss, self).__init__() self.loss_layer = LayerWithLoss() def call(self, inputs): return self.loss_layer(inputs) shared_layer = LayerWithNestedLayerWithLoss() m = Sequential([shared_layer]) m2 = Sequential([shared_layer, m]) m2(array_ops.constant([1, 2, 3])) self.assertEqual(len(m2.losses), 2) self.assertAllClose(m2.losses, [6, 12]) @keras_parameterized.run_all_keras_modes def test_clear_losses(self): class LayerWithSharedNestedLossLayer(layers.Layer): def __init__(self): super(LayerWithSharedNestedLossLayer, self).__init__() self.loss_layer = layers.ActivityRegularization(l2=0.001) self.add_weight(shape=(1,), regularizer='l2') def call(self, x): x = self.loss_layer(x) return self.loss_layer(x) inputs = Input(shape=(1,)) l = LayerWithSharedNestedLossLayer() # Weight loss + 2 activity losses. x1 = array_ops.ones((1, 1)) _ = l(x1) if not context.executing_eagerly(): self.assertEqual(len(l.get_losses_for(x1)), 2) self.assertEqual(len(l.get_losses_for(None)), 1) x2 = array_ops.ones((1, 1)) _ = l(x2) if not context.executing_eagerly(): self.assertEqual(len(l.get_losses_for(x1)), 2) self.assertEqual(len(l.get_losses_for(x2)), 2) self.assertEqual(len(l.get_losses_for(None)), 1) outputs = l(inputs) model = Model(inputs, outputs) if not context.executing_eagerly(): self.assertEqual(len(model.losses), 7) self.assertEqual(len(l.get_losses_for(x1)), 2) self.assertEqual(len(l.get_losses_for(x2)), 2) self.assertEqual(len(l.get_losses_for(None)), 1) x3 = array_ops.ones((1, 1)) model(x3) x4 = array_ops.ones((1, 1)) model(x4) if context.executing_eagerly(): # Eager losses are cleared every `__call__`. self.assertEqual(len(model.losses), 3) else: self.assertEqual(len(model.losses), 11) self.assertEqual(len(model.get_losses_for(x3)), 2) self.assertEqual(len(model.get_losses_for(x4)), 2) self.assertEqual(len(model.get_losses_for(None)), 1) @keras_parameterized.run_all_keras_modes(always_skip_v1=True) def test_invalid_constant_input(self): inputs = Input(shape=(1,)) outputs = testing_utils.Bias()(inputs) model = Model(inputs, outputs) with self.assertRaisesRegex( ValueError, 'Expected a symbolic Tensors or a callable for the loss value'): model.add_loss(1.) @keras_parameterized.run_all_keras_modes(always_skip_v1=True) def test_invalid_variable_input(self): inputs = Input(shape=(1,)) outputs = testing_utils.Bias()(inputs) model = Model(inputs, outputs) with self.assertRaisesRegex( ValueError, 'Expected a symbolic Tensors or a callable for the loss value'): model.add_loss(model.weights[0]) @keras_parameterized.run_all_keras_modes def test_add_entropy_loss_on_functional_model(self): inputs = Input(shape=(1,)) targets = Input(shape=(1,)) outputs = testing_utils.Bias()(inputs) model = Model([inputs, targets], outputs) model.add_loss(losses.binary_crossentropy(targets, outputs)) model.compile('sgd', run_eagerly=testing_utils.should_run_eagerly()) with test.mock.patch.object(logging, 'warning') as mock_log: model.fit([self.x, self.y], batch_size=3, epochs=5) self.assertNotIn('Gradients do not exist for variables', str(mock_log.call_args)) if __name__ == '__main__': test.main()<|fim▁end|>
outputs = MyLayer()([inputs, targets, sw]) model = Model([inputs, targets, sw], outputs) model.predict([self.x, self.y, self.w])
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>extern crate uu_readlink; fn main() {<|fim▁hole|> std::process::exit(uu_readlink::uumain(std::env::args().collect())); }<|fim▁end|>
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Bindings for [netfilter_queue](http://netfilter.org/projects/libnetfilter_queue/doxygen/index.html) //! //! These bindings allow you to have access to the `QUEUE` and `NFQUEUE`, set in `iptables`, //! and write your own userspace programs to process these queues. #![deny(missing_docs)]<|fim▁hole|>#[macro_use] extern crate lazy_static; mod ffi; mod error; mod util; mod lock; pub mod handle; pub mod queue; pub mod message; //#[cfg(test)] //mod test;<|fim▁end|>
extern crate libc; extern crate num;
<|file_name|>object-streams.js<|end_file_name|><|fim▁begin|>var PassThrough = require('stream').PassThrough describe('Object Streams', function () { it('should be supported', function (done) { var app = koala() app.use(function* (next) { var body = this.body = new PassThrough({ objectMode: true }) body.write({ message: 'a' }) body.write({ message: 'b' })<|fim▁hole|> }) request(app.listen()) .get('/') .expect(200) .expect([{ message: 'a' }, { message: 'b' }], done) }) })<|fim▁end|>
body.end()
<|file_name|>test_cover.py<|end_file_name|><|fim▁begin|>"""Tests for the Bond cover device.""" from datetime import timedelta from bond_api import Action, DeviceType from homeassistant import core from homeassistant.components.cover import DOMAIN as COVER_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, SERVICE_CLOSE_COVER, SERVICE_OPEN_COVER, SERVICE_STOP_COVER, ) from homeassistant.helpers.entity_registry import EntityRegistry from homeassistant.util import utcnow from .common import ( help_test_entity_available, patch_bond_action, patch_bond_device_state, setup_platform, ) from tests.common import async_fire_time_changed def shades(name: str): """Create motorized shades with given name.""" return {"name": name, "type": DeviceType.MOTORIZED_SHADES} async def test_entity_registry(hass: core.HomeAssistant): """Tests that the devices are registered in the entity registry.""" await setup_platform( hass, COVER_DOMAIN, shades("name-1"), bond_version={"bondid": "test-hub-id"}, bond_device_id="test-device-id", ) registry: EntityRegistry = await hass.helpers.entity_registry.async_get_registry() entity = registry.entities["cover.name_1"] assert entity.unique_id == "test-hub-id_test-device-id" async def test_open_cover(hass: core.HomeAssistant): """Tests that open cover command delegates to API.""" await setup_platform( hass, COVER_DOMAIN, shades("name-1"), bond_device_id="test-device-id" ) with patch_bond_action() as mock_open, patch_bond_device_state(): await hass.services.async_call( COVER_DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: "cover.name_1"}, blocking=True, ) await hass.async_block_till_done() mock_open.assert_called_once_with("test-device-id", Action.open()) async def test_close_cover(hass: core.HomeAssistant): """Tests that close cover command delegates to API.""" await setup_platform( hass, COVER_DOMAIN, shades("name-1"), bond_device_id="test-device-id" ) with patch_bond_action() as mock_close, patch_bond_device_state(): await hass.services.async_call( COVER_DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: "cover.name_1"}, blocking=True, ) await hass.async_block_till_done() mock_close.assert_called_once_with("test-device-id", Action.close()) async def test_stop_cover(hass: core.HomeAssistant): """Tests that stop cover command delegates to API.""" await setup_platform( hass, COVER_DOMAIN, shades("name-1"), bond_device_id="test-device-id" ) with patch_bond_action() as mock_hold, patch_bond_device_state(): await hass.services.async_call( COVER_DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: "cover.name_1"}, blocking=True, ) await hass.async_block_till_done() mock_hold.assert_called_once_with("test-device-id", Action.hold()) async def test_update_reports_open_cover(hass: core.HomeAssistant): """Tests that update command sets correct state when Bond API reports cover is open.""" await setup_platform(hass, COVER_DOMAIN, shades("name-1")) with patch_bond_device_state(return_value={"open": 1}): async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() assert hass.states.get("cover.name_1").state == "open" async def test_update_reports_closed_cover(hass: core.HomeAssistant):<|fim▁hole|> async_fire_time_changed(hass, utcnow() + timedelta(seconds=30)) await hass.async_block_till_done() assert hass.states.get("cover.name_1").state == "closed" async def test_cover_available(hass: core.HomeAssistant): """Tests that available state is updated based on API errors.""" await help_test_entity_available( hass, COVER_DOMAIN, shades("name-1"), "cover.name_1" )<|fim▁end|>
"""Tests that update command sets correct state when Bond API reports cover is closed.""" await setup_platform(hass, COVER_DOMAIN, shades("name-1")) with patch_bond_device_state(return_value={"open": 0}):
<|file_name|>issue-53789-2.rs<|end_file_name|><|fim▁begin|>// Regression test for #53789. // // compile-pass #![feature(nll)] #![allow(unused_variables)] use std::collections::BTreeMap; use std::ops::Range; use std::cmp::Ord; macro_rules! valuetree { () => { type ValueTree = <Self::Strategy as $crate::Strategy>::Value; }; } macro_rules! product_unpack { ($factor: pat) => { ($factor,) }; ($($factor: pat),*) => { ( $( $factor ),* ) }; ($($factor: pat),*,) => { ( $( $factor ),* ) }; } macro_rules! product_type { ($factor: ty) => { ($factor,) }; ($($factor: ty),*) => { ( $( $factor, )* ) }; ($($factor: ty),*,) => { ( $( $factor, )* ) }; } macro_rules! default { ($type: ty, $val: expr) => { impl Default for $type { fn default() -> Self { $val.into() } } }; } // Pervasive internal sugar macro_rules! mapfn { ($(#[$meta:meta])* [$($vis:tt)*] fn $name:ident[$($gen:tt)*]($parm:ident: $input:ty) -> $output:ty { $($body:tt)* }) => { $(#[$meta])* #[derive(Clone, Copy)] $($vis)* struct $name; impl $($gen)* statics::MapFn<$input> for $name { type Output = $output; } } } macro_rules! opaque_strategy_wrapper { ($(#[$smeta:meta])* pub struct $stratname:ident [$($sgen:tt)*][$($swhere:tt)*] ($innerstrat:ty) -> $stratvtty:ty; $(#[$vmeta:meta])* pub struct $vtname:ident [$($vgen:tt)*][$($vwhere:tt)*] ($innervt:ty) -> $actualty:ty; ) => { $(#[$smeta])* struct $stratname $($sgen)* (std::marker::PhantomData<(K, V)>) $($swhere)*; $(#[$vmeta])* struct $vtname $($vgen)* ($innervt) $($vwhere)*; impl $($sgen)* Strategy for $stratname $($sgen)* $($swhere)* { type Value = $stratvtty; } impl $($vgen)* ValueTree for $vtname $($vgen)* $($vwhere)* { type Value = $actualty; } } } trait ValueTree { type Value; } trait Strategy { type Value : ValueTree; } #[derive(Clone)] struct VecStrategy<T : Strategy> { element: T, size: Range<usize>, } fn vec<T : Strategy>(element: T, size: Range<usize>) -> VecStrategy<T> { VecStrategy { element: element, size: size, } } type ValueFor<S> = <<S as Strategy>::Value as ValueTree>::Value; trait Arbitrary<'a>: Sized { fn arbitrary_with(args: Self::Parameters) -> Self::Strategy; type Parameters: Default; type Strategy: Strategy<Value = Self::ValueTree>; type ValueTree: ValueTree<Value = Self>; } type StrategyFor<A> = StrategyType<'static, A>; type StrategyType<'a, A> = <A as Arbitrary<'a>>::Strategy; //#[derive(Clone, PartialEq, Eq, Hash, Debug, From, Into)] struct SizeBounds(Range<usize>); default!(SizeBounds, 0..100); <|fim▁hole|> impl From<Range<usize>> for SizeBounds { fn from(high: Range<usize>) -> Self { unimplemented!() } } impl From<SizeBounds> for Range<usize> { fn from(high: SizeBounds) -> Self { unimplemented!() } } fn any_with<'a, A: Arbitrary<'a>>(args: A::Parameters) -> StrategyType<'a, A> { unimplemented!() } impl<K: ValueTree, V: ValueTree> Strategy for (K, V) where <K as ValueTree>::Value: Ord { type Value = TupleValueTree<(K, V)>; } impl<K: ValueTree, V: ValueTree> ValueTree for TupleValueTree<(K, V)> where <K as ValueTree>::Value: Ord { type Value = BTreeMapValueTree<K, V>; } #[derive(Clone)] struct VecValueTree<T : ValueTree> { elements: Vec<T>, } #[derive(Clone, Copy)] struct TupleValueTree<T> { tree: T, } opaque_strategy_wrapper! { #[derive(Clone)] pub struct BTreeMapStrategy[<K, V>] [where K : Strategy, V : Strategy, ValueFor<K> : Ord]( statics::Filter<statics::Map<VecStrategy<(K,V)>, VecToBTreeMap>, MinSize>) -> BTreeMapValueTree<K::Value, V::Value>; #[derive(Clone)] pub struct BTreeMapValueTree[<K, V>] [where K : ValueTree, V : ValueTree, K::Value : Ord]( statics::Filter<statics::Map<VecValueTree<TupleValueTree<(K, V)>>, VecToBTreeMap>, MinSize>) -> BTreeMap<K::Value, V::Value>; } type RangedParams2<A, B> = product_type![SizeBounds, A, B]; impl<'a, A, B> Arbitrary<'a> for BTreeMap<A, B> where A: Arbitrary<'static> + Ord, B: Arbitrary<'static>, StrategyFor<A>: 'static, StrategyFor<B>: 'static, { valuetree!(); type Parameters = RangedParams2<A::Parameters, B::Parameters>; type Strategy = BTreeMapStrategy<A::Strategy, B::Strategy>; fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { let product_unpack![range, a, b] = args; btree_map(any_with::<A>(a), any_with::<B>(b), range.into()) } } #[derive(Clone, Copy)] struct MinSize(usize); mapfn! { [] fn VecToBTreeMap[<K : Ord, V>] (vec: Vec<(K, V)>) -> BTreeMap<K, V> { vec.into_iter().collect() } } fn btree_map<K : Strategy + 'static, V : Strategy + 'static> (key: K, value: V, size: Range<usize>) -> BTreeMapStrategy<K, V> where ValueFor<K> : Ord { unimplemented!() } mod statics { pub(super) trait MapFn<T> { type Output; } #[derive(Clone)] pub struct Filter<S, F> { source: S, fun: F, } impl<S, F> Filter<S, F> { pub fn new(source: S, whence: String, filter: F) -> Self { unimplemented!() } } #[derive(Clone)] pub struct Map<S, F> { source: S, fun: F, } impl<S, F> Map<S, F> { pub fn new(source: S, fun: F) -> Self { unimplemented!() } } } fn main() { }<|fim▁end|>
<|file_name|>speakers.rs<|end_file_name|><|fim▁begin|>// Cala // Copyright © 2017-2021 Jeron Aldaron Lau. // // Licensed under any of:<|fim▁hole|>// - MIT License (https://mit-license.org/) // - Boost Software License, Version 1.0 (https://www.boost.org/LICENSE_1_0.txt) // At your choosing (See accompanying files LICENSE_APACHE_2_0.txt, // LICENSE_MIT.txt and LICENSE_BOOST_1_0.txt). //! Audio playback device //! //! # Getting Started //! **TODO** pub use wavy::{Speakers, SpeakersSink};<|fim▁end|>
// - Apache License, Version 2.0 (https://www.apache.org/licenses/LICENSE-2.0)
<|file_name|>type-punctuation.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> fn Foo<T = Foo, Output = Expr<'tcx> + Foo>() { let i = 6; }<|fim▁end|>
// rustfmt-type_punctuation_density: Compressed
<|file_name|>Calendar.js<|end_file_name|><|fim▁begin|>var EPOCH = 1900; var PATTERN = /(\d+)[^\/|-]/g; function iskabisat(year) { if(year % 4 === 0){ if(year % 100 === 0 && year % 400 !== 0) return false; else return true; } return false; } /* * kabisats: * Calculating how many kabisats' years in time span between * given year and primordial year, Y0 */ function kabisats(year) { var kabcount = 0; for(var i=1900; i<year; i++) { if(iskabisat(i)) kabcount++; } return kabcount; } function calcdays(date) { function months(mth) { var days = 0; for(var i=1; i<mth; i++) { if(extraday.indexOf(i) !== -1) days += 31; else if (i == 2) days += 28; else days += 30; } return days+1; } var extraday = [1, 3, 5, 7, 8, 10, 12]; var dayarr = date.match(PATTERN); var day = parseInt(dayarr[2]); var month = parseInt(dayarr[1]); var year = parseInt(dayarr[0]); var days = day + months(month) + (year - EPOCH) * 365 + kabisats(year); return days; } function triwara(day) { return ['Pasah', 'Beteng', 'Kajeng'][(day-1) % 3]; } function pancawara(day) { var pancalist = ["Umanis", "Paing", "Pon", "Wage", "Kliwon"]; return pancalist[(day-1) % 5]; } function saptawara(day) { return ["Redite", "Coma", "Anggara","Buda", "Wrespati", "Sukra", "Saniscara"][(day-1) % 7]; } function wuku(day) { var wukulist = [ "Sinta", "Landep", "Ukir", "Kulantir", "Tolu", "Gumbreg", "Wariga", "Warigadean", "Julungwangi", "Sungsang", "Dungulan", "Kuningan", "Langkir", "Medangsya", "Pujut", "Pahang", "Krulut", "Merakih", "Tambir", "Medangkungan", "Matal", "Uye", "Menail", "Prangbakat", "Bala", "Ugu", "Wayang", "Kelawu", "Dukut", "Watugunung" ]; idx = (Math.floor((day-1) / 7) + 12) % 30; return wukulist[idx]; }<|fim▁hole|>function bali_calendar(date) { var day = calcdays(date); return { Triwara: triwara(day), Pancawara: pancawara(day), Saptawara: saptawara(day), Wuku: wuku(day) }; } module.exports.bali_calendar = bali_calendar;<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># Copyright © 2017 Collabora Ltd. # # This file is part of pfg. # # pfg is free software: you can redistribute it and/or modify it under the # terms of the GNU Lesser General Public License as published by the Free # Software Foundation, either version 2.1 of the License, or (at your option) # any later version. # # pfg is distributed in the hope that it will be useful, but WITHOUT ANY # WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for # more details. #<|fim▁hole|># Authors: # Alexandros Frantzis <[email protected]><|fim▁end|>
# You should have received a copy of the GNU Lesser General Public License # along with pfg. If not, see <http://www.gnu.org/licenses/>. #
<|file_name|>cnc.py<|end_file_name|><|fim▁begin|>import serial import sys from time import sleep def move(dir): print dir ser.write(dir) # write a string ser.sendBreak(0.25) ser.flush() sleep(1) def ResetCoords(): dir = 'r' print dir ser.write(dir) # write a string ser.sendBreak(0.25) ser.flush() sleep(1) def DrawRect(dim): print "" print "Drawing 10-size rectangle" out = "" k = 2; while(k > 1): print "First side:" dir = 'd' for i in range(0, dim[0]): move(dir) print "Second side:" dir = 'x' for i in range(0, dim[1]): move(dir) print "Third side:" dir = 'a' for i in range(0, dim[0]): move(dir) print "Fourth side:" dir = 'w' for i in range(0, dim[1]): move(dir) print "Finished, starting over." print "________________________" k = k - 1 def ManualControl(): run = 1 while run == 1: print "" print "" print "___________________________" print "Use Keypad or following keys to control motors" print "Direction:" print "q w e" print "a s d" print "z x c" print "Drill control:" print " Up: f" print "Down: v" print "" print "Press m to exit to menu" print "___________________________" select = raw_input(": ") if select == "m": run = 0 else: move(select) def DrawText(): print "This option is not ready yet" <|fim▁hole|> ser.close() # close port def OpenPort(port): print "" print "Initializing Com-port to device." ser = serial.Serial(port, 9600, 8, serial.PARITY_NONE, 1, None, False, False, None, False, None) print "" + ser.portstr + " is open" return ser def Menu(): print "___________________________" print "Menu" print "1. Manual Control" print "2. Demonstration" print "3. Text carving" print "4. Quit" print "" select = raw_input("Select: ") if select == "1": ManualControl() if select == "2": DrawRect([5,5]) if select == "3": DrawText() if select == "4": Quit() def Quit(): ClosePort() sys.exit() print "Welcome to PyCNC 0.5" print "Author: Heikki Juva @ 2011" print "" print "___________________________" port = raw_input("Give CNC port name ") ser = OpenPort(port) print "" while(1): Menu()<|fim▁end|>
return 0 def ClosePort():
<|file_name|>out_parameters.rs<|end_file_name|><|fim▁begin|>use crate::{ analysis::{ self, conversion_type::ConversionType, function_parameters::CParameter, functions::is_carray_with_direct_elements, imports::Imports, return_value, rust_type::RustType, }, config::{self, parameter_matchable::ParameterMatchable}, env::Env, library::{ self, Function, Fundamental, Nullable, ParameterDirection, Type, TypeId, INTERNAL_NAMESPACE, }, nameutil, }; use log::error; use std::slice::Iter; #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum Mode { None, Normal, Optional, Combined, //<use function return> Throws(bool), } impl Default for Mode { fn default() -> Mode { Mode::None } } #[derive(Debug, Default)] pub struct Info { pub mode: Mode, pub params: Vec<analysis::Parameter>, } impl Info { pub fn is_empty(&self) -> bool { self.mode == Mode::None } pub fn iter(&self) -> Iter<'_, analysis::Parameter> { self.params.iter() } } pub fn analyze( env: &Env, func: &Function, func_c_params: &[CParameter], func_ret: &return_value::Info, configured_functions: &[&config::functions::Function], ) -> (Info, bool) { let mut info: Info = Default::default(); let mut unsupported_outs = false;<|fim▁hole|> info.mode = Mode::Throws(use_ret); } else if func.ret.typ == TypeId::tid_none() { info.mode = Mode::Normal; } else if func.ret.typ == TypeId::tid_bool() || func.ret.typ == TypeId::tid_c_bool() { if nullable_override == Some(Nullable(false)) { info.mode = Mode::Combined; } else { info.mode = Mode::Optional; } } else { info.mode = Mode::Combined; } for lib_par in &func.parameters { if lib_par.direction != ParameterDirection::Out { continue; } if can_as_return(env, lib_par) { let mut lib_par = lib_par.clone(); lib_par.name = nameutil::mangle_keywords(&lib_par.name).into_owned(); let configured_parameters = configured_functions.matched_parameters(&lib_par.name); let mut out = analysis::Parameter::from_parameter(env, &lib_par, &configured_parameters); // FIXME: temporary solution for string_type, nullable override. This should completely // work based on the analyzed parameters instead of the library parameters. if let Some(c_par) = func_c_params .iter() .find(|c_par| c_par.name == lib_par.name) { out.lib_par.typ = c_par.typ; out.lib_par.nullable = c_par.nullable; } info.params.push(out); } else { unsupported_outs = true; } } if info.params.is_empty() { info.mode = Mode::None; } if info.mode == Mode::Combined || info.mode == Mode::Throws(true) { let mut ret = analysis::Parameter::from_return_value(env, &func.ret, configured_functions); //TODO: fully switch to use analyzed returns (it add too many Return<Option<>>) if let Some(ref par) = func_ret.parameter { ret.lib_par.typ = par.lib_par.typ; } if let Some(val) = nullable_override { ret.lib_par.nullable = val; } info.params.insert(0, ret); } (info, unsupported_outs) } pub fn analyze_imports<'a>( env: &Env, parameters: impl IntoIterator<Item = &'a library::Parameter>, imports: &mut Imports, ) { for par in parameters { if par.direction == ParameterDirection::Out { analyze_type_imports(env, par.typ, par.caller_allocates, imports); } } } fn analyze_type_imports(env: &Env, typ: TypeId, caller_allocates: bool, imports: &mut Imports) { match env.library.type_(typ) { Type::Alias(alias) => analyze_type_imports(env, alias.typ, caller_allocates, imports), Type::Bitfield(..) | Type::Enumeration(..) => imports.add("std::mem"), Type::Fundamental(fund) if !matches!( fund, Fundamental::Utf8 | Fundamental::OsString | Fundamental::Filename ) => { imports.add("std::mem") } _ if !caller_allocates => match ConversionType::of(env, typ) { ConversionType::Direct | ConversionType::Scalar | ConversionType::Option | ConversionType::Result { .. } => (), _ => imports.add("std::ptr"), }, _ => (), } } pub fn can_as_return(env: &Env, par: &library::Parameter) -> bool { use super::conversion_type::ConversionType::*; match ConversionType::of(env, par.typ) { Direct | Scalar | Option | Result { .. } => true, Pointer => { // Disallow fundamental arrays without length if is_carray_with_direct_elements(env, par.typ) && par.array_length.is_none() { return false; } RustType::builder(env, par.typ) .direction(ParameterDirection::Out) .scope(par.scope) .try_build_param() .is_ok() } Borrow => false, Unknown => false, } } pub fn use_return_value_for_result( env: &Env, ret: &return_value::Info, func_name: &str, configured_functions: &[&config::functions::Function], ) -> bool { let typ = ret .parameter .as_ref() .map(|par| par.lib_par.typ) .unwrap_or_default(); use_function_return_for_result(env, typ, func_name, configured_functions) } pub fn use_function_return_for_result( env: &Env, typ: TypeId, func_name: &str, configured_functions: &[&config::functions::Function], ) -> bool { // Configuration takes precendence over everything. let use_return_for_result = configured_functions .iter() .find_map(|f| f.ret.use_return_for_result.as_ref()); if let Some(use_return_for_result) = use_return_for_result { if typ == Default::default() { error!("Function \"{}\": use_return_for_result set to true, but function has no return value", func_name); return false; } return *use_return_for_result; } if typ == Default::default() { return false; } if typ.ns_id != INTERNAL_NAMESPACE { return true; } let type_ = env.type_(typ); !matches!(&*type_.get_name(), "UInt" | "Boolean" | "Bool") }<|fim▁end|>
let nullable_override = configured_functions.iter().find_map(|f| f.ret.nullable); if func.throws { let use_ret = use_return_value_for_result(env, func_ret, &func.name, configured_functions);
<|file_name|>bitcoin_uk.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="uk" version="2.1"> <context> <name>AboutDialog</name> <message> <source>About UnattainiumV2 Core</source> <translation type="unfinished"/> </message> <message> <source>&lt;b&gt;UnattainiumV2 Core&lt;/b&gt; version</source> <translation type="unfinished"/> </message> <message> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation> Це програмне забезпечення є експериментальним. Поширюється за ліцензією MIT/X11, додаткова інформація міститься у файлі COPYING, а також за адресою http://www.opensource.org/licenses/mit-license.php. Цей продукт включає в себе програмне забезпечення, розроблене в рамках проекту OpenSSL (http://www.openssl.org/), криптографічне програмне забезпечення, написане Еріком Янгом ([email protected]), та функції для роботи з UPnP, написані Томасом Бернардом.</translation> </message> <message> <source>Copyright</source> <translation>Авторське право</translation> </message> <message> <source>The Bitcoin and UnattainiumV2 Core developers</source> <translation type="unfinished"/> </message> <message> <source>(%1-bit)</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressBookPage</name> <message> <source>Double-click to edit address or label</source> <translation>Двічі клікніть на адресу чи назву для їх зміни</translation> </message> <message> <source>Create a new address</source> <translation>Створити нову адресу</translation> </message> <message> <source>&amp;New</source> <translation type="unfinished"/> </message> <message> <source>Copy the currently selected address to the system clipboard</source> <translation>Копіювати виділену адресу в буфер обміну</translation> </message> <message> <source>&amp;Copy</source> <translation type="unfinished"/> </message> <message> <source>C&amp;lose</source> <translation type="unfinished"/> </message> <message> <source>&amp;Copy Address</source> <translation>&amp;Скопіювати адресу</translation> </message> <message> <source>Delete the currently selected address from the list</source> <translation>Вилучити вибрані адреси з переліку</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Експортувати дані з поточної вкладки в файл</translation> </message> <message> <source>&amp;Export</source> <translation>&amp; Експорт</translation> </message> <message> <source>&amp;Delete</source> <translation>&amp;Видалити</translation> </message> <message> <source>Choose the address to send coins to</source> <translation type="unfinished"/> </message> <message> <source>Choose the address to receive coins with</source> <translation type="unfinished"/> </message> <message> <source>C&amp;hoose</source> <translation type="unfinished"/> </message> <message> <source>Sending addresses</source> <translation type="unfinished"/> </message> <message> <source>Receiving addresses</source> <translation type="unfinished"/> </message> <message> <source>These are your UnattainiumV2 addresses for sending payments. Always check the amount and the receiving address before sending coins.</source> <translation>Це ваші UnattainiumV2 адреси для відправки платежів. Перед відправкою монети Завжди перевіряйте суму та адресу прийому.</translation> </message> <message> <source>These are your UnattainiumV2 addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source> <translation type="unfinished"/> </message> <message> <source>Copy &amp;Label</source> <translation>Скопіювати &amp;мітку</translation><|fim▁hole|> </message> <message> <source>&amp;Edit</source> <translation>&amp;Редагувати</translation> </message> <message> <source>Export Address List</source> <translation type="unfinished"/> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Файли відділені комами (*.csv)</translation> </message> <message> <source>Exporting Failed</source> <translation type="unfinished"/> </message> <message> <source>There was an error trying to save the address list to %1.</source> <translation type="unfinished"/> </message> </context> <context> <name>AddressTableModel</name> <message> <source>Label</source> <translation>Назва</translation> </message> <message> <source>Address</source> <translation>Адреса</translation> </message> <message> <source>(no label)</source> <translation>(немає назви)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <source>Passphrase Dialog</source> <translation>Діалог введення паролю</translation> </message> <message> <source>Enter passphrase</source> <translation>Введіть пароль</translation> </message> <message> <source>New passphrase</source> <translation>Новий пароль</translation> </message> <message> <source>Repeat new passphrase</source> <translation>Повторіть пароль</translation> </message> <message> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Введіть новий пароль для гаманця.&lt;br/&gt;Будь ласка, використовуйте паролі що містять &lt;b&gt;як мінімум 10 випадкових символів&lt;/b&gt;, або &lt;b&gt;як мінімум 8 слів&lt;/b&gt;.</translation> </message> <message> <source>Encrypt wallet</source> <translation>Зашифрувати гаманець</translation> </message> <message> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Ця операція потребує пароль для розблокування гаманця.</translation> </message> <message> <source>Unlock wallet</source> <translation>Розблокувати гаманець</translation> </message> <message> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Ця операція потребує пароль для дешифрування гаманця.</translation> </message> <message> <source>Decrypt wallet</source> <translation>Дешифрувати гаманець</translation> </message> <message> <source>Change passphrase</source> <translation>Змінити пароль</translation> </message> <message> <source>Enter the old and new passphrase to the wallet.</source> <translation>Ввести старий та новий паролі для гаманця.</translation> </message> <message> <source>Confirm wallet encryption</source> <translation>Підтвердити шифрування гаманця</translation> </message> <message> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR UNATTAINIUMV2S&lt;/b&gt;!</source> <translation>УВАГА: Якщо ви зашифруєте гаманець і забудете пароль, ви &lt;b&gt;ВТРАТИТЕ ВСІ СВОЇ БІТКОІНИ&lt;/b&gt;!</translation> </message> <message> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Ви дійсно хочете зашифрувати свій гаманець?</translation> </message> <message> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>ВАЖЛИВО: Всі попередні резервні копії, які ви зробили з вашого гаманця файл повинен бути замінений новоствореному, зашифрованому файлі гаманця. З міркувань безпеки, попередні резервні копії в незашифрованому файлі гаманець стане марним, як тільки ви починаєте використовувати нову, зашифрований гаманець.</translation> </message> <message> <source>Warning: The Caps Lock key is on!</source> <translation>Увага: Ввімкнено Caps Lock!</translation> </message> <message> <source>Wallet encrypted</source> <translation>Гаманець зашифровано</translation> </message> <message> <source>UnattainiumV2 will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your unattainiumv2s from being stolen by malware infecting your computer.</source> <translation>Біткоін-клієнт буде закрито для завершення процесу шифрування. Пам&apos;ятайте, що шифрування гаманця не може повністю захистити ваші біткоіни від крадіжки, у випадку якщо ваш комп&apos;ютер буде інфіковано шкідливими програмами.</translation> </message> <message> <source>Wallet encryption failed</source> <translation>Не вдалося зашифрувати гаманець</translation> </message> <message> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>Виникла помилка під час шифрування гаманця. Ваш гаманець не було зашифровано.</translation> </message> <message> <source>The supplied passphrases do not match.</source> <translation>Введені паролі не співпадають.</translation> </message> <message> <source>Wallet unlock failed</source> <translation>Не вдалося розблокувати гаманець</translation> </message> <message> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>Введений пароль є невірним.</translation> </message> <message> <source>Wallet decryption failed</source> <translation>Не вдалося розшифрувати гаманець</translation> </message> <message> <source>Wallet passphrase was successfully changed.</source> <translation>Пароль було успішно змінено.</translation> </message> </context> <context> <name>UnattainiumV2GUI</name> <message> <source>Sign &amp;message...</source> <translation>&amp;Підписати повідомлення...</translation> </message> <message> <source>Synchronizing with network...</source> <translation>Синхронізація з мережею...</translation> </message> <message> <source>&amp;Overview</source> <translation>&amp;Огляд</translation> </message> <message> <source>Node</source> <translation type="unfinished"/> </message> <message> <source>Show general overview of wallet</source> <translation>Показати загальний огляд гаманця</translation> </message> <message> <source>&amp;Transactions</source> <translation>Транзакції</translation> </message> <message> <source>Browse transaction history</source> <translation>Переглянути історію транзакцій</translation> </message> <message> <source>E&amp;xit</source> <translation>&amp;Вихід</translation> </message> <message> <source>Quit application</source> <translation>Вийти</translation> </message> <message> <source>Show information about UnattainiumV2</source> <translation>Показати інформацію про UnattainiumV2</translation> </message> <message> <source>About &amp;Qt</source> <translation>&amp;Про Qt</translation> </message> <message> <source>Show information about Qt</source> <translation>Показати інформацію про Qt</translation> </message> <message> <source>&amp;Options...</source> <translation>&amp;Параметри...</translation> </message> <message> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Шифрування гаманця...</translation> </message> <message> <source>&amp;Backup Wallet...</source> <translation>&amp;Резервне копіювання гаманця...</translation> </message> <message> <source>&amp;Change Passphrase...</source> <translation>Змінити парол&amp;ь...</translation> </message> <message> <source>&amp;Sending addresses...</source> <translation type="unfinished"/> </message> <message> <source>&amp;Receiving addresses...</source> <translation type="unfinished"/> </message> <message> <source>Open &amp;URI...</source> <translation type="unfinished"/> </message> <message> <source>Importing blocks from disk...</source> <translation>Імпорт блоків з диску...</translation> </message> <message> <source>Reindexing blocks on disk...</source> <translation>Переіндексація блоків на диску ...</translation> </message> <message> <source>Send coins to a UnattainiumV2 address</source> <translation>Відправити монети на вказану адресу</translation> </message> <message> <source>Modify configuration options for UnattainiumV2</source> <translation>Редагувати параметри</translation> </message> <message> <source>Backup wallet to another location</source> <translation>Резервне копіювання гаманця в інше місце</translation> </message> <message> <source>Change the passphrase used for wallet encryption</source> <translation>Змінити пароль, який використовується для шифрування гаманця</translation> </message> <message> <source>&amp;Debug window</source> <translation>Вікно зневадження</translation> </message> <message> <source>Open debugging and diagnostic console</source> <translation>Відкрити консоль зневадження і діагностики</translation> </message> <message> <source>&amp;Verify message...</source> <translation>Перевірити повідомлення...</translation> </message> <message> <source>UnattainiumV2</source> <translation>UnattainiumV2</translation> </message> <message> <source>Wallet</source> <translation>Гаманець</translation> </message> <message> <source>&amp;Send</source> <translation>&amp;Відправити</translation> </message> <message> <source>&amp;Receive</source> <translation>&amp;Отримати</translation> </message> <message> <source>&amp;Show / Hide</source> <translation>Показати / Приховати</translation> </message> <message> <source>Show or hide the main Window</source> <translation>Показує або приховує головне вікно</translation> </message> <message> <source>Encrypt the private keys that belong to your wallet</source> <translation>Шифрування закритих ключів, які належать вашому гаманці</translation> </message> <message> <source>Sign messages with your UnattainiumV2 addresses to prove you own them</source> <translation>Підтвердіть, що Ви є власником повідомлення підписавши його Вашою UnattainiumV2-адресою </translation> </message> <message> <source>Verify messages to ensure they were signed with specified UnattainiumV2 addresses</source> <translation>Перевірте повідомлення для впевненості, що воно підписано вказаною UnattainiumV2-адресою</translation> </message> <message> <source>&amp;File</source> <translation>&amp;Файл</translation> </message> <message> <source>&amp;Settings</source> <translation>&amp;Налаштування</translation> </message> <message> <source>&amp;Help</source> <translation>&amp;Довідка</translation> </message> <message> <source>Tabs toolbar</source> <translation>Панель вкладок</translation> </message> <message> <source>[testnet]</source> <translation>[тестова мережа]</translation> </message> <message> <source>UnattainiumV2 Core</source> <translation>UnattainiumV2 Ядро</translation> </message> <message> <source>Request payments (generates QR codes and unattainiumv2: URIs)</source> <translation type="unfinished"/> </message> <message> <source>&amp;About UnattainiumV2 Core</source> <translation type="unfinished"/> </message> <message> <source>Show the list of used sending addresses and labels</source> <translation type="unfinished"/> </message> <message> <source>Show the list of used receiving addresses and labels</source> <translation type="unfinished"/> </message> <message> <source>Open a unattainiumv2: URI or payment request</source> <translation type="unfinished"/> </message> <message> <source>&amp;Command-line options</source> <translation type="unfinished"/> </message> <message> <source>Show the UnattainiumV2 Core help message to get a list with possible UnattainiumV2 command-line options</source> <translation type="unfinished"/> </message> <message> <source>UnattainiumV2 client</source> <translation>UnattainiumV2-клієнт</translation> </message> <message numerus="yes"> <source>%n active connection(s) to UnattainiumV2 network</source> <translation><numerusform>%n активне з&apos;єднання з мережею</numerusform><numerusform>%n активні з&apos;єднання з мережею</numerusform><numerusform>%n активних з&apos;єднань з мережею</numerusform></translation> </message> <message> <source>No block source available...</source> <translation>Ні блок джерела доступні ...</translation> </message> <message> <source>Processed %1 of %2 (estimated) blocks of transaction history.</source> <translation type="unfinished"/> </message> <message> <source>Processed %1 blocks of transaction history.</source> <translation>Оброблено %1 блоків історії транзакцій.</translation> </message> <message numerus="yes"> <source>%n hour(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <source>%n day(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message numerus="yes"> <source>%n week(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <source>%1 and %2</source> <translation type="unfinished"/> </message> <message numerus="yes"> <source>%n year(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <source>%1 behind</source> <translation type="unfinished"/> </message> <message> <source>Last received block was generated %1 ago.</source> <translation type="unfinished"/> </message> <message> <source>Transactions after this will not yet be visible.</source> <translation>Угоди після цього буде ще не буде видно.</translation> </message> <message> <source>Error</source> <translation>Помилка</translation> </message> <message> <source>Warning</source> <translation>Увага</translation> </message> <message> <source>Information</source> <translation>Інформація</translation> </message> <message> <source>Up to date</source> <translation>Синхронізовано</translation> </message> <message> <source>Catching up...</source> <translation>Синхронізується...</translation> </message> <message> <source>Sent transaction</source> <translation>Надіслані транзакції</translation> </message> <message> <source>Incoming transaction</source> <translation>Отримані перекази</translation> </message> <message> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Дата: %1 Кількість: %2 Тип: %3 Адреса: %4 </translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>&lt;b&gt;Зашифрований&lt;/b&gt; гаманець &lt;b&gt;розблоковано&lt;/b&gt;</translation> </message> <message> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>&lt;b&gt;Зашифрований&lt;/b&gt; гаманець &lt;b&gt;заблоковано&lt;/b&gt;</translation> </message> <message> <source>A fatal error occurred. UnattainiumV2 can no longer continue safely and will quit.</source> <translation>Сталася фатальна помилка. UnattainiumV2 більше не може продовжувати безпечно і піде.</translation> </message> </context> <context> <name>ClientModel</name> <message> <source>Network Alert</source> <translation>Сповіщення мережі</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <source>Coin Control Address Selection</source> <translation type="unfinished"/> </message> <message> <source>Quantity:</source> <translation type="unfinished"/> </message> <message> <source>Bytes:</source> <translation type="unfinished"/> </message> <message> <source>Amount:</source> <translation>Кількість:</translation> </message> <message> <source>Priority:</source> <translation type="unfinished"/> </message> <message> <source>Fee:</source> <translation type="unfinished"/> </message> <message> <source>Low Output:</source> <translation type="unfinished"/> </message> <message> <source>After Fee:</source> <translation type="unfinished"/> </message> <message> <source>Change:</source> <translation type="unfinished"/> </message> <message> <source>(un)select all</source> <translation type="unfinished"/> </message> <message> <source>Tree mode</source> <translation type="unfinished"/> </message> <message> <source>List mode</source> <translation type="unfinished"/> </message> <message> <source>Amount</source> <translation>Кількість</translation> </message> <message> <source>Address</source> <translation>Адреса</translation> </message> <message> <source>Date</source> <translation>Дата</translation> </message> <message> <source>Confirmations</source> <translation type="unfinished"/> </message> <message> <source>Confirmed</source> <translation>Підтверджені</translation> </message> <message> <source>Priority</source> <translation type="unfinished"/> </message> <message> <source>Copy address</source> <translation>Скопіювати адресу</translation> </message> <message> <source>Copy label</source> <translation>Скопіювати мітку</translation> </message> <message> <source>Copy amount</source> <translation>Копіювати кількість</translation> </message> <message> <source>Copy transaction ID</source> <translation>Копіювати ID транзакції </translation> </message> <message> <source>Lock unspent</source> <translation type="unfinished"/> </message> <message> <source>Unlock unspent</source> <translation type="unfinished"/> </message> <message> <source>Copy quantity</source> <translation type="unfinished"/> </message> <message> <source>Copy fee</source> <translation type="unfinished"/> </message> <message> <source>Copy after fee</source> <translation type="unfinished"/> </message> <message> <source>Copy bytes</source> <translation type="unfinished"/> </message> <message> <source>Copy priority</source> <translation type="unfinished"/> </message> <message> <source>Copy low output</source> <translation type="unfinished"/> </message> <message> <source>Copy change</source> <translation type="unfinished"/> </message> <message> <source>highest</source> <translation type="unfinished"/> </message> <message> <source>higher</source> <translation type="unfinished"/> </message> <message> <source>high</source> <translation type="unfinished"/> </message> <message> <source>medium-high</source> <translation type="unfinished"/> </message> <message> <source>medium</source> <translation type="unfinished"/> </message> <message> <source>low-medium</source> <translation type="unfinished"/> </message> <message> <source>low</source> <translation type="unfinished"/> </message> <message> <source>lower</source> <translation type="unfinished"/> </message> <message> <source>lowest</source> <translation type="unfinished"/> </message> <message> <source>(%1 locked)</source> <translation type="unfinished"/> </message> <message> <source>none</source> <translation type="unfinished"/> </message> <message> <source>Dust</source> <translation type="unfinished"/> </message> <message> <source>yes</source> <translation type="unfinished"/> </message> <message> <source>no</source> <translation type="unfinished"/> </message> <message> <source>This label turns red, if the transaction size is greater than 1000 bytes.</source> <translation type="unfinished"/> </message> <message> <source>This means a fee of at least %1 per kB is required.</source> <translation type="unfinished"/> </message> <message> <source>Can vary +/- 1 byte per input.</source> <translation type="unfinished"/> </message> <message> <source>Transactions with higher priority are more likely to get included into a block.</source> <translation type="unfinished"/> </message> <message> <source>This label turns red, if the priority is smaller than &quot;medium&quot;.</source> <translation type="unfinished"/> </message> <message> <source>This label turns red, if any recipient receives an amount smaller than %1.</source> <translation type="unfinished"/> </message> <message> <source>This means a fee of at least %1 is required.</source> <translation type="unfinished"/> </message> <message> <source>Amounts below 0.546 times the minimum relay fee are shown as dust.</source> <translation type="unfinished"/> </message> <message> <source>This label turns red, if the change is smaller than %1.</source> <translation type="unfinished"/> </message> <message> <source>(no label)</source> <translation>(немає назви)</translation> </message> <message> <source>change from %1 (%2)</source> <translation type="unfinished"/> </message> <message> <source>(change)</source> <translation type="unfinished"/> </message> </context> <context> <name>EditAddressDialog</name> <message> <source>Edit Address</source> <translation>Редагувати адресу</translation> </message> <message> <source>&amp;Label</source> <translation>&amp;Мітка</translation> </message> <message> <source>The label associated with this address list entry</source> <translation type="unfinished"/> </message> <message> <source>The address associated with this address list entry. This can only be modified for sending addresses.</source> <translation type="unfinished"/> </message> <message> <source>&amp;Address</source> <translation>&amp;Адреса</translation> </message> <message> <source>New receiving address</source> <translation>Нова адреса для отримання</translation> </message> <message> <source>New sending address</source> <translation>Нова адреса для відправлення</translation> </message> <message> <source>Edit receiving address</source> <translation>Редагувати адресу для отримання</translation> </message> <message> <source>Edit sending address</source> <translation>Редагувати адресу для відправлення</translation> </message> <message> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>Введена адреса «%1» вже присутня в адресній книзі.</translation> </message> <message> <source>The entered address &quot;%1&quot; is not a valid UnattainiumV2 address.</source> <translation>Введена адреса «%1» не є коректною адресою в мережі UnattainiumV2.</translation> </message> <message> <source>Could not unlock wallet.</source> <translation>Неможливо розблокувати гаманець.</translation> </message> <message> <source>New key generation failed.</source> <translation>Не вдалося згенерувати нові ключі.</translation> </message> </context> <context> <name>FreespaceChecker</name> <message> <source>A new data directory will be created.</source> <translation type="unfinished"/> </message> <message> <source>name</source> <translation>назва</translation> </message> <message> <source>Directory already exists. Add %1 if you intend to create a new directory here.</source> <translation type="unfinished"/> </message> <message> <source>Path already exists, and is not a directory.</source> <translation type="unfinished"/> </message> <message> <source>Cannot create data directory here.</source> <translation type="unfinished"/> </message> </context> <context> <name>HelpMessageDialog</name> <message> <source>UnattainiumV2 Core - Command-line options</source> <translation type="unfinished"/> </message> <message> <source>UnattainiumV2 Core</source> <translation>UnattainiumV2 Ядро</translation> </message> <message> <source>version</source> <translation>версія</translation> </message> <message> <source>Usage:</source> <translation>Використання:</translation> </message> <message> <source>command-line options</source> <translation>параметри командного рядка</translation> </message> <message> <source>UI options</source> <translation>Параметри інтерфейсу</translation> </message> <message> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Встановлення мови, наприклад &quot;de_DE&quot; (типово: системна)</translation> </message> <message> <source>Start minimized</source> <translation>Запускати згорнутим</translation> </message> <message> <source>Set SSL root certificates for payment request (default: -system-)</source> <translation type="unfinished"/> </message> <message> <source>Show splash screen on startup (default: 1)</source> <translation>Показувати заставку під час запуску (типово: 1)</translation> </message> <message> <source>Choose data directory on startup (default: 0)</source> <translation type="unfinished"/> </message> </context> <context> <name>Intro</name> <message> <source>Welcome</source> <translation>Вітання</translation> </message> <message> <source>Welcome to UnattainiumV2 Core.</source> <translation type="unfinished"/> </message> <message> <source>As this is the first time the program is launched, you can choose where UnattainiumV2 Core will store its data.</source> <translation type="unfinished"/> </message> <message> <source>UnattainiumV2 Core will download and store a copy of the UnattainiumV2 block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source> <translation type="unfinished"/> </message> <message> <source>Use the default data directory</source> <translation type="unfinished"/> </message> <message> <source>Use a custom data directory:</source> <translation type="unfinished"/> </message> <message> <source>UnattainiumV2</source> <translation>UnattainiumV2</translation> </message> <message> <source>Error: Specified data directory &quot;%1&quot; can not be created.</source> <translation type="unfinished"/> </message> <message> <source>Error</source> <translation>Помилка</translation> </message> <message> <source>GB of free space available</source> <translation>ГБ вільного простору доступно</translation> </message> <message> <source>(of %1GB needed)</source> <translation type="unfinished"/> </message> </context> <context> <name>OpenURIDialog</name> <message> <source>Open URI</source> <translation type="unfinished"/> </message> <message> <source>Open payment request from URI or file</source> <translation type="unfinished"/> </message> <message> <source>URI:</source> <translation type="unfinished"/> </message> <message> <source>Select payment request file</source> <translation type="unfinished"/> </message> <message> <source>Select payment request file to open</source> <translation type="unfinished"/> </message> </context> <context> <name>OptionsDialog</name> <message> <source>Options</source> <translation>Параметри</translation> </message> <message> <source>&amp;Main</source> <translation>&amp;Головні</translation> </message> <message> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source> <translation>Оплата додаткових транзакцій за Кб, що допомагає переконатися, що ваші транзакції обробляються швидко. Велика частина операцій проводиться 1 Кб.</translation> </message> <message> <source>Pay transaction &amp;fee</source> <translation>Заплатити комісі&amp;ю</translation> </message> <message> <source>Automatically start UnattainiumV2 after logging in to the system.</source> <translation>Автоматично запускати гаманець при вході до системи.</translation> </message> <message> <source>&amp;Start UnattainiumV2 on system login</source> <translation>&amp;Запускати гаманець при вході в систему</translation> </message> <message> <source>Size of &amp;database cache</source> <translation type="unfinished"/> </message> <message> <source>MB</source> <translation type="unfinished"/> </message> <message> <source>Number of script &amp;verification threads</source> <translation type="unfinished"/> </message> <message> <source>Connect to the UnattainiumV2 network through a SOCKS proxy.</source> <translation type="unfinished"/> </message> <message> <source>&amp;Connect through SOCKS proxy (default proxy):</source> <translation type="unfinished"/> </message> <message> <source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source> <translation type="unfinished"/> </message> <message> <source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source> <translation type="unfinished"/> </message> <message> <source>Third party transaction URLs</source> <translation type="unfinished"/> </message> <message> <source>Active command-line options that override above options:</source> <translation type="unfinished"/> </message> <message> <source>Reset all client options to default.</source> <translation>Скинути всі параметри клієнта на типові.</translation> </message> <message> <source>&amp;Reset Options</source> <translation>Скинути параметри</translation> </message> <message> <source>&amp;Network</source> <translation>&amp;Мережа</translation> </message> <message> <source>(0 = auto, &lt;0 = leave that many cores free)</source> <translation type="unfinished"/> </message> <message> <source>W&amp;allet</source> <translation type="unfinished"/> </message> <message> <source>Expert</source> <translation type="unfinished"/> </message> <message> <source>Enable coin &amp;control features</source> <translation type="unfinished"/> </message> <message> <source>If you disable the spending of unconfirmed change, the change from a transaction cannot be used until that transaction has at least one confirmation. This also affects how your balance is computed.</source> <translation type="unfinished"/> </message> <message> <source>&amp;Spend unconfirmed change</source> <translation type="unfinished"/> </message> <message> <source>Automatically open the UnattainiumV2 client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Автоматично відкривати порт для клієнту біткоін на роутері. Працює лише якщо ваш роутер підтримує UPnP і ця функція увімкнена.</translation> </message> <message> <source>Map port using &amp;UPnP</source> <translation>Відображення порту через &amp;UPnP</translation> </message> <message> <source>Proxy &amp;IP:</source> <translation>&amp;IP проксі:</translation> </message> <message> <source>&amp;Port:</source> <translation>&amp;Порт:</translation> </message> <message> <source>Port of the proxy (e.g. 9050)</source> <translation>Порт проксі-сервера (наприклад 9050)</translation> </message> <message> <source>SOCKS &amp;Version:</source> <translation>SOCKS версії:</translation> </message> <message> <source>SOCKS version of the proxy (e.g. 5)</source> <translation>Версія SOCKS-проксі (наприклад 5)</translation> </message> <message> <source>&amp;Window</source> <translation>&amp;Вікно</translation> </message> <message> <source>Show only a tray icon after minimizing the window.</source> <translation>Показувати лише іконку в треї після згортання вікна.</translation> </message> <message> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>Мінімізувати &amp;у трей</translation> </message> <message> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Згортати замість закриття. Якщо ця опція включена, програма закриється лише після вибору відповідного пункту в меню.</translation> </message> <message> <source>M&amp;inimize on close</source> <translation>Згортати замість закритт&amp;я</translation> </message> <message> <source>&amp;Display</source> <translation>&amp;Відображення</translation> </message> <message> <source>User Interface &amp;language:</source> <translation>Мова інтерфейсу користувача:</translation> </message> <message> <source>The user interface language can be set here. This setting will take effect after restarting UnattainiumV2.</source> <translation>Встановлює мову інтерфейсу. Зміни набудуть чинності після перезапуску UnattainiumV2.</translation> </message> <message> <source>&amp;Unit to show amounts in:</source> <translation>В&amp;имірювати монети в:</translation> </message> <message> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Виберіть одиницю вимірювання монет, яка буде відображатись в гаманці та при відправленні.</translation> </message> <message> <source>Whether to show UnattainiumV2 addresses in the transaction list or not.</source> <translation>Незалежно від того, щоб показати UnattainiumV2 адреси в списку транзакцій чи ні.</translation> </message> <message> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Відображати адресу в списку транзакцій</translation> </message> <message> <source>Whether to show coin control features or not.</source> <translation type="unfinished"/> </message> <message> <source>&amp;OK</source> <translation>&amp;Гаразд</translation> </message> <message> <source>&amp;Cancel</source> <translation>&amp;Скасувати</translation> </message> <message> <source>default</source> <translation>типово</translation> </message> <message> <source>none</source> <translation type="unfinished"/> </message> <message> <source>Confirm options reset</source> <translation>Підтвердження скидання параметрів</translation> </message> <message> <source>Client restart required to activate changes.</source> <translation type="unfinished"/> </message> <message> <source>Client will be shutdown, do you want to proceed?</source> <translation type="unfinished"/> </message> <message> <source>This change would require a client restart.</source> <translation type="unfinished"/> </message> <message> <source>The supplied proxy address is invalid.</source> <translation>Невірно вказано адресу проксі.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <source>Form</source> <translation>Форма</translation> </message> <message> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the UnattainiumV2 network after a connection is established, but this process has not completed yet.</source> <translation>Показана інформація вже може бути застарілою. Ваш гаманець буде автоматично синхронізовано з мережею UnattainiumV2 після встановлення підключення, але цей процес ще не завершено.</translation> </message> <message> <source>Wallet</source> <translation>Гаманець</translation> </message> <message> <source>Available:</source> <translation type="unfinished"/> </message> <message> <source>Your current spendable balance</source> <translation>Ваш поточний баланс расходуемого</translation> </message> <message> <source>Pending:</source> <translation type="unfinished"/> </message> <message> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source> <translation>Всього угод, які ще мають бути підтверджені, і до цих пір не враховуються в расходуемого балансу</translation> </message> <message> <source>Immature:</source> <translation>незрілі:</translation> </message> <message> <source>Mined balance that has not yet matured</source> <translation>Замінований баланс, який ще не дозрів</translation> </message> <message> <source>Total:</source> <translation>всього:</translation> </message> <message> <source>Your current total balance</source> <translation>Ваше поточне Сукупний баланс</translation> </message> <message> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Недавні транзакції&lt;/b&gt;</translation> </message> <message> <source>out of sync</source> <translation>не синхронізовано</translation> </message> </context> <context> <name>PaymentServer</name> <message> <source>URI handling</source> <translation>Обробка URI</translation> </message> <message> <source>URI can not be parsed! This can be caused by an invalid UnattainiumV2 address or malformed URI parameters.</source> <translation>Неможливо обробити URI! Це може бути викликано неправильною UnattainiumV2-адресою, чи невірними параметрами URI.</translation> </message> <message> <source>Requested payment amount of %1 is too small (considered dust).</source> <translation type="unfinished"/> </message> <message> <source>Payment request error</source> <translation type="unfinished"/> </message> <message> <source>Cannot start unattainiumv2: click-to-pay handler</source> <translation type="unfinished"/> </message> <message> <source>Net manager warning</source> <translation type="unfinished"/> </message> <message> <source>Your active proxy doesn&apos;t support SOCKS5, which is required for payment requests via proxy.</source> <translation type="unfinished"/> </message> <message> <source>Payment request fetch URL is invalid: %1</source> <translation type="unfinished"/> </message> <message> <source>Payment request file handling</source> <translation type="unfinished"/> </message> <message> <source>Payment request file can not be read or processed! This can be caused by an invalid payment request file.</source> <translation type="unfinished"/> </message> <message> <source>Unverified payment requests to custom payment scripts are unsupported.</source> <translation type="unfinished"/> </message> <message> <source>Refund from %1</source> <translation type="unfinished"/> </message> <message> <source>Error communicating with %1: %2</source> <translation type="unfinished"/> </message> <message> <source>Payment request can not be parsed or processed!</source> <translation type="unfinished"/> </message> <message> <source>Bad response from server %1</source> <translation type="unfinished"/> </message> <message> <source>Payment acknowledged</source> <translation type="unfinished"/> </message> <message> <source>Network request error</source> <translation type="unfinished"/> </message> </context> <context> <name>QObject</name> <message> <source>UnattainiumV2</source> <translation>UnattainiumV2</translation> </message> <message> <source>Error: Specified data directory &quot;%1&quot; does not exist.</source> <translation type="unfinished"/> </message> <message> <source>Error: Cannot parse configuration file: %1. Only use key=value syntax.</source> <translation type="unfinished"/> </message> <message> <source>Error: Invalid combination of -regtest and -testnet.</source> <translation type="unfinished"/> </message> <message> <source>UnattainiumV2 Core didn&apos;t yet exit safely...</source> <translation type="unfinished"/> </message> <message> <source>Enter a UnattainiumV2 address (e.g. DNS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Введіть адресу UnattainiumV2 (наприклад DNS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> </context> <context> <name>QRImageWidget</name> <message> <source>&amp;Save Image...</source> <translation>&amp;Зберегти зображення...</translation> </message> <message> <source>&amp;Copy Image</source> <translation>&amp;Копіювати зображення</translation> </message> <message> <source>Save QR Code</source> <translation>Зберегти QR-код</translation> </message> <message> <source>PNG Image (*.png)</source> <translation type="unfinished"/> </message> </context> <context> <name>RPCConsole</name> <message> <source>Client name</source> <translation>Назва клієнту</translation> </message> <message> <source>N/A</source> <translation>Н/Д</translation> </message> <message> <source>Client version</source> <translation>Версія клієнту</translation> </message> <message> <source>&amp;Information</source> <translation>&amp;Інформація</translation> </message> <message> <source>Debug window</source> <translation type="unfinished"/> </message> <message> <source>General</source> <translation type="unfinished"/> </message> <message> <source>Using OpenSSL version</source> <translation>Використовується OpenSSL версії</translation> </message> <message> <source>Startup time</source> <translation>Час запуску</translation> </message> <message> <source>Network</source> <translation>Мережа</translation> </message> <message> <source>Name</source> <translation>Ім’я</translation> </message> <message> <source>Number of connections</source> <translation>Кількість підключень</translation> </message> <message> <source>Block chain</source> <translation>Ланцюг блоків</translation> </message> <message> <source>Current number of blocks</source> <translation>Поточне число блоків</translation> </message> <message> <source>Estimated total blocks</source> <translation>Розрахункове число блоків</translation> </message> <message> <source>Last block time</source> <translation>Час останнього блоку</translation> </message> <message> <source>&amp;Open</source> <translation>Відкрити</translation> </message> <message> <source>&amp;Console</source> <translation>Консоль</translation> </message> <message> <source>&amp;Network Traffic</source> <translation type="unfinished"/> </message> <message> <source>&amp;Clear</source> <translation type="unfinished"/> </message> <message> <source>Totals</source> <translation type="unfinished"/> </message> <message> <source>In:</source> <translation type="unfinished"/> </message> <message> <source>Out:</source> <translation type="unfinished"/> </message> <message> <source>Build date</source> <translation>Дата збирання</translation> </message> <message> <source>Debug log file</source> <translation>Файл звіту зневадження</translation> </message> <message> <source>Open the UnattainiumV2 debug log file from the current data directory. This can take a few seconds for large log files.</source> <translation>Відкрийте налагодження файл журналу UnattainiumV2 з поточного каталогу даних. Це може зайняти кілька секунд для великих файлів журналів.</translation> </message> <message> <source>Clear console</source> <translation>Очистити консоль</translation> </message> <message> <source>Welcome to the UnattainiumV2 RPC console.</source> <translation>Вітаємо у консолі UnattainiumV2 RPC.</translation> </message> <message> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Використовуйте стрілки вгору вниз для навігації по історії, і &lt;b&gt;Ctrl-L&lt;/b&gt; для очищення екрана.</translation> </message> <message> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Наберіть &lt;b&gt;help&lt;/b&gt; для перегляду доступних команд.</translation> </message> <message> <source>%1 B</source> <translation type="unfinished"/> </message> <message> <source>%1 KB</source> <translation type="unfinished"/> </message> <message> <source>%1 MB</source> <translation type="unfinished"/> </message> <message> <source>%1 GB</source> <translation type="unfinished"/> </message> <message> <source>%1 m</source> <translation type="unfinished"/> </message> <message> <source>%1 h</source> <translation type="unfinished"/> </message> <message> <source>%1 h %2 m</source> <translation type="unfinished"/> </message> </context> <context> <name>ReceiveCoinsDialog</name> <message> <source>&amp;Amount:</source> <translation>&amp;Кількість:</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Мітка:</translation> </message> <message> <source>&amp;Message:</source> <translation>&amp;Повідомлення:</translation> </message> <message> <source>Reuse one of the previously used receiving addresses. Reusing addresses has security and privacy issues. Do not use this unless re-generating a payment request made before.</source> <translation type="unfinished"/> </message> <message> <source>R&amp;euse an existing receiving address (not recommended)</source> <translation type="unfinished"/> </message> <message> <source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the UnattainiumV2 network.</source> <translation type="unfinished"/> </message> <message> <source>An optional label to associate with the new receiving address.</source> <translation type="unfinished"/> </message> <message> <source>Use this form to request payments. All fields are &lt;b&gt;optional&lt;/b&gt;.</source> <translation type="unfinished"/> </message> <message> <source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source> <translation type="unfinished"/> </message> <message> <source>Clear all fields of the form.</source> <translation>Очистити всі поля в формі</translation> </message> <message> <source>Clear</source> <translation>Очистити</translation> </message> <message> <source>Requested payments history</source> <translation type="unfinished"/> </message> <message> <source>&amp;Request payment</source> <translation type="unfinished"/> </message> <message> <source>Show the selected request (does the same as double clicking an entry)</source> <translation type="unfinished"/> </message> <message> <source>Show</source> <translation type="unfinished"/> </message> <message> <source>Remove the selected entries from the list</source> <translation type="unfinished"/> </message> <message> <source>Remove</source> <translation type="unfinished"/> </message> <message> <source>Copy label</source> <translation>Скопіювати мітку</translation> </message> <message> <source>Copy message</source> <translation type="unfinished"/> </message> <message> <source>Copy amount</source> <translation>Копіювати кількість</translation> </message> </context> <context> <name>ReceiveRequestDialog</name> <message> <source>QR Code</source> <translation>QR-Код</translation> </message> <message> <source>Copy &amp;URI</source> <translation type="unfinished"/> </message> <message> <source>Copy &amp;Address</source> <translation type="unfinished"/> </message> <message> <source>&amp;Save Image...</source> <translation>&amp;Зберегти зображення...</translation> </message> <message> <source>Request payment to %1</source> <translation type="unfinished"/> </message> <message> <source>Payment information</source> <translation type="unfinished"/> </message> <message> <source>URI</source> <translation type="unfinished"/> </message> <message> <source>Address</source> <translation>Адреса</translation> </message> <message> <source>Amount</source> <translation>Кількість</translation> </message> <message> <source>Label</source> <translation>Назва</translation> </message> <message> <source>Message</source> <translation>Повідомлення</translation> </message> <message> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>Кінцевий URI занадто довгий, спробуйте зменшити текст для мітки / повідомлення.</translation> </message> <message> <source>Error encoding URI into QR Code.</source> <translation>Помилка при кодуванні URI в QR-код.</translation> </message> </context> <context> <name>RecentRequestsTableModel</name> <message> <source>Date</source> <translation>Дата</translation> </message> <message> <source>Label</source> <translation>Назва</translation> </message> <message> <source>Message</source> <translation>Повідомлення</translation> </message> <message> <source>Amount</source> <translation>Кількість</translation> </message> <message> <source>(no label)</source> <translation>(немає назви)</translation> </message> <message> <source>(no message)</source> <translation type="unfinished"/> </message> <message> <source>(no amount)</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsDialog</name> <message> <source>Send Coins</source> <translation>Відправити</translation> </message> <message> <source>Coin Control Features</source> <translation type="unfinished"/> </message> <message> <source>Inputs...</source> <translation type="unfinished"/> </message> <message> <source>automatically selected</source> <translation type="unfinished"/> </message> <message> <source>Insufficient funds!</source> <translation type="unfinished"/> </message> <message> <source>Quantity:</source> <translation type="unfinished"/> </message> <message> <source>Bytes:</source> <translation type="unfinished"/> </message> <message> <source>Amount:</source> <translation>Кількість:</translation> </message> <message> <source>Priority:</source> <translation type="unfinished"/> </message> <message> <source>Fee:</source> <translation type="unfinished"/> </message> <message> <source>Low Output:</source> <translation type="unfinished"/> </message> <message> <source>After Fee:</source> <translation type="unfinished"/> </message> <message> <source>Change:</source> <translation type="unfinished"/> </message> <message> <source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source> <translation type="unfinished"/> </message> <message> <source>Custom change address</source> <translation type="unfinished"/> </message> <message> <source>Send to multiple recipients at once</source> <translation>Відправити на декілька адрес</translation> </message> <message> <source>Add &amp;Recipient</source> <translation>Дод&amp;ати одержувача</translation> </message> <message> <source>Clear all fields of the form.</source> <translation>Очистити всі поля в формі</translation> </message> <message> <source>Clear &amp;All</source> <translation>Очистити &amp;все</translation> </message> <message> <source>Balance:</source> <translation>Баланс:</translation> </message> <message> <source>Confirm the send action</source> <translation>Підтвердити відправлення</translation> </message> <message> <source>S&amp;end</source> <translation>&amp;Відправити</translation> </message> <message> <source>Confirm send coins</source> <translation>Підтвердіть відправлення</translation> </message> <message> <source>%1 to %2</source> <translation type="unfinished"/> </message> <message> <source>Copy quantity</source> <translation type="unfinished"/> </message> <message> <source>Copy amount</source> <translation>Копіювати кількість</translation> </message> <message> <source>Copy fee</source> <translation type="unfinished"/> </message> <message> <source>Copy after fee</source> <translation type="unfinished"/> </message> <message> <source>Copy bytes</source> <translation type="unfinished"/> </message> <message> <source>Copy priority</source> <translation type="unfinished"/> </message> <message> <source>Copy low output</source> <translation type="unfinished"/> </message> <message> <source>Copy change</source> <translation type="unfinished"/> </message> <message> <source>Total Amount %1 (= %2)</source> <translation type="unfinished"/> </message> <message> <source>or</source> <translation type="unfinished"/> </message> <message> <source>The recipient address is not valid, please recheck.</source> <translation>Адреса отримувача невірна, будь ласка перепровірте.</translation> </message> <message> <source>The amount to pay must be larger than 0.</source> <translation>Кількість монет для відправлення повинна бути більшою 0.</translation> </message> <message> <source>The amount exceeds your balance.</source> <translation>Кількість монет для відправлення перевищує ваш баланс.</translation> </message> <message> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>Сума перевищить ваш баланс, якщо комісія %1 буде додана до вашої транзакції.</translation> </message> <message> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>Знайдено адресу що дублюється. Відправлення на кожну адресу дозволяється лише один раз на кожну операцію переказу.</translation> </message> <message> <source>Transaction creation failed!</source> <translation type="unfinished"/> </message> <message> <source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation type="unfinished"/> </message> <message> <source>Warning: Invalid UnattainiumV2 address</source> <translation type="unfinished"/> </message> <message> <source>(no label)</source> <translation>(немає назви)</translation> </message> <message> <source>Warning: Unknown change address</source> <translation type="unfinished"/> </message> <message> <source>Are you sure you want to send?</source> <translation>Ви впевнені, що хочете відправити?</translation> </message> <message> <source>added as transaction fee</source> <translation type="unfinished"/> </message> <message> <source>Payment request expired</source> <translation type="unfinished"/> </message> <message> <source>Invalid payment address %1</source> <translation type="unfinished"/> </message> </context> <context> <name>SendCoinsEntry</name> <message> <source>A&amp;mount:</source> <translation>&amp;Кількість:</translation> </message> <message> <source>Pay &amp;To:</source> <translation>&amp;Отримувач:</translation> </message> <message> <source>The address to send the payment to (e.g. DNS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Звернення до відправити платіж на (наприклад DNS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <source>Enter a label for this address to add it to your address book</source> <translation>Введіть мітку для цієї адреси для додавання її в адресну книгу</translation> </message> <message> <source>&amp;Label:</source> <translation>&amp;Мітка:</translation> </message> <message> <source>Choose previously used address</source> <translation>Обрати ранiш використовувану адресу</translation> </message> <message> <source>This is a normal payment.</source> <translation type="unfinished"/> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Вставити адресу</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Remove this entry</source> <translation type="unfinished"/> </message> <message> <source>Message:</source> <translation>Повідомлення:</translation> </message> <message> <source>This is a verified payment request.</source> <translation type="unfinished"/> </message> <message> <source>Enter a label for this address to add it to the list of used addresses</source> <translation type="unfinished"/> </message> <message> <source>A message that was attached to the unattainiumv2: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the UnattainiumV2 network.</source> <translation type="unfinished"/> </message> <message> <source>This is an unverified payment request.</source> <translation type="unfinished"/> </message> <message> <source>Pay To:</source> <translation type="unfinished"/> </message> <message> <source>Memo:</source> <translation>Нотатка:</translation> </message> </context> <context> <name>ShutdownWindow</name> <message> <source>UnattainiumV2 Core is shutting down...</source> <translation type="unfinished"/> </message> <message> <source>Do not shut down the computer until this window disappears.</source> <translation type="unfinished"/> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <source>Signatures - Sign / Verify a Message</source> <translation>Підписи - Підпис / Перевірка повідомлення</translation> </message> <message> <source>&amp;Sign Message</source> <translation>&amp;Підписати повідомлення</translation> </message> <message> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Ви можете зареєструватися повідомленнями зі своїми адресами, щоб довести, що ви є їх власником. Будьте обережні, щоб не підписувати що-небудь неясне, як фішинг-атак може спробувати обдурити вас в підписанні вашу особистість до них. Тільки підписати повністю докладні свідчення, користувач зобов&apos;язується.</translation> </message> <message> <source>The address to sign the message with (e.g. DNS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Введіть адресу UnattainiumV2 (наприклад DNS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <source>Choose previously used address</source> <translation>Обрати ранiш використовувану адресу</translation> </message> <message> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <source>Paste address from clipboard</source> <translation>Вставити адресу</translation> </message> <message> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <source>Enter the message you want to sign here</source> <translation>Введіть повідомлення, яке ви хочете підписати тут</translation> </message> <message> <source>Signature</source> <translation>Підпис</translation> </message> <message> <source>Copy the current signature to the system clipboard</source> <translation>Копіювати поточну сигнатуру до системного буферу обміну</translation> </message> <message> <source>Sign the message to prove you own this UnattainiumV2 address</source> <translation>Підпишіть повідомлення щоб довести, що ви є власником цієї адреси</translation> </message> <message> <source>Sign &amp;Message</source> <translation>&amp;Підписати повідомлення</translation> </message> <message> <source>Reset all sign message fields</source> <translation>Скинути всі поля підпису повідомлення</translation> </message> <message> <source>Clear &amp;All</source> <translation>Очистити &amp;все</translation> </message> <message> <source>&amp;Verify Message</source> <translation>Перевірити повідомлення</translation> </message> <message> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Введіть адресу підписання, повідомлення (забезпечення копіюванні розриви рядків, прогалини, вкладки і т.д. точно) і підпис нижче, щоб перевірити повідомлення. Будьте обережні, щоб не читати далі в підпис, ніж те, що в підписаному самого повідомлення, щоб уникнути обдурять нападу чоловік-в-середній.</translation> </message> <message> <source>The address the message was signed with (e.g. DNS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Введіть адресу UnattainiumV2 (наприклад DNS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <source>Verify the message to ensure it was signed with the specified UnattainiumV2 address</source> <translation>Перевірте повідомлення для впевненості, що воно підписано вказаною UnattainiumV2-адресою</translation> </message> <message> <source>Verify &amp;Message</source> <translation>Перевірити повідомлення</translation> </message> <message> <source>Reset all verify message fields</source> <translation>Скинути всі поля перевірки повідомлення</translation> </message> <message> <source>Enter a UnattainiumV2 address (e.g. DNS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</source> <translation>Введіть адресу UnattainiumV2 (наприклад DNS17iag9jJgTHD1VXjvLCEnZuQ3rJDE9L)</translation> </message> <message> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Натисніть кнопку «Підписати повідомлення», для отримання підпису</translation> </message> <message> <source>The entered address is invalid.</source> <translation>Введена нечинна адреса.</translation> </message> <message> <source>Please check the address and try again.</source> <translation>Будь ласка, перевірте адресу та спробуйте ще.</translation> </message> <message> <source>The entered address does not refer to a key.</source> <translation>Введений адреса не відноситься до ключа.</translation> </message> <message> <source>Wallet unlock was cancelled.</source> <translation>Розблокування Гаманець був скасований.</translation> </message> <message> <source>Private key for the entered address is not available.</source> <translation>Приватний ключ для введеної адреси недоступний. </translation> </message> <message> <source>Message signing failed.</source> <translation>Не вдалося підписати повідомлення.</translation> </message> <message> <source>Message signed.</source> <translation>Повідомлення підписано.</translation> </message> <message> <source>The signature could not be decoded.</source> <translation>Підпис не можливо декодувати.</translation> </message> <message> <source>Please check the signature and try again.</source> <translation>Будь ласка, перевірте підпис та спробуйте ще.</translation> </message> <message> <source>The signature did not match the message digest.</source> <translation>Підпис не відповідає дайджест повідомлення.</translation> </message> <message> <source>Message verification failed.</source> <translation>Не вдалося перевірити повідомлення.</translation> </message> <message> <source>Message verified.</source> <translation>Повідомлення перевірено.</translation> </message> </context> <context> <name>SplashScreen</name> <message> <source>UnattainiumV2 Core</source> <translation>UnattainiumV2 Ядро</translation> </message> <message> <source>The Bitcoin and UnattainiumV2 Core developers</source> <translation type="unfinished"/> </message> <message> <source>[testnet]</source> <translation>[тестова мережа]</translation> </message> </context> <context> <name>TrafficGraphWidget</name> <message> <source>KB/s</source> <translation>КБ/с</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <source>Open until %1</source> <translation>Відкрити до %1</translation> </message> <message> <source>conflicted</source> <translation type="unfinished"/> </message> <message> <source>%1/offline</source> <translation>%1/поза інтернетом</translation> </message> <message> <source>%1/unconfirmed</source> <translation>%1/не підтверджено</translation> </message> <message> <source>%1 confirmations</source> <translation>%1 підтверджень</translation> </message> <message> <source>Status</source> <translation>Статус</translation> </message> <message numerus="yes"> <source>, broadcast through %n node(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <source>Date</source> <translation>Дата</translation> </message> <message> <source>Source</source> <translation>Джерело</translation> </message> <message> <source>Generated</source> <translation>Згенеровано</translation> </message> <message> <source>From</source> <translation>Відправник</translation> </message> <message> <source>To</source> <translation>Отримувач</translation> </message> <message> <source>own address</source> <translation>Власна адреса</translation> </message> <message> <source>label</source> <translation>Мітка</translation> </message> <message> <source>Credit</source> <translation>Кредит</translation> </message> <message numerus="yes"> <source>matures in %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <source>not accepted</source> <translation>не прийнято</translation> </message> <message> <source>Debit</source> <translation>Дебет</translation> </message> <message> <source>Transaction fee</source> <translation>Комісія за транзакцію</translation> </message> <message> <source>Net amount</source> <translation>Загальна сума</translation> </message> <message> <source>Message</source> <translation>Повідомлення</translation> </message> <message> <source>Comment</source> <translation>Коментар</translation> </message> <message> <source>Transaction ID</source> <translation>ID транзакції</translation> </message> <message> <source>Merchant</source> <translation type="unfinished"/> </message> <message> <source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation type="unfinished"/> </message> <message> <source>Debug information</source> <translation>Отладочна інформація</translation> </message> <message> <source>Transaction</source> <translation>Транзакція</translation> </message> <message> <source>Inputs</source> <translation>витрати</translation> </message> <message> <source>Amount</source> <translation>Кількість</translation> </message> <message> <source>true</source> <translation>true</translation> </message> <message> <source>false</source> <translation>false</translation> </message> <message> <source>, has not been successfully broadcast yet</source> <translation>, ще не було успішно розіслано</translation> </message> <message numerus="yes"> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <source>unknown</source> <translation>невідомий</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <source>Transaction details</source> <translation>Деталі транзакції</translation> </message> <message> <source>This pane shows a detailed description of the transaction</source> <translation>Даний діалог показує детальну статистику по вибраній транзакції</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <source>Date</source> <translation>Дата</translation> </message> <message> <source>Type</source> <translation>Тип</translation> </message> <message> <source>Address</source> <translation>Адреса</translation> </message> <message> <source>Amount</source> <translation>Кількість</translation> </message> <message> <source>Immature (%1 confirmations, will be available after %2)</source> <translation type="unfinished"/> </message> <message numerus="yes"> <source>Open for %n more block(s)</source> <translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation> </message> <message> <source>Open until %1</source> <translation>Відкрити до %1</translation> </message> <message> <source>Confirmed (%1 confirmations)</source> <translation>Підтверджено (%1 підтверджень)</translation> </message> <message> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Цей блок не був отриманий жодними іншими вузлами і, ймовірно, не буде прийнятий!</translation> </message> <message> <source>Generated but not accepted</source> <translation>Згенеровано, але не підтверджено</translation> </message> <message> <source>Offline</source> <translation type="unfinished"/> </message> <message> <source>Unconfirmed</source> <translation type="unfinished"/> </message> <message> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation type="unfinished"/> </message> <message> <source>Conflicted</source> <translation type="unfinished"/> </message> <message> <source>Received with</source> <translation>Отримано</translation> </message> <message> <source>Received from</source> <translation>Отримано від</translation> </message> <message> <source>Sent to</source> <translation>Відправлено</translation> </message> <message> <source>Payment to yourself</source> <translation>Відправлено собі</translation> </message> <message> <source>Mined</source> <translation>Добуто</translation> </message> <message> <source>(n/a)</source> <translation>(недоступно)</translation> </message> <message> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Статус транзакції. Наведіть вказівник на це поле, щоб показати кількість підтверджень.</translation> </message> <message> <source>Date and time that the transaction was received.</source> <translation>Дата і час, коли транзакцію було отримано.</translation> </message> <message> <source>Type of transaction.</source> <translation>Тип транзакції.</translation> </message> <message> <source>Destination address of transaction.</source> <translation>Адреса отримувача транзакції.</translation> </message> <message> <source>Amount removed from or added to balance.</source> <translation>Сума, додана чи знята з балансу.</translation> </message> </context> <context> <name>TransactionView</name> <message> <source>All</source> <translation>Всі</translation> </message> <message> <source>Today</source> <translation>Сьогодні</translation> </message> <message> <source>This week</source> <translation>На цьому тижні</translation> </message> <message> <source>This month</source> <translation>На цьому місяці</translation> </message> <message> <source>Last month</source> <translation>Минулого місяця</translation> </message> <message> <source>This year</source> <translation>Цього року</translation> </message> <message> <source>Range...</source> <translation>Проміжок...</translation> </message> <message> <source>Received with</source> <translation>Отримані на</translation> </message> <message> <source>Sent to</source> <translation>Відправлені на</translation> </message> <message> <source>To yourself</source> <translation>Відправлені собі</translation> </message> <message> <source>Mined</source> <translation>Добуті</translation> </message> <message> <source>Other</source> <translation>Інше</translation> </message> <message> <source>Enter address or label to search</source> <translation>Введіть адресу чи мітку для пошуку</translation> </message> <message> <source>Min amount</source> <translation>Мінімальна сума</translation> </message> <message> <source>Copy address</source> <translation>Скопіювати адресу</translation> </message> <message> <source>Copy label</source> <translation>Скопіювати мітку</translation> </message> <message> <source>Copy amount</source> <translation>Копіювати кількість</translation> </message> <message> <source>Copy transaction ID</source> <translation>Копіювати ID транзакції </translation> </message> <message> <source>Edit label</source> <translation>Редагувати мітку</translation> </message> <message> <source>Show transaction details</source> <translation>Показати деталі транзакції</translation> </message> <message> <source>Export Transaction History</source> <translation type="unfinished"/> </message> <message> <source>Exporting Failed</source> <translation type="unfinished"/> </message> <message> <source>There was an error trying to save the transaction history to %1.</source> <translation type="unfinished"/> </message> <message> <source>Exporting Successful</source> <translation type="unfinished"/> </message> <message> <source>The transaction history was successfully saved to %1.</source> <translation type="unfinished"/> </message> <message> <source>Comma separated file (*.csv)</source> <translation>Файли, розділені комою (*.csv)</translation> </message> <message> <source>Confirmed</source> <translation>Підтверджені</translation> </message> <message> <source>Date</source> <translation>Дата</translation> </message> <message> <source>Type</source> <translation>Тип</translation> </message> <message> <source>Label</source> <translation>Мітка</translation> </message> <message> <source>Address</source> <translation>Адреса</translation> </message> <message> <source>Amount</source> <translation>Кількість</translation> </message> <message> <source>ID</source> <translation>Ідентифікатор</translation> </message> <message> <source>Range:</source> <translation>Діапазон від:</translation> </message> <message> <source>to</source> <translation>до</translation> </message> </context> <context> <name>WalletFrame</name> <message> <source>No wallet has been loaded.</source> <translation type="unfinished"/> </message> </context> <context> <name>WalletModel</name> <message> <source>Send Coins</source> <translation>Відправити</translation> </message> </context> <context> <name>WalletView</name> <message> <source>&amp;Export</source> <translation>&amp; Експорт</translation> </message> <message> <source>Export the data in the current tab to a file</source> <translation>Експортувати дані з поточної вкладки в файл</translation> </message> <message> <source>Backup Wallet</source> <translation>Зробити резервне копіювання гаманця</translation> </message> <message> <source>Wallet Data (*.dat)</source> <translation>Данi гаманця (*.dat)</translation> </message> <message> <source>Backup Failed</source> <translation>Помилка резервного копіювання</translation> </message> <message> <source>There was an error trying to save the wallet data to %1.</source> <translation type="unfinished"/> </message> <message> <source>The wallet data was successfully saved to %1.</source> <translation type="unfinished"/> </message> <message> <source>Backup Successful</source> <translation>Успішне створення резервної копії</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <source>Usage:</source> <translation>Використання:</translation> </message> <message> <source>List commands</source> <translation>Список команд</translation> </message> <message> <source>Get help for a command</source> <translation>Отримати довідку по команді</translation> </message> <message> <source>Options:</source> <translation>Параметри:</translation> </message> <message> <source>Specify configuration file (default: unattainiumv2.conf)</source> <translation>Вкажіть файл конфігурації (типово: unattainiumv2.conf)</translation> </message> <message> <source>Specify pid file (default: unattainiumv2d.pid)</source> <translation>Вкажіть pid-файл (типово: unattainiumv2d.pid)</translation> </message> <message> <source>Specify data directory</source> <translation>Вкажіть робочий каталог</translation> </message> <message> <source>Listen for connections on &lt;port&gt; (default: 8333 or testnet: 18333)</source> <translation>Чекати на з&apos;єднання на &lt;port&gt; (типово: 8333 або тестова мережа: 18333)</translation> </message> <message> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Підтримувати не більше &lt;n&gt; зв&apos;язків з колегами (типово: 125)</translation> </message> <message> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Підключитись до вузла, щоб отримати список адрес інших учасників та від&apos;єднатись</translation> </message> <message> <source>Specify your own public address</source> <translation>Вкажіть вашу власну публічну адресу</translation> </message> <message> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Поріг відключення неправильно під&apos;єднаних пірів (типово: 100)</translation> </message> <message> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Максимальній розмір вхідного буферу на одне з&apos;єднання (типово: 86400)</translation> </message> <message> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation type="unfinished"/> </message> <message> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 8332 or testnet: 18332)</source> <translation>Прослуховувати &lt;port&gt; для JSON-RPC-з&apos;єднань (типово: 8332 або тестова мережа: 18332)</translation> </message> <message> <source>Accept command line and JSON-RPC commands</source> <translation>Приймати команди із командного рядка та команди JSON-RPC</translation> </message> <message> <source>UnattainiumV2 Core RPC client version</source> <translation type="unfinished"/> </message> <message> <source>Run in the background as a daemon and accept commands</source> <translation>Запустити в фоновому режимі (як демон) та приймати команди</translation> </message> <message> <source>Use the test network</source> <translation>Використовувати тестову мережу</translation> </message> <message> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Приймати з&apos;єднання ззовні (за замовчуванням: 1, якщо ні-проксі або-з&apos;єднання)</translation> </message> <message> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=unattainiumv2rpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;UnattainiumV2 Alert&quot; [email protected] </source> <translation type="unfinished"/> </message> <message> <source>Acceptable ciphers (default: TLSv1.2+HIGH:TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!3DES:@STRENGTH)</source> <translation type="unfinished"/> </message> <message> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation type="unfinished"/> </message> <message> <source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source> <translation>Прив&apos;язка до даного адресою і завжди слухати на ньому. Використовуйте [господаря]: позначення порту для IPv6</translation> </message> <message> <source>Continuously rate-limit free transactions to &lt;n&gt;*1000 bytes per minute (default:15)</source> <translation type="unfinished"/> </message> <message> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly. This is intended for regression testing tools and app development.</source> <translation>Введіть тестовий режим регресії, яка використовує спеціальну ланцюг, в якій блоки можуть бути вирішені негайно. Це призначено для регресійного тестування інструментів і розробки додатків.</translation> </message> <message> <source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source> <translation type="unfinished"/> </message> <message> <source>Error: Listening for incoming connections failed (listen returned error %d)</source> <translation type="unfinished"/> </message> <message> <source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Помилка: транзакцію було відхилено. Це може статись, якщо декілька монет з вашого гаманця вже використані, наприклад, якщо ви використовуєте одну копію гаманця (wallet.dat), а монети були використані з іншої копії, але не позначені як використані в цій.</translation> </message> <message> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source> <translation type="unfinished"/> </message> <message> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation type="unfinished"/> </message> <message> <source>Fees smaller than this are considered zero fee (for transaction creation) (default:</source> <translation type="unfinished"/> </message> <message> <source>Flush database activity from memory pool to disk log every &lt;n&gt; megabytes (default: 100)</source> <translation type="unfinished"/> </message> <message> <source>How thorough the block verification of -checkblocks is (0-4, default: 3)</source> <translation type="unfinished"/> </message> <message> <source>In this mode -genproclimit controls how many blocks are generated immediately.</source> <translation type="unfinished"/> </message> <message> <source>Set the number of script verification threads (%u to %d, 0 = auto, &lt;0 = leave that many cores free, default: %d)</source> <translation type="unfinished"/> </message> <message> <source>Set the processor limit for when generation is on (-1 = unlimited, default: -1)</source> <translation type="unfinished"/> </message> <message> <source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source> <translation>Це тест збірки попередньою версією - використовуйте на свій страх і ризик - не використовувати для гірничодобувних або торгових додатків</translation> </message> <message> <source>Unable to bind to %s on this computer. UnattainiumV2 Core is probably already running.</source> <translation type="unfinished"/> </message> <message> <source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: -proxy)</source> <translation type="unfinished"/> </message> <message> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Увага: встановлено занадто велику комісію (-paytxfee). Комісія зніматиметься кожен раз коли ви проводитимете транзакції.</translation> </message> <message> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong UnattainiumV2 will not work properly.</source> <translation>Увага: будь ласка, перевірте дату і час на своєму комп&apos;ютері. Якщо ваш годинник йде неправильно, UnattainiumV2 може працювати некоректно.</translation> </message> <message> <source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source> <translation type="unfinished"/> </message> <message> <source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source> <translation type="unfinished"/> </message> <message> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Увага: помилка читання wallet.dat! Всі ключі прочитано коректно, але дані транзакцій чи записи адресної книги можуть бути пропущені, або пошкоджені.</translation> </message> <message> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Увага: файл wallet.dat пошкоджено, дані врятовано! Оригінальний wallet.dat збережено як wallet.{timestamp}.bak до %s; якщо Ваш баланс чи транзакції неправильні, Ви можете відновити їх з резервної копії. </translation> </message> <message> <source>(default: 1)</source> <translation type="unfinished"/> </message> <message> <source>(default: wallet.dat)</source> <translation type="unfinished"/> </message> <message> <source>&lt;category&gt; can be:</source> <translation type="unfinished"/> </message> <message> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Спроба відновити закриті ключі з пошкодженого wallet.dat</translation> </message> <message> <source>UnattainiumV2 Core Daemon</source> <translation type="unfinished"/> </message> <message> <source>Block creation options:</source> <translation>Опції створення блоку:</translation> </message> <message> <source>Clear list of wallet transactions (diagnostic tool; implies -rescan)</source> <translation type="unfinished"/> </message> <message> <source>Connect only to the specified node(s)</source> <translation>Підключитись лише до вказаного вузла</translation> </message> <message> <source>Connect through SOCKS proxy</source> <translation type="unfinished"/> </message> <message> <source>Connect to JSON-RPC on &lt;port&gt; (default: 8332 or testnet: 18332)</source> <translation type="unfinished"/> </message> <message> <source>Connection options:</source> <translation type="unfinished"/> </message> <message> <source>Corrupted block database detected</source> <translation>Виявлено пошкоджений блок бази даних</translation> </message> <message> <source>Debugging/Testing options:</source> <translation type="unfinished"/> </message> <message> <source>Disable safemode, override a real safe mode event (default: 0)</source> <translation type="unfinished"/> </message> <message> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Відкрийте власну IP-адресу (за замовчуванням: 1, коли не чує і-externalip)</translation> </message> <message> <source>Do not load the wallet and disable wallet RPC calls</source> <translation type="unfinished"/> </message> <message> <source>Do you want to rebuild the block database now?</source> <translation>Ви хочете перебудувати базу даних блоку зараз?</translation> </message> <message> <source>Error initializing block database</source> <translation>Помилка ініціалізації бази даних блоків</translation> </message> <message> <source>Error initializing wallet database environment %s!</source> <translation type="unfinished"/> </message> <message> <source>Error loading block database</source> <translation>Помилка завантаження бази даних блоків</translation> </message> <message> <source>Error opening block database</source> <translation>Помилка відкриття блоку бази даних </translation> </message> <message> <source>Error: Disk space is low!</source> <translation>Помилка: Мало вільного місця на диску!</translation> </message> <message> <source>Error: Wallet locked, unable to create transaction!</source> <translation>Помилка: Гаманець заблокований, неможливо створити транзакцію!</translation> </message> <message> <source>Error: system error: </source> <translation>Помилка: системна помилка: </translation> </message> <message> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Не вдалося слухати на будь-якому порту. Використовуйте-слухати = 0, якщо ви хочете цього.</translation> </message> <message> <source>Failed to read block info</source> <translation>Не вдалося розпізнати блок інформації </translation> </message> <message> <source>Failed to read block</source> <translation>Не вдалося розпізнати блок</translation> </message> <message> <source>Failed to sync block index</source> <translation>Не вдалося синхронізувати індекс блоку </translation> </message> <message> <source>Failed to write block index</source> <translation>Не вдалося записати індекс блоку</translation> </message> <message> <source>Failed to write block info</source> <translation>Не вдалося записати інформацію індекса</translation> </message> <message> <source>Failed to write block</source> <translation>Не вдалося записати блок</translation> </message> <message> <source>Failed to write file info</source> <translation>Не вдалося записати інформацію файла</translation> </message> <message> <source>Failed to write to coin database</source> <translation>Не вдалося записати до бази даних монет</translation> </message> <message> <source>Failed to write transaction index</source> <translation>Не вдалося записати індекс транзакції</translation> </message> <message> <source>Failed to write undo data</source> <translation>Не вдалося записати скасувати дані</translation> </message> <message> <source>Fee per kB to add to transactions you send</source> <translation>Комісія за Кб</translation> </message> <message> <source>Fees smaller than this are considered zero fee (for relaying) (default:</source> <translation type="unfinished"/> </message> <message> <source>Find peers using DNS lookup (default: 1 unless -connect)</source> <translation>Знайти однолітків за допомогою DNS пошук (за замовчуванням: 1, якщо-ні підключити)</translation> </message> <message> <source>Force safe mode (default: 0)</source> <translation type="unfinished"/> </message> <message> <source>Generate coins (default: 0)</source> <translation>Генерація монети (за замовчуванням: 0)</translation> </message> <message> <source>How many blocks to check at startup (default: 288, 0 = all)</source> <translation>Скільки блоків перевіряти під час запуску (типово: 288, 0 = всі)</translation> </message> <message> <source>If &lt;category&gt; is not supplied, output all debugging information.</source> <translation type="unfinished"/> </message> <message> <source>Importing...</source> <translation type="unfinished"/> </message> <message> <source>Incorrect or no genesis block found. Wrong datadir for network?</source> <translation type="unfinished"/> </message> <message> <source>Invalid -onion address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <source>Not enough file descriptors available.</source> <translation>Бракує дескрипторів файлів, доступних.</translation> </message> <message> <source>Prepend debug output with timestamp (default: 1)</source> <translation type="unfinished"/> </message> <message> <source>RPC client options:</source> <translation type="unfinished"/> </message> <message> <source>Rebuild block chain index from current blk000??.dat files</source> <translation type="unfinished"/> </message> <message> <source>Select SOCKS version for -proxy (4 or 5, default: 5)</source> <translation type="unfinished"/> </message> <message> <source>Set database cache size in megabytes (%d to %d, default: %d)</source> <translation type="unfinished"/> </message> <message> <source>Set maximum block size in bytes (default: %d)</source> <translation type="unfinished"/> </message> <message> <source>Set the number of threads to service RPC calls (default: 4)</source> <translation>Встановити число потоків до дзвінків служба RPC (за замовчуванням: 4)</translation> </message> <message> <source>Specify wallet file (within data directory)</source> <translation type="unfinished"/> </message> <message> <source>Spend unconfirmed change when sending transactions (default: 1)</source> <translation type="unfinished"/> </message> <message> <source>This is intended for regression testing tools and app development.</source> <translation type="unfinished"/> </message> <message> <source>Usage (deprecated, use unattainiumv2-cli):</source> <translation>Використання (застаріле, використовуйте unattainiumv2-cli):</translation> </message> <message> <source>Verifying blocks...</source> <translation>Перевірка блоків...</translation> </message> <message> <source>Verifying wallet...</source> <translation>Перевірка гаманця... </translation> </message> <message> <source>Wait for RPC server to start</source> <translation type="unfinished"/> </message> <message> <source>Wallet %s resides outside data directory %s</source> <translation type="unfinished"/> </message> <message> <source>Wallet options:</source> <translation type="unfinished"/> </message> <message> <source>Warning: Deprecated argument -debugnet ignored, use -debug=net</source> <translation type="unfinished"/> </message> <message> <source>You need to rebuild the database using -reindex to change -txindex</source> <translation type="unfinished"/> </message> <message> <source>Imports blocks from external blk000??.dat file</source> <translation>Імпорт блоків з зовнішнього файлу blk000??.dat</translation> </message> <message> <source>Cannot obtain a lock on data directory %s. UnattainiumV2 Core is probably already running.</source> <translation type="unfinished"/> </message> <message> <source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source> <translation type="unfinished"/> </message> <message> <source>Output debugging information (default: 0, supplying &lt;category&gt; is optional)</source> <translation type="unfinished"/> </message> <message> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source> <translation type="unfinished"/> </message> <message> <source>Information</source> <translation>Інформація</translation> </message> <message> <source>Invalid amount for -minrelaytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <source>Invalid amount for -mintxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <source>Limit size of signature cache to &lt;n&gt; entries (default: 50000)</source> <translation type="unfinished"/> </message> <message> <source>Log transaction priority and fee per kB when mining blocks (default: 0)</source> <translation type="unfinished"/> </message> <message> <source>Maintain a full transaction index (default: 0)</source> <translation>Підтримувати індекс повний транзакцій (за замовчуванням: 0)</translation> </message> <message> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Максимальний буфер, &lt;n&gt;*1000 байт (типово: 5000)</translation> </message> <message> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Максимальній розмір вихідного буферу на одне з&apos;єднання, &lt;n&gt;*1000 байт (типово: 1000)</translation> </message> <message> <source>Only accept block chain matching built-in checkpoints (default: 1)</source> <translation>Тільки приймати блок відповідності ланцюга вбудованих контрольно-пропускних пунктів (за замовчуванням: 1)</translation> </message> <message> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Підключити тільки до вузлів в мережі &lt;net&gt; (IPv4, IPv6 або Tor)</translation> </message> <message> <source>Print block on startup, if found in block index</source> <translation type="unfinished"/> </message> <message> <source>Print block tree on startup (default: 0)</source> <translation type="unfinished"/> </message> <message> <source>RPC SSL options: (see the UnattainiumV2 Wiki for SSL setup instructions)</source> <translation type="unfinished"/> </message> <message> <source>RPC server options:</source> <translation type="unfinished"/> </message> <message> <source>Randomly drop 1 of every &lt;n&gt; network messages</source> <translation type="unfinished"/> </message> <message> <source>Randomly fuzz 1 of every &lt;n&gt; network messages</source> <translation type="unfinished"/> </message> <message> <source>Run a thread to flush wallet periodically (default: 1)</source> <translation type="unfinished"/> </message> <message> <source>SSL options: (see the UnattainiumV2 Wiki for SSL setup instructions)</source> <translation>Параметри SSL: (див. UnattainiumV2 Wiki для налаштування SSL)</translation> </message> <message> <source>Send command to UnattainiumV2 Core</source> <translation type="unfinished"/> </message> <message> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Відсилати налагоджувальну інформацію на консоль, а не у файл debug.log</translation> </message> <message> <source>Set minimum block size in bytes (default: 0)</source> <translation>Встановити мінімальний розмір блоку у байтах (типово: 0)</translation> </message> <message> <source>Sets the DB_PRIVATE flag in the wallet db environment (default: 1)</source> <translation type="unfinished"/> </message> <message> <source>Show all debugging options (usage: --help -help-debug)</source> <translation type="unfinished"/> </message> <message> <source>Show benchmark information (default: 0)</source> <translation type="unfinished"/> </message> <message> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Стискати файл debug.log під час старту клієнта (типово: 1 коли відсутутній параметр -debug)</translation> </message> <message> <source>Signing transaction failed</source> <translation>Підписання угоди не вдалося</translation> </message> <message> <source>Specify connection timeout in milliseconds (default: 5000)</source> <translation>Вказати тайм-аут підключення у мілісекундах (типово: 5000)</translation> </message> <message> <source>Start UnattainiumV2 Core Daemon</source> <translation type="unfinished"/> </message> <message> <source>System error: </source> <translation>Системна помилка: </translation> </message> <message> <source>Transaction amount too small</source> <translation>Сума угоди занадто малий</translation> </message> <message> <source>Transaction amounts must be positive</source> <translation>Суми угоди має бути позитивним</translation> </message> <message> <source>Transaction too large</source> <translation>Угода занадто великий</translation> </message> <message> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Намагатись використовувати UPnP для відображення порту, що прослуховується на роутері (default: 0)</translation> </message> <message> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Намагатись використовувати UPnP для відображення порту, що прослуховується на роутері (default: 1 when listening)</translation> </message> <message> <source>Username for JSON-RPC connections</source> <translation>Ім&apos;я користувача для JSON-RPC-з&apos;єднань</translation> </message> <message> <source>Warning</source> <translation>Попередження</translation> </message> <message> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Увага: Поточна версія застаріла, необхідне оновлення!</translation> </message> <message> <source>Zapping all transactions from wallet...</source> <translation type="unfinished"/> </message> <message> <source>on startup</source> <translation type="unfinished"/> </message> <message> <source>version</source> <translation>версія</translation> </message> <message> <source>wallet.dat corrupt, salvage failed</source> <translation>wallet.dat пошкоджено, відновлення не вдалося</translation> </message> <message> <source>Password for JSON-RPC connections</source> <translation>Пароль для JSON-RPC-з&apos;єднань</translation> </message> <message> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Дозволити JSON-RPC-з&apos;єднання з вказаної IP-адреси</translation> </message> <message> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Відправляти команди на вузол, запущений на &lt;ip&gt; (типово: 127.0.0.1)</translation> </message> <message> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Виконати команду, коли з&apos;явиться новий блок (%s в команді змінюється на хеш блоку)</translation> </message> <message> <source>Upgrade wallet to latest format</source> <translation>Модернізувати гаманець до останнього формату</translation> </message> <message> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Встановити розмір пулу ключів &lt;n&gt; (типово: 100)</translation> </message> <message> <source>Rescan the block chain for missing wallet transactions</source> <translation>Пересканувати ланцюжок блоків, в пошуку втрачених транзакцій</translation> </message> <message> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Використовувати OpenSSL (https) для JSON-RPC-з&apos;єднань</translation> </message> <message> <source>Server certificate file (default: server.cert)</source> <translation>Файл сертифіката сервера (типово: server.cert)</translation> </message> <message> <source>Server private key (default: server.pem)</source> <translation>Закритий ключ сервера (типово: server.pem)</translation> </message> <message> <source>This help message</source> <translation>Дана довідка</translation> </message> <message> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Неможливо прив&apos;язати до порту %s на цьому комп&apos;ютері (bind returned error %d, %s)</translation> </message> <message> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Дозволити пошук в DNS для команд -addnode, -seednode та -connect</translation> </message> <message> <source>Loading addresses...</source> <translation>Завантаження адрес...</translation> </message> <message> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Помилка при завантаженні wallet.dat: Гаманець пошкоджено</translation> </message> <message> <source>Error loading wallet.dat: Wallet requires newer version of UnattainiumV2</source> <translation>Помилка при завантаженні wallet.dat: Гаманець потребує новішої версії Біткоін-клієнта</translation> </message> <message> <source>Wallet needed to be rewritten: restart UnattainiumV2 to complete</source> <translation>Потрібно перезаписати гаманець: перезапустіть Біткоін-клієнт для завершення</translation> </message> <message> <source>Error loading wallet.dat</source> <translation>Помилка при завантаженні wallet.dat</translation> </message> <message> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Помилка в адресі проксі-сервера: «%s»</translation> </message> <message> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Невідома мережа вказана в -onlynet: «%s»</translation> </message> <message> <source>Unknown -socks proxy version requested: %i</source> <translation>В параметрі -socks запитується невідома версія: %i</translation> </message> <message> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation type="unfinished"/> </message> <message> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Помилка у величині комісії -paytxfee=&lt;amount&gt;: «%s»</translation> </message> <message> <source>Invalid amount</source> <translation>Некоректна кількість</translation> </message> <message> <source>Insufficient funds</source> <translation>Недостатньо коштів</translation> </message> <message> <source>Loading block index...</source> <translation>Завантаження індексу блоків...</translation> </message> <message> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Додати вузол до підключення і лишити його відкритим</translation> </message> <message> <source>Loading wallet...</source> <translation>Завантаження гаманця...</translation> </message> <message> <source>Cannot downgrade wallet</source> <translation>Не вдається понизити версію гаманця</translation> </message> <message> <source>Cannot write default address</source> <translation>Неможливо записати типову адресу</translation> </message> <message> <source>Rescanning...</source> <translation>Сканування...</translation> </message> <message> <source>Done loading</source> <translation>Завантаження завершене</translation> </message> <message> <source>To use the %s option</source> <translation>Щоб використати опцію %s</translation> </message> <message> <source>Error</source> <translation>Помилка</translation> </message> <message> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Ви мусите встановити rpcpassword=&lt;password&gt; в файлі конфігурації: %s Якщо файл не існує, створіть його із правами тільки для читання власником (owner-readable-only).</translation> </message> </context> </TS><|fim▁end|>
<|file_name|>test_utils.py<|end_file_name|><|fim▁begin|>from decimal import Decimal import ddt from babel.numbers import format_currency from django.conf import settings from django.utils.translation import get_language, to_locale from oscar.core.loading import get_model from oscar.test.factories import * # pylint:disable=wildcard-import,unused-wildcard-import from ecommerce.courses.tests.factories import CourseFactory from ecommerce.extensions.catalogue.tests.mixins import CourseCatalogTestMixin from ecommerce.extensions.offer.utils import _remove_exponent_and_trailing_zeros, format_benefit_value from ecommerce.tests.testcases import TestCase Benefit = get_model('offer', 'Benefit') @ddt.ddt class UtilTests(CourseCatalogTestMixin, TestCase): def setUp(self): super(UtilTests, self).setUp() self.course = CourseFactory() self.verified_seat = self.course.create_or_update_seat('verified', False, 100, self.partner) self.stock_record = StockRecord.objects.filter(product=self.verified_seat).first() self.seat_price = self.stock_record.price_excl_tax self._range = RangeFactory(products=[self.verified_seat, ]) self.percentage_benefit = BenefitFactory(type=Benefit.PERCENTAGE, range=self._range, value=35.00) self.value_benefit = BenefitFactory(type=Benefit.FIXED, range=self._range, value=self.seat_price - 10) def test_format_benefit_value(self): """ format_benefit_value(benefit) should format benefit value based on benefit type """ benefit_value = format_benefit_value(self.percentage_benefit) self.assertEqual(benefit_value, '35%') benefit_value = format_benefit_value(self.value_benefit) expected_benefit = format_currency( Decimal((self.seat_price - 10)), settings.OSCAR_DEFAULT_CURRENCY, format=u'#,##0.00',<|fim▁hole|> locale=to_locale(get_language())) self.assertEqual(benefit_value, '${expected_benefit}'.format(expected_benefit=expected_benefit)) @ddt.data( ('1.0', '1'), ('5000.0', '5000'), ('1.45000', '1.45'), ('5000.40000', '5000.4'), ) @ddt.unpack def test_remove_exponent_and_trailing_zeros(self, value, expected): """ _remove_exponent_and_trailing_zeros(decimal) should remove exponent and trailing zeros from decimal number """ decimal = _remove_exponent_and_trailing_zeros(Decimal(value)) self.assertEqual(decimal, Decimal(expected))<|fim▁end|>
<|file_name|>datastructures.py<|end_file_name|><|fim▁begin|>import copy from collections import OrderedDict from collections.abc import Mapping class OrderedSet: """ A set which keeps the ordering of the inserted items. Currently backs onto OrderedDict. """ def __init__(self, iterable=None): self.dict = OrderedDict.fromkeys(iterable or ()) def add(self, item): self.dict[item] = None def remove(self, item): del self.dict[item] def discard(self, item): try: self.remove(item) except KeyError: pass def __iter__(self): return iter(self.dict) def __contains__(self, item): return item in self.dict def __bool__(self): return bool(self.dict) def __len__(self): return len(self.dict) class MultiValueDictKeyError(KeyError): pass class MultiValueDict(dict): """ A subclass of dictionary customized to handle multiple values for the<|fim▁hole|> same key. >>> d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']}) >>> d['name'] 'Simon' >>> d.getlist('name') ['Adrian', 'Simon'] >>> d.getlist('doesnotexist') [] >>> d.getlist('doesnotexist', ['Adrian', 'Simon']) ['Adrian', 'Simon'] >>> d.get('lastname', 'nonexistent') 'nonexistent' >>> d.setlist('lastname', ['Holovaty', 'Willison']) This class exists to solve the irritating problem raised by cgi.parse_qs, which returns a list for every key, even though most Web forms submit single name-value pairs. """ def __init__(self, key_to_list_mapping=()): super().__init__(key_to_list_mapping) def __repr__(self): return "<%s: %s>" % (self.__class__.__name__, super().__repr__()) def __getitem__(self, key): """ Return the last data value for this key, or [] if it's an empty list; raise KeyError if not found. """ try: list_ = super().__getitem__(key) except KeyError: raise MultiValueDictKeyError(key) try: return list_[-1] except IndexError: return [] def __setitem__(self, key, value): super().__setitem__(key, [value]) def __copy__(self): return self.__class__([ (k, v[:]) for k, v in self.lists() ]) def __deepcopy__(self, memo): result = self.__class__() memo[id(self)] = result for key, value in dict.items(self): dict.__setitem__(result, copy.deepcopy(key, memo), copy.deepcopy(value, memo)) return result def __getstate__(self): return {**self.__dict__, '_data': {k: self._getlist(k) for k in self}} def __setstate__(self, obj_dict): data = obj_dict.pop('_data', {}) for k, v in data.items(): self.setlist(k, v) self.__dict__.update(obj_dict) def get(self, key, default=None): """ Return the last data value for the passed key. If key doesn't exist or value is an empty list, return `default`. """ try: val = self[key] except KeyError: return default if val == []: return default return val def _getlist(self, key, default=None, force_list=False): """ Return a list of values for the key. Used internally to manipulate values list. If force_list is True, return a new copy of values. """ try: values = super().__getitem__(key) except KeyError: if default is None: return [] return default else: if force_list: values = list(values) if values is not None else None return values def getlist(self, key, default=None): """ Return the list of values for the key. If key doesn't exist, return a default value. """ return self._getlist(key, default, force_list=True) def setlist(self, key, list_): super().__setitem__(key, list_) def setdefault(self, key, default=None): if key not in self: self[key] = default # Do not return default here because __setitem__() may store # another value -- QueryDict.__setitem__() does. Look it up. return self[key] def setlistdefault(self, key, default_list=None): if key not in self: if default_list is None: default_list = [] self.setlist(key, default_list) # Do not return default_list here because setlist() may store # another value -- QueryDict.setlist() does. Look it up. return self._getlist(key) def appendlist(self, key, value): """Append an item to the internal list associated with key.""" self.setlistdefault(key).append(value) def items(self): """ Yield (key, value) pairs, where value is the last item in the list associated with the key. """ for key in self: yield key, self[key] def lists(self): """Yield (key, list) pairs.""" return iter(super().items()) def values(self): """Yield the last value on every key list.""" for key in self: yield self[key] def copy(self): """Return a shallow copy of this object.""" return copy.copy(self) def update(self, *args, **kwargs): """Extend rather than replace existing key lists.""" if len(args) > 1: raise TypeError("update expected at most 1 argument, got %d" % len(args)) if args: other_dict = args[0] if isinstance(other_dict, MultiValueDict): for key, value_list in other_dict.lists(): self.setlistdefault(key).extend(value_list) else: try: for key, value in other_dict.items(): self.setlistdefault(key).append(value) except TypeError: raise ValueError("MultiValueDict.update() takes either a MultiValueDict or dictionary") for key, value in kwargs.items(): self.setlistdefault(key).append(value) def dict(self): """Return current object as a dict with singular values.""" return {key: self[key] for key in self} class ImmutableList(tuple): """ A tuple-like object that raises useful errors when it is asked to mutate. Example:: >>> a = ImmutableList(range(5), warning="You cannot mutate this.") >>> a[3] = '4' Traceback (most recent call last): ... AttributeError: You cannot mutate this. """ def __new__(cls, *args, warning='ImmutableList object is immutable.', **kwargs): self = tuple.__new__(cls, *args, **kwargs) self.warning = warning return self def complain(self, *wargs, **kwargs): if isinstance(self.warning, Exception): raise self.warning else: raise AttributeError(self.warning) # All list mutation functions complain. __delitem__ = complain __delslice__ = complain __iadd__ = complain __imul__ = complain __setitem__ = complain __setslice__ = complain append = complain extend = complain insert = complain pop = complain remove = complain sort = complain reverse = complain class DictWrapper(dict): """ Wrap accesses to a dictionary so that certain values (those starting with the specified prefix) are passed through a function before being returned. The prefix is removed before looking up the real value. Used by the SQL construction code to ensure that values are correctly quoted before being used. """ def __init__(self, data, func, prefix): super().__init__(data) self.func = func self.prefix = prefix def __getitem__(self, key): """ Retrieve the real value after stripping the prefix string (if present). If the prefix is present, pass the value through self.func before returning, otherwise return the raw value. """ use_func = key.startswith(self.prefix) if use_func: key = key[len(self.prefix):] value = super().__getitem__(key) if use_func: return self.func(value) return value def _destruct_iterable_mapping_values(data): for i, elem in enumerate(data): if len(elem) != 2: raise ValueError( 'dictionary update sequence element #{} has ' 'length {}; 2 is required.'.format(i, len(elem)) ) if not isinstance(elem[0], str): raise ValueError('Element key %r invalid, only strings are allowed' % elem[0]) yield tuple(elem) class CaseInsensitiveMapping(Mapping): """ Mapping allowing case-insensitive key lookups. Original case of keys is preserved for iteration and string representation. Example:: >>> ci_map = CaseInsensitiveMapping({'name': 'Jane'}) >>> ci_map['Name'] Jane >>> ci_map['NAME'] Jane >>> ci_map['name'] Jane >>> ci_map # original case preserved {'name': 'Jane'} """ def __init__(self, data): if not isinstance(data, Mapping): data = {k: v for k, v in _destruct_iterable_mapping_values(data)} self._store = {k.lower(): (k, v) for k, v in data.items()} def __getitem__(self, key): return self._store[key.lower()][1] def __len__(self): return len(self._store) def __eq__(self, other): return isinstance(other, Mapping) and { k.lower(): v for k, v in self.items() } == { k.lower(): v for k, v in other.items() } def __iter__(self): return (original_key for original_key, value in self._store.values()) def __repr__(self): return repr({key: value for key, value in self._store.values()}) def copy(self): return self<|fim▁end|>