prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>activation.py<|end_file_name|><|fim▁begin|># Copyright (C) 2014 Universidad Politecnica de Madrid # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from keystoneclient import base from keystoneclient import exceptions from keystoneclient.v3.contrib.user_registration.utils import REGISTRATION_PATH class Activation(base.Resource): pass class ActivationManager(base.CrudManager):<|fim▁hole|> resource_class = Activation collection_key = 'activate' key = 'activation_key' base_url = REGISTRATION_PATH def new_activation_key(self, user): base_url = self.base_url + '/users/{0}/'.format(base.getid(user)) return super(ActivationManager, self).get(base_url) def activate_user(self, user, activation_key): base_url = self.base_url + '/users/{0}/'.format(base.getid(user)) return super(ActivationManager, self).update(base_url, activation_key)<|fim▁end|>
"""Manager class for activating user in the USER REGISTRATION extension for Keystone. For more information about the extension: https://www.github.com/ging/keystone """
<|file_name|>ftplib.py<|end_file_name|><|fim▁begin|>"""An FTP client class and some helper functions. Based on RFC 959: File Transfer Protocol (FTP), by J. Postel and J. Reynolds Example: >>> from ftplib import FTP >>> ftp = FTP('ftp.python.org') # connect to host, default port >>> ftp.login() # default, i.e.: user anonymous, passwd anonymous@ '230 Guest login ok, access restrictions apply.' >>> ftp.retrlines('LIST') # list directory contents total 9 drwxr-xr-x 8 root wheel 1024 Jan 3 1994 . drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .. drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr -rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg '226 Transfer complete.' >>> ftp.quit() '221 Goodbye.' >>> A nice test that reveals some of the network dialogue would be: python ftplib.py -d localhost -l -p -l """ # # Changes and improvements suggested by Steve Majewski. # Modified by Jack to work on the mac. # Modified by Siebren to support docstrings and PASV. # Modified by Phil Schwartz to add storbinary and storlines callbacks. 
# import os import sys # Import SOCKS module if it exists, else standard socket module socket try: import SOCKS; socket = SOCKS; del SOCKS # import SOCKS as socket from socket import getfqdn; socket.getfqdn = getfqdn; del getfqdn except ImportError: import socket from socket import _GLOBAL_DEFAULT_TIMEOUT __all__ = ["FTP","Netrc"] # Magic number from <socket.h> MSG_OOB = 0x1 # Process data out of band # The standard FTP server control port FTP_PORT = 21 # Exception raised when an error or invalid response is received class Error(Exception): pass class error_reply(Error): pass # unexpected [123]xx reply class error_temp(Error): pass # 4xx errors class error_perm(Error): pass # 5xx errors class error_proto(Error): pass # response does not begin with [1-5] # All exceptions (hopefully) that may be raised here and that aren't # (always) programming errors on our side all_errors = (Error, IOError, EOFError) # Line terminators (we always output CRLF, but accept any of CRLF, CR, LF) CRLF = '\r\n' # The class itself class FTP: '''An FTP client class. To create a connection, call the class using these arguments: host, user, passwd, acct, timeout The first four arguments are all strings, and have default value ''. timeout must be numeric and defaults to None if not passed, meaning that no timeout will be set on any ftp socket(s) If a timeout is passed, then this is now the default timeout for all ftp socket operations for this instance. Then use self.connect() with optional host and port argument. To download a file, use ftp.retrlines('RETR ' + filename), or ftp.retrbinary() with slightly different arguments. To upload a file, use ftp.storlines() or ftp.storbinary(), which have an open file as argument (see their definitions below for details). The download/upload functions first issue appropriate TYPE and PORT or PASV commands. 
''' debugging = 0 host = '' port = FTP_PORT sock = None file = None welcome = None passiveserver = 1 # Initialization method (called by class instantiation). # Initialize host to localhost, port to standard ftp port # Optional arguments are host (for connect()), # and user, passwd, acct (for login()) def __init__(self, host='', user='', passwd='', acct='', timeout=_GLOBAL_DEFAULT_TIMEOUT): self.timeout = timeout if host: self.connect(host) if user: self.login(user, passwd, acct) def connect(self, host='', port=0, timeout=-999): '''Connect to host. Arguments are: - host: hostname to connect to (string, default previous host) - port: port to connect to (integer, default previous port) ''' if host != '': self.host = host if port > 0: self.port = port if timeout != -999: self.timeout = timeout self.sock = socket.create_connection((self.host, self.port), self.timeout) self.af = self.sock.family self.file = self.sock.makefile('rb') self.welcome = self.getresp() return self.welcome def getwelcome(self): '''Get the welcome message from the server. (this is read and squirreled away by connect())''' if self.debugging: print '*welcome*', self.sanitize(self.welcome) return self.welcome def set_debuglevel(self, level): '''Set the debugging level. The required argument level means: 0: no debugging output (default) 1: print commands and responses but not body text etc. 2: also print raw lines read and sent before stripping CR/LF''' self.debugging = level debug = set_debuglevel def set_pasv(self, val): '''Use passive or active mode for data transfers. 
With a false argument, use the normal PORT mode, With a true argument, use the PASV command.''' self.passiveserver = val # Internal: "sanitize" a string for printing def sanitize(self, s): if s[:5] == 'pass ' or s[:5] == 'PASS ': i = len(s) while i > 5 and s[i-1] in '\r\n': i = i-1 s = s[:5] + '*'*(i-5) + s[i:] return repr(s) # Internal: send one line to the server, appending CRLF def putline(self, line): line = line + CRLF if self.debugging > 1: print '*put*', self.sanitize(line) self.sock.sendall(line) # Internal: send one command to the server (through putline()) def putcmd(self, line): if self.debugging: print '*cmd*', self.sanitize(line) self.putline(line) # Internal: return one line from the server, stripping CRLF. # Raise EOFError if the connection is closed def getline(self): line = self.file.readline() if self.debugging > 1: print '*get*', self.sanitize(line) if not line: raise EOFError if line[-2:] == CRLF: line = line[:-2] elif line[-1:] in CRLF: line = line[:-1] return line # Internal: get a response from the server, which may possibly # consist of multiple lines. Return a single string with no # trailing CRLF. If the response consists of multiple lines, # these are separated by '\n' characters in the string def getmultiline(self): line = self.getline() if line[3:4] == '-': code = line[:3] while 1: nextline = self.getline() line = line + ('\n' + nextline) if nextline[:3] == code and \ nextline[3:4] != '-': break return line # Internal: get a response from the server. 
# Raise various errors if the response indicates an error def getresp(self): resp = self.getmultiline() if self.debugging: print '*resp*', self.sanitize(resp) self.lastresp = resp[:3] c = resp[:1] if c in ('1', '2', '3'): return resp if c == '4': raise error_temp, resp if c == '5': raise error_perm, resp raise error_proto, resp def voidresp(self): """Expect a response beginning with '2'.""" resp = self.getresp() if resp[0] != '2': raise error_reply, resp return resp def abort(self): '''Abort a file transfer. Uses out-of-band data. This does not follow the procedure from the RFC to send Telnet IP and Synch; that doesn't seem to work with the servers I've tried. Instead, just send the ABOR command as OOB data.''' line = 'ABOR' + CRLF if self.debugging > 1: print '*put urgent*', self.sanitize(line) self.sock.sendall(line, MSG_OOB) resp = self.getmultiline() if resp[:3] not in ('426', '226'): raise error_proto, resp def sendcmd(self, cmd): '''Send a command and return the response.''' self.putcmd(cmd) return self.getresp() def voidcmd(self, cmd): """Send a command and expect a response beginning with '2'.""" self.putcmd(cmd) return self.voidresp() def sendport(self, host, port): '''Send a PORT command with the current host and the given port number. 
''' hbytes = host.split('.') pbytes = [repr(port//256), repr(port%256)] bytes = hbytes + pbytes cmd = 'PORT ' + ','.join(bytes) return self.voidcmd(cmd) def sendeprt(self, host, port): '''Send a EPRT command with the current host and the given port number.''' af = 0 if self.af == socket.AF_INET: af = 1 if self.af == socket.AF_INET6: af = 2 if af == 0: raise error_proto, 'unsupported address family' fields = ['', repr(af), host, repr(port), ''] cmd = 'EPRT ' + '|'.join(fields) return self.voidcmd(cmd) def makeport(self): '''Create a new socket and send a PORT command for it.''' msg = "getaddrinfo returns an empty list" sock = None for res in socket.getaddrinfo(None, 0, self.af, socket.SOCK_STREAM, 0, socket.AI_PASSIVE): af, socktype, proto, canonname, sa = res try: sock = socket.socket(af, socktype, proto) sock.bind(sa) except socket.error, msg: if sock: sock.close() sock = None continue break if not sock: raise socket.error, msg sock.listen(1) port = sock.getsockname()[1] # Get proper port host = self.sock.getsockname()[0] # Get proper host if self.af == socket.AF_INET: resp = self.sendport(host, port) else: resp = self.sendeprt(host, port) return sock def makepasv(self): if self.af == socket.AF_INET: host, port = parse227(self.sendcmd('PASV')) else: host, port = parse229(self.sendcmd('EPSV'), self.sock.getpeername()) return host, port def ntransfercmd(self, cmd, rest=None): """Initiate a transfer over the data connection. If the transfer is active, send a port command and the transfer command, and accept the connection. If the server is passive, send a pasv command, connect to it, and start the transfer command. Either way, return the socket for the connection and the expected size of the transfer. The expected size may be None if it could not be determined. Optional `rest' argument can be a string that is sent as the argument to a REST command. This is essentially a server marker used to tell the server to skip over any data up to the given marker. 
""" size = None if self.passiveserver: host, port = self.makepasv() conn = socket.create_connection((host, port), self.timeout) if rest is not None: self.sendcmd("REST %s" % rest) resp = self.sendcmd(cmd) # Some servers apparently send a 200 reply to # a LIST or STOR command, before the 150 reply # (and way before the 226 reply). This seems to # be in violation of the protocol (which only allows # 1xx or error messages for LIST), so we just discard # this response. if resp[0] == '2': resp = self.getresp() if resp[0] != '1': raise error_reply, resp else: sock = self.makeport() if rest is not None: self.sendcmd("REST %s" % rest) resp = self.sendcmd(cmd) # See above. if resp[0] == '2': resp = self.getresp() if resp[0] != '1': raise error_reply, resp conn, sockaddr = sock.accept() if resp[:3] == '150': # this is conditional in case we received a 125 size = parse150(resp) return conn, size def transfercmd(self, cmd, rest=None): """Like ntransfercmd() but returns only the socket.""" return self.ntransfercmd(cmd, rest)[0] def login(self, user = '', passwd = '', acct = ''): '''Login, default anonymous.''' if not user: user = 'anonymous' if not passwd: passwd = '' if not acct: acct = '' if user == 'anonymous' and passwd in ('', '-'): # If there is no anonymous ftp password specified # then we'll just use anonymous@ # We don't send any other thing because: # - We want to remain anonymous # - We want to stop SPAM # - We don't want to let ftp sites to discriminate by the user, # host or country. passwd = passwd + 'anonymous@' resp = self.sendcmd('USER ' + user) if resp[0] == '3': resp = self.sendcmd('PASS ' + passwd) if resp[0] == '3': resp = self.sendcmd('ACCT ' + acct) if resp[0] != '2': raise error_reply, resp return resp def retrbinary(self, cmd, callback, blocksize=8192, rest=None): """Retrieve data in binary mode. A new port is created for you. Args: cmd: A RETR command. callback: A single parameter callable to be called on each block of data read. 
blocksize: The maximum number of bytes to read from the socket at one time. [default: 8192] rest: Passed to transfercmd(). [default: None] Returns: The response code. """ self.voidcmd('TYPE I') conn = self.transfercmd(cmd, rest) while 1: data = conn.recv(blocksize) if not data: break callback(data) conn.close() return self.voidresp() def retrlines(self, cmd, callback = None): """Retrieve data in line mode. A new port is created for you. Args: cmd: A RETR, LIST, NLST, or MLSD command. callback: An optional single parameter callable that is called for each line with the trailing CRLF stripped. [default: print_line()] Returns: The response code. """ if callback is None: callback = print_line resp = self.sendcmd('TYPE A') conn = self.transfercmd(cmd) fp = conn.makefile('rb') while 1: line = fp.readline() if self.debugging > 2: print '*retr*', repr(line) if not line: break if line[-2:] == CRLF: line = line[:-2] elif line[-1:] == '\n': line = line[:-1] callback(line) fp.close() conn.close() return self.voidresp() def storbinary(self, cmd, fp, blocksize=8192, callback=None): """Store a file in binary mode. A new port is created for you. Args: cmd: A STOR command. fp: A file-like object with a read(num_bytes) method. blocksize: The maximum data size to read from fp and send over the connection at once. [default: 8192] callback: An optional single parameter callable that is called on on each block of data after it is sent. [default: None] Returns: The response code. """ self.voidcmd('TYPE I') conn = self.transfercmd(cmd) while 1: buf = fp.read(blocksize) if not buf: break conn.sendall(buf) if callback: callback(buf) conn.close() return self.voidresp() def storlines(self, cmd, fp, callback=None): """Store a file in line mode. A new port is created for you. Args: cmd: A STOR command. fp: A file-like object with a readline() method. callback: An optional single parameter callable that is called on on each line after it is sent. [default: None] Returns: The response code. 
""" self.voidcmd('TYPE A') conn = self.transfercmd(cmd) while 1: buf = fp.readline() if not buf: break if buf[-2:] != CRLF: if buf[-1] in CRLF: buf = buf[:-1] buf = buf + CRLF conn.sendall(buf) if callback: callback(buf) conn.close() return self.voidresp() def acct(self, password): '''Send new account name.''' cmd = 'ACCT ' + password return self.voidcmd(cmd) def nlst(self, *args): '''Return a list of files in a given directory (default the current).''' cmd = 'NLST' for arg in args: cmd = cmd + (' ' + arg) files = [] self.retrlines(cmd, files.append) return files def dir(self, *args): '''List a directory in long form. By default list current directory to stdout. Optional last argument is callback function; all non-empty arguments before it are concatenated to the LIST command. (This *should* only be used for a pathname.)''' cmd = 'LIST' func = None if args[-1:] and type(args[-1]) != type(''): args, func = args[:-1], args[-1] for arg in args: if arg: cmd = cmd + (' ' + arg) self.retrlines(cmd, func) def rename(self, fromname, toname): '''Rename a file.''' resp = self.sendcmd('RNFR ' + fromname) if resp[0] != '3': raise error_reply, resp return self.voidcmd('RNTO ' + toname) def delete(self, filename): '''Delete a file.''' resp = self.sendcmd('DELE ' + filename) if resp[:3] in ('250', '200'): return resp elif resp[:1] == '5': raise error_perm, resp else: raise error_reply, resp def cwd(self, dirname): '''Change to a directory.''' if dirname == '..': try: return self.voidcmd('CDUP') except error_perm, msg: if msg.args[0][:3] != '500': raise elif dirname == '': dirname = '.' 
# does nothing, but could return error cmd = 'CWD ' + dirname return self.voidcmd(cmd) def size(self, filename): '''Retrieve the size of a file.''' # The SIZE command is defined in RFC-3659 resp = self.sendcmd('SIZE ' + filename) if resp[:3] == '213': s = resp[3:].strip() try: return int(s) except (OverflowError, ValueError): return long(s) def mkd(self, dirname): '''Make a directory, return its full pathname.''' resp = self.sendcmd('MKD ' + dirname) return parse257(resp) def rmd(self, dirname): '''Remove a directory.''' return self.voidcmd('RMD ' + dirname) def pwd(self): '''Return current working directory.''' resp = self.sendcmd('PWD') return parse257(resp) def quit(self): '''Quit, and close the connection.''' resp = self.voidcmd('QUIT') self.close() return resp def close(self): '''Close the connection without assuming anything about it.''' if self.file: self.file.close() self.sock.close() self.file = self.sock = None _150_re = None def parse150(resp): '''Parse the '150' response for a RETR request. Returns the expected transfer size or None; size is not guaranteed to be present in the 150 message. ''' if resp[:3] != '150': raise error_reply, resp global _150_re if _150_re is None: import re _150_re = re.compile("150 .* \((\d+) bytes\)", re.IGNORECASE) m = _150_re.match(resp) if not m: return None s = m.group(1) try: return int(s) except (OverflowError, ValueError): return long(s) _227_re = None def parse227(resp): '''Parse the '227' response for a PASV request. 
Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)' Return ('host.addr.as.numbers', port#) tuple.''' if resp[:3] != '227': raise error_reply, resp global _227_re if _227_re is None: import re _227_re = re.compile(r'(\d+),(\d+),(\d+),(\d+),(\d+),(\d+)') m = _227_re.search(resp) if not m: raise error_proto, resp numbers = m.groups() host = '.'.join(numbers[:4]) port = (int(numbers[4]) << 8) + int(numbers[5]) return host, port def parse229(resp, peer): '''Parse the '229' response for a EPSV request. Raises error_proto if it does not contain '(|||port|)' Return ('host.addr.as.numbers', port#) tuple.''' if resp[:3] != '229': raise error_reply, resp left = resp.find('(') if left < 0: raise error_proto, resp right = resp.find(')', left + 1) if right < 0: raise error_proto, resp # should contain '(|||port|)' if resp[left + 1] != resp[right - 1]: raise error_proto, resp parts = resp[left + 1:right].split(resp[left+1]) if len(parts) != 5: raise error_proto, resp host = peer[0] port = int(parts[3]) return host, port def parse257(resp): '''Parse the '257' response for a MKD or PWD request. This is a response to a MKD or PWD request: a directory name. Returns the directoryname in the 257 reply.''' if resp[:3] != '257': raise error_reply, resp if resp[3:5] != ' "': return '' # Not compliant to RFC 959, but UNIX ftpd does this dirname = '' i = 5 n = len(resp) while i < n: c = resp[i] i = i+1 if c == '"': if i >= n or resp[i] != '"': break i = i+1 dirname = dirname + c return dirname def print_line(line): '''Default retrlines callback to print a line.''' print line def ftpcp(source, sourcename, target, targetname = '', type = 'I'): '''Copy file from one FTP-instance to another.''' if not targetname: targetname = sourcename type = 'TYPE ' + type source.voidcmd(type) target.voidcmd(type) sourcehost, sourceport = parse227(source.sendcmd('PASV')) target.sendport(sourcehost, sourceport) # RFC 959: the user must "listen" [...] BEFORE sending the # transfer request. 
# So: STOR before RETR, because here the target is a "user". treply = target.sendcmd('STOR ' + targetname) if treply[:3] not in ('125', '150'): raise error_proto # RFC 959 sreply = source.sendcmd('RETR ' + sourcename) if sreply[:3] not in ('125', '150'): raise error_proto # RFC 959 source.voidresp() target.voidresp() class Netrc: """Class to parse & provide access to 'netrc' format files. See the netrc(4) man page for information on the file format. WARNING: This class is obsolete -- use module netrc instead. """ __defuser = None __defpasswd = None __defacct = None def __init__(self, filename=None): if filename is None: if "HOME" in os.environ: filename = os.path.join(os.environ["HOME"], ".netrc") else: raise IOError, \ "specify file to load or set $HOME" self.__hosts = {} self.__macros = {} fp = open(filename, "r") in_macro = 0 while 1: line = fp.readline() if not line: break if in_macro and line.strip(): macro_lines.append(line) continue elif in_macro: self.__macros[macro_name] = tuple(macro_lines) in_macro = 0 words = line.split() host = user = passwd = acct = None default = 0 i = 0 while i < len(words): w1 = words[i] if i+1 < len(words): w2 = words[i + 1] else: w2 = None <|fim▁hole|> i = i + 1 elif w1 == 'login' and w2: user = w2 i = i + 1 elif w1 == 'password' and w2: passwd = w2 i = i + 1 elif w1 == 'account' and w2: acct = w2 i = i + 1 elif w1 == 'macdef' and w2: macro_name = w2 macro_lines = [] in_macro = 1 break i = i + 1 if default: self.__defuser = user or self.__defuser self.__defpasswd = passwd or self.__defpasswd self.__defacct = acct or self.__defacct if host: if host in self.__hosts: ouser, opasswd, oacct = \ self.__hosts[host] user = user or ouser passwd = passwd or opasswd acct = acct or oacct self.__hosts[host] = user, passwd, acct fp.close() def get_hosts(self): """Return a list of hosts mentioned in the .netrc file.""" return self.__hosts.keys() def get_account(self, host): """Returns login information for the named host. 
The return value is a triple containing userid, password, and the accounting field. """ host = host.lower() user = passwd = acct = None if host in self.__hosts: user, passwd, acct = self.__hosts[host] user = user or self.__defuser passwd = passwd or self.__defpasswd acct = acct or self.__defacct return user, passwd, acct def get_macros(self): """Return a list of all defined macro names.""" return self.__macros.keys() def get_macro(self, macro): """Return a sequence of lines which define a named macro.""" return self.__macros[macro] def test(): '''Test program. Usage: ftp [-d] [-r[file]] host [-l[dir]] [-d[dir]] [-p] [file] ... -d dir -l list -p password ''' if len(sys.argv) < 2: print test.__doc__ sys.exit(0) debugging = 0 rcfile = None while sys.argv[1] == '-d': debugging = debugging+1 del sys.argv[1] if sys.argv[1][:2] == '-r': # get name of alternate ~/.netrc file: rcfile = sys.argv[1][2:] del sys.argv[1] host = sys.argv[1] ftp = FTP(host) ftp.set_debuglevel(debugging) userid = passwd = acct = '' try: netrc = Netrc(rcfile) except IOError: if rcfile is not None: sys.stderr.write("Could not open account file" " -- using anonymous login.") else: try: userid, passwd, acct = netrc.get_account(host) except KeyError: # no account for host sys.stderr.write( "No account -- using anonymous login.") ftp.login(userid, passwd, acct) for file in sys.argv[2:]: if file[:2] == '-l': ftp.dir(file[2:]) elif file[:2] == '-d': cmd = 'CWD' if file[2:]: cmd = cmd + ' ' + file[2:] resp = ftp.sendcmd(cmd) elif file == '-p': ftp.set_pasv(not ftp.passiveserver) else: ftp.retrbinary('RETR ' + file, \ sys.stdout.write, 1024) ftp.quit() if __name__ == '__main__': test()<|fim▁end|>
if w1 == 'default': default = 1 elif w1 == 'machine' and w2: host = w2.lower()
<|file_name|>albino-build.rs<|end_file_name|><|fim▁begin|><|fim▁hole|> #[phase(plugin, link)] extern crate log; extern crate getopts; extern crate whitebase; extern crate albino; use getopts::Matches; use std::os; use std::io::IoError; use whitebase::syntax::{Compiler, Assembly, Brainfuck, DT, Ook, Whitespace}; use albino::command::{BuildCommand, BuildExecutable}; use albino::util; use albino::util::Target; fn build<B: Buffer, W: Writer, C: Compiler>(input: &mut B, output: &mut W, syntax: C) { match syntax.compile(input, output) { Err(e) => { println!("{}", e); os::set_exit_status(1); } _ => (), } } struct CommandBody; impl BuildExecutable for CommandBody { fn handle_error(&self, e: IoError) { println!("{}", e); os::set_exit_status(1); } fn exec<B: Buffer, W: Writer>(&self, _: &Matches, buffer: &mut B, writer: &mut W, target: Option<Target>) { match target { Some(util::Assembly) => build(buffer, writer, Assembly::new()), Some(util::Brainfuck) => build(buffer, writer, Brainfuck::new()), Some(util::DT) => build(buffer, writer, DT::new()), Some(util::Ook) => build(buffer, writer, Ook::new()), Some(util::Whitespace) => build(buffer, writer, Whitespace::new()), _ => { println!("syntax should be \"asm\", \"bf\", \"dt\", \"ook\" or \"ws\" (default: ws)"); os::set_exit_status(1); }, } } } fn main() { debug!("executing; cmd=albino-build; args={}", os::args()); let mut opts = vec!(); let cmd = BuildCommand::new("build", "[-s syntax] [-o output] [file]", &mut opts, CommandBody); cmd.exec(); }<|fim▁end|>
#![crate_name="albino-build"] #![crate_type="bin"] #![feature(phase)] #![unstable]
<|file_name|>render_task.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ //! The task that handles all rendering/painting. use buffer_map::BufferMap; use display_list::{mod, StackingContext}; use font_cache_task::FontCacheTask; use font_context::FontContext; use render_context::RenderContext; use azure::azure_hl::{B8G8R8A8, Color, DrawTarget, SkiaBackend, StolenGLResources}; use azure::AzFloat; use geom::matrix2d::Matrix2D; use geom::point::Point2D; use geom::rect::Rect; use geom::size::Size2D; use layers::platform::surface::{NativeGraphicsMetadata, NativePaintingGraphicsContext}; use layers::platform::surface::{NativeSurface, NativeSurfaceMethods}; use layers::layers::{BufferRequest, LayerBuffer, LayerBufferSet}; use layers; use native::task::NativeTaskBuilder; use servo_msg::compositor_msg::{Epoch, IdleRenderState, LayerId}; use servo_msg::compositor_msg::{LayerMetadata, RenderListener, RenderingRenderState, ScrollPolicy}; use servo_msg::constellation_msg::{ConstellationChan, Failure, FailureMsg, PipelineId}; use servo_msg::constellation_msg::{RendererReadyMsg}; use servo_msg::platform::surface::NativeSurfaceAzureMethods; use servo_util::geometry::{Au, ZERO_POINT}; use servo_util::opts; use servo_util::smallvec::SmallVec; use servo_util::task::spawn_named_with_send_on_failure; use servo_util::task_state; use servo_util::time::{TimeProfilerChan, profile}; use servo_util::time; use std::comm::{Receiver, Sender, channel}; use std::mem; use std::task::TaskBuilder; use sync::Arc; /// Information about a hardware graphics layer that layout sends to the painting task. #[deriving(Clone)] pub struct RenderLayer { /// A per-pipeline ID describing this layer that should be stable across reflows. pub id: LayerId, /// The color of the background in this layer. Used for unrendered content. 
pub background_color: Color, /// The scrolling policy of this layer. pub scroll_policy: ScrollPolicy, } impl RenderLayer { /// Creates a new `RenderLayer`. pub fn new(id: LayerId, background_color: Color, scroll_policy: ScrollPolicy) -> RenderLayer { RenderLayer { id: id, background_color: background_color, scroll_policy: scroll_policy, } } } pub struct RenderRequest { pub buffer_requests: Vec<BufferRequest>, pub scale: f32, pub layer_id: LayerId, pub epoch: Epoch, } pub enum Msg { RenderInitMsg(Arc<StackingContext>), RenderMsg(Vec<RenderRequest>), UnusedBufferMsg(Vec<Box<LayerBuffer>>), PaintPermissionGranted, PaintPermissionRevoked, ExitMsg(Option<Sender<()>>), } #[deriving(Clone)] pub struct RenderChan(Sender<Msg>); impl RenderChan { pub fn new() -> (Receiver<Msg>, RenderChan) { let (chan, port) = channel(); (port, RenderChan(chan)) } pub fn send(&self, msg: Msg) { let &RenderChan(ref chan) = self; assert!(chan.send_opt(msg).is_ok(), "RenderChan.send: render port closed") } pub fn send_opt(&self, msg: Msg) -> Result<(), Msg> { let &RenderChan(ref chan) = self; chan.send_opt(msg) } } pub struct RenderTask<C> { id: PipelineId, port: Receiver<Msg>, compositor: C, constellation_chan: ConstellationChan, /// A channel to the time profiler. time_profiler_chan: TimeProfilerChan, /// The native graphics context. native_graphics_context: Option<NativePaintingGraphicsContext>, /// The root stacking context sent to us by the layout thread. root_stacking_context: Option<Arc<StackingContext>>, /// Permission to send paint messages to the compositor paint_permission: bool, /// A counter for epoch messages epoch: Epoch, /// A data structure to store unused LayerBuffers buffer_map: BufferMap, /// Communication handles to each of the worker threads. worker_threads: Vec<WorkerThreadProxy>, } // If we implement this as a function, we get borrowck errors from borrowing // the whole RenderTask struct. macro_rules! 
native_graphics_context( ($task:expr) => ( $task.native_graphics_context.as_ref().expect("Need a graphics context to do rendering") ) ) fn initialize_layers<C>(compositor: &mut C, pipeline_id: PipelineId, epoch: Epoch, root_stacking_context: &StackingContext) where C: RenderListener { let mut metadata = Vec::new(); build(&mut metadata, root_stacking_context, &ZERO_POINT); compositor.initialize_layers_for_pipeline(pipeline_id, metadata, epoch); fn build(metadata: &mut Vec<LayerMetadata>, stacking_context: &StackingContext, page_position: &Point2D<Au>) { let page_position = stacking_context.bounds.origin + *page_position; match stacking_context.layer { None => {} Some(ref render_layer) => { metadata.push(LayerMetadata { id: render_layer.id, position: Rect(Point2D(page_position.x.to_nearest_px() as uint, page_position.y.to_nearest_px() as uint), Size2D(stacking_context.bounds.size.width.to_nearest_px() as uint, stacking_context.bounds.size.height.to_nearest_px() as uint)), background_color: render_layer.background_color, scroll_policy: render_layer.scroll_policy, }) } } for kid in stacking_context.display_list.children.iter() { build(metadata, &**kid, &page_position) } }<|fim▁hole|>impl<C> RenderTask<C> where C: RenderListener + Send { pub fn create(id: PipelineId, port: Receiver<Msg>, compositor: C, constellation_chan: ConstellationChan, font_cache_task: FontCacheTask, failure_msg: Failure, time_profiler_chan: TimeProfilerChan, shutdown_chan: Sender<()>) { let ConstellationChan(c) = constellation_chan.clone(); spawn_named_with_send_on_failure("RenderTask", task_state::RENDER, proc() { { // Ensures that the render task and graphics context are destroyed before the // shutdown message. 
let mut compositor = compositor; let native_graphics_context = compositor.get_graphics_metadata().map( |md| NativePaintingGraphicsContext::from_metadata(&md)); let worker_threads = WorkerThreadProxy::spawn(compositor.get_graphics_metadata(), font_cache_task, time_profiler_chan.clone()); // FIXME: rust/#5967 let mut render_task = RenderTask { id: id, port: port, compositor: compositor, constellation_chan: constellation_chan, time_profiler_chan: time_profiler_chan, native_graphics_context: native_graphics_context, root_stacking_context: None, paint_permission: false, epoch: Epoch(0), buffer_map: BufferMap::new(10000000), worker_threads: worker_threads, }; render_task.start(); // Destroy all the buffers. match render_task.native_graphics_context.as_ref() { Some(ctx) => render_task.buffer_map.clear(ctx), None => (), } // Tell all the worker threads to shut down. for worker_thread in render_task.worker_threads.iter_mut() { worker_thread.exit() } } debug!("render_task: shutdown_chan send"); shutdown_chan.send(()); }, FailureMsg(failure_msg), c, true); } fn start(&mut self) { debug!("render_task: beginning rendering loop"); loop { match self.port.recv() { RenderInitMsg(stacking_context) => { self.epoch.next(); self.root_stacking_context = Some(stacking_context.clone()); if !self.paint_permission { debug!("render_task: render ready msg"); let ConstellationChan(ref mut c) = self.constellation_chan; c.send(RendererReadyMsg(self.id)); continue; } initialize_layers(&mut self.compositor, self.id, self.epoch, &*stacking_context); } RenderMsg(requests) => { if !self.paint_permission { debug!("render_task: render ready msg"); let ConstellationChan(ref mut c) = self.constellation_chan; c.send(RendererReadyMsg(self.id)); self.compositor.render_msg_discarded(); continue; } let mut replies = Vec::new(); self.compositor.set_render_state(self.id, RenderingRenderState); for RenderRequest { buffer_requests, scale, layer_id, epoch } in requests.into_iter() { if self.epoch == epoch { 
self.render(&mut replies, buffer_requests, scale, layer_id); } else { debug!("renderer epoch mismatch: {} != {}", self.epoch, epoch); } } self.compositor.set_render_state(self.id, IdleRenderState); debug!("render_task: returning surfaces"); self.compositor.paint(self.id, self.epoch, replies); } UnusedBufferMsg(unused_buffers) => { for buffer in unused_buffers.into_iter().rev() { self.buffer_map.insert(native_graphics_context!(self), buffer); } } PaintPermissionGranted => { self.paint_permission = true; match self.root_stacking_context { None => {} Some(ref stacking_context) => { self.epoch.next(); initialize_layers(&mut self.compositor, self.id, self.epoch, &**stacking_context); } } } PaintPermissionRevoked => { self.paint_permission = false; } ExitMsg(response_ch) => { debug!("render_task: exitmsg response send"); response_ch.map(|ch| ch.send(())); break; } } } } /// Retrieves an appropriately-sized layer buffer from the cache to match the requirements of /// the given tile, or creates one if a suitable one cannot be found. fn find_or_create_layer_buffer_for_tile(&mut self, tile: &BufferRequest, scale: f32) -> Option<Box<LayerBuffer>> { let width = tile.screen_rect.size.width; let height = tile.screen_rect.size.height; if opts::get().gpu_painting { return None } match self.buffer_map.find(tile.screen_rect.size) { Some(mut buffer) => { buffer.rect = tile.page_rect; buffer.screen_pos = tile.screen_rect; buffer.resolution = scale; buffer.native_surface.mark_wont_leak(); buffer.painted_with_cpu = true; buffer.content_age = tile.content_age; return Some(buffer) } None => {} } // Create an empty native surface. We mark it as not leaking // in case it dies in transit to the compositor task. 
let mut native_surface: NativeSurface = layers::platform::surface::NativeSurfaceMethods::new(native_graphics_context!(self), Size2D(width as i32, height as i32), width as i32 * 4); native_surface.mark_wont_leak(); Some(box LayerBuffer { native_surface: native_surface, rect: tile.page_rect, screen_pos: tile.screen_rect, resolution: scale, stride: (width * 4) as uint, painted_with_cpu: true, content_age: tile.content_age, }) } /// Renders one layer and sends the tiles back to the layer. fn render(&mut self, replies: &mut Vec<(LayerId, Box<LayerBufferSet>)>, mut tiles: Vec<BufferRequest>, scale: f32, layer_id: LayerId) { time::profile(time::PaintingCategory, None, self.time_profiler_chan.clone(), || { // Bail out if there is no appropriate stacking context. let stacking_context = match self.root_stacking_context { Some(ref stacking_context) => { match display_list::find_stacking_context_with_layer_id(stacking_context, layer_id) { Some(stacking_context) => stacking_context, None => return, } } None => return, }; // Divide up the layer into tiles and distribute them to workers via a simple round- // robin strategy. 
let tiles = mem::replace(&mut tiles, Vec::new()); let tile_count = tiles.len(); for (i, tile) in tiles.into_iter().enumerate() { let thread_id = i % self.worker_threads.len(); let layer_buffer = self.find_or_create_layer_buffer_for_tile(&tile, scale); self.worker_threads[thread_id].paint_tile(tile, layer_buffer, stacking_context.clone(), scale); } let new_buffers = Vec::from_fn(tile_count, |i| { let thread_id = i % self.worker_threads.len(); self.worker_threads[thread_id].get_painted_tile_buffer() }); let layer_buffer_set = box LayerBufferSet { buffers: new_buffers, }; replies.push((layer_id, layer_buffer_set)); }) } } struct WorkerThreadProxy { sender: Sender<MsgToWorkerThread>, receiver: Receiver<MsgFromWorkerThread>, } impl WorkerThreadProxy { fn spawn(native_graphics_metadata: Option<NativeGraphicsMetadata>, font_cache_task: FontCacheTask, time_profiler_chan: TimeProfilerChan) -> Vec<WorkerThreadProxy> { let thread_count = if opts::get().gpu_painting { 1 } else { opts::get().layout_threads }; Vec::from_fn(thread_count, |_| { let (from_worker_sender, from_worker_receiver) = channel(); let (to_worker_sender, to_worker_receiver) = channel(); let native_graphics_metadata = native_graphics_metadata.clone(); let font_cache_task = font_cache_task.clone(); let time_profiler_chan = time_profiler_chan.clone(); TaskBuilder::new().native().spawn(proc() { let mut worker_thread = WorkerThread::new(from_worker_sender, to_worker_receiver, native_graphics_metadata, font_cache_task, time_profiler_chan); worker_thread.main(); }); WorkerThreadProxy { receiver: from_worker_receiver, sender: to_worker_sender, } }) } fn paint_tile(&mut self, tile: BufferRequest, layer_buffer: Option<Box<LayerBuffer>>, stacking_context: Arc<StackingContext>, scale: f32) { self.sender.send(PaintTileMsgToWorkerThread(tile, layer_buffer, stacking_context, scale)) } fn get_painted_tile_buffer(&mut self) -> Box<LayerBuffer> { match self.receiver.recv() { PaintedTileMsgFromWorkerThread(layer_buffer) => 
layer_buffer, } } fn exit(&mut self) { self.sender.send(ExitMsgToWorkerThread) } } struct WorkerThread { sender: Sender<MsgFromWorkerThread>, receiver: Receiver<MsgToWorkerThread>, native_graphics_context: Option<NativePaintingGraphicsContext>, font_context: Box<FontContext>, time_profiler_sender: TimeProfilerChan, } impl WorkerThread { fn new(sender: Sender<MsgFromWorkerThread>, receiver: Receiver<MsgToWorkerThread>, native_graphics_metadata: Option<NativeGraphicsMetadata>, font_cache_task: FontCacheTask, time_profiler_sender: TimeProfilerChan) -> WorkerThread { WorkerThread { sender: sender, receiver: receiver, native_graphics_context: native_graphics_metadata.map(|metadata| { NativePaintingGraphicsContext::from_metadata(&metadata) }), font_context: box FontContext::new(font_cache_task.clone()), time_profiler_sender: time_profiler_sender, } } fn main(&mut self) { loop { match self.receiver.recv() { ExitMsgToWorkerThread => break, PaintTileMsgToWorkerThread(tile, layer_buffer, stacking_context, scale) => { let draw_target = self.optimize_and_paint_tile(&tile, stacking_context, scale); let buffer = self.create_layer_buffer_for_painted_tile(&tile, layer_buffer, draw_target, scale); self.sender.send(PaintedTileMsgFromWorkerThread(buffer)) } } } } fn optimize_and_paint_tile(&mut self, tile: &BufferRequest, stacking_context: Arc<StackingContext>, scale: f32) -> DrawTarget { let size = Size2D(tile.screen_rect.size.width as i32, tile.screen_rect.size.height as i32); let draw_target = if !opts::get().gpu_painting { DrawTarget::new(SkiaBackend, size, B8G8R8A8) } else { // FIXME(pcwalton): Cache the components of draw targets (texture color buffer, // renderbuffers) instead of recreating them. let draw_target = DrawTarget::new_with_fbo(SkiaBackend, native_graphics_context!(self), size, B8G8R8A8); draw_target.make_current(); draw_target }; { // Build the render context. 
let mut render_context = RenderContext { draw_target: &draw_target, font_ctx: &mut self.font_context, page_rect: tile.page_rect, screen_rect: tile.screen_rect, }; // Apply the translation to render the tile we want. let tile_bounds = tile.page_rect; let matrix: Matrix2D<AzFloat> = Matrix2D::identity(); let matrix = matrix.scale(scale as AzFloat, scale as AzFloat); let matrix = matrix.translate(-tile_bounds.origin.x as AzFloat, -tile_bounds.origin.y as AzFloat); render_context.draw_target.set_transform(&matrix); // Clear the buffer. render_context.clear(); // Draw the display list. profile(time::PaintingPerTileCategory, None, self.time_profiler_sender.clone(), || { let mut clip_stack = Vec::new(); stacking_context.optimize_and_draw_into_context(&mut render_context, &tile.page_rect, &matrix, &mut clip_stack); render_context.draw_target.flush(); }); } draw_target } fn create_layer_buffer_for_painted_tile(&mut self, tile: &BufferRequest, layer_buffer: Option<Box<LayerBuffer>>, draw_target: DrawTarget, scale: f32) -> Box<LayerBuffer> { // Extract the texture from the draw target and place it into its slot in the buffer. If // using CPU rendering, upload it first. // // FIXME(pcwalton): We should supply the texture and native surface *to* the draw target in // GPU rendering mode, so that it doesn't have to recreate it. if !opts::get().gpu_painting { let mut buffer = layer_buffer.unwrap(); draw_target.snapshot().get_data_surface().with_data(|data| { buffer.native_surface.upload(native_graphics_context!(self), data); debug!("painting worker thread uploading to native surface {:d}", buffer.native_surface.get_id() as int); }); return buffer } // GPU painting path: draw_target.make_current(); let StolenGLResources { surface: native_surface } = draw_target.steal_gl_resources().unwrap(); // We mark the native surface as not leaking in case the surfaces // die on their way to the compositor task. 
let mut native_surface: NativeSurface = NativeSurfaceAzureMethods::from_azure_surface(native_surface); native_surface.mark_wont_leak(); box LayerBuffer { native_surface: native_surface, rect: tile.page_rect, screen_pos: tile.screen_rect, resolution: scale, stride: (tile.screen_rect.size.width * 4) as uint, painted_with_cpu: false, content_age: tile.content_age, } } } enum MsgToWorkerThread { ExitMsgToWorkerThread, PaintTileMsgToWorkerThread(BufferRequest, Option<Box<LayerBuffer>>, Arc<StackingContext>, f32), } enum MsgFromWorkerThread { PaintedTileMsgFromWorkerThread(Box<LayerBuffer>), }<|fim▁end|>
}
<|file_name|>log.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-<|fim▁hole|>from abc import ABCMeta, abstractmethod from collections import namedtuple InternalEvent = namedtuple( 'InternalEvent', ('identifier', 'state_change_id', 'block_number', 'event_object'), ) # TODO: # - snapshots should be used to reduce the log file size class StateChangeLogSerializer(object): """ StateChangeLogSerializer An abstract class defining the serialization interface for the Transaction log. Allows for pluggable serializer backends. """ __metaclass__ = ABCMeta @abstractmethod def serialize(self, transaction): pass @abstractmethod def deserialize(self, data): pass class PickleTransactionSerializer(StateChangeLogSerializer): """ PickleTransactionSerializer A simple transaction serializer using pickle """ def serialize(self, transaction): # Some of our StateChange classes have __slots__ without having a __getstate__ # As seen in the SO question below: # http://stackoverflow.com/questions/2204155/why-am-i-getting-an-error-about-my-class-defining-slots-when-trying-to-pickl#2204702 # We can either add a __getstate__ to all of them or use the `-1` protocol and be # incompatible with ancient python version. Here I opt for the latter. return pickle.dumps(transaction, -1) def deserialize(self, data): return pickle.loads(data) class StateChangeLogStorageBackend(object): """ StateChangeLogStorageBackend An abstract class defining the storage backend for the transaction log. Allows for pluggable storage backends. 
""" __metaclass__ = ABCMeta @abstractmethod def write_state_change(self, data): pass @abstractmethod def write_state_snapshot(self, statechange_id, data): pass @abstractmethod def read(self): pass class StateChangeLogSQLiteBackend(StateChangeLogStorageBackend): def __init__(self, database_path): self.conn = sqlite3.connect(database_path) self.conn.text_factory = str self.conn.execute("PRAGMA foreign_keys=ON") cursor = self.conn.cursor() cursor.execute( 'CREATE TABLE IF NOT EXISTS state_changes (' ' id integer primary key autoincrement, data binary' ')' ) cursor.execute( 'CREATE TABLE IF NOT EXISTS state_snapshot (' 'identifier integer primary key, statechange_id integer, data binary, ' 'FOREIGN KEY(statechange_id) REFERENCES state_changes(id)' ')' ) cursor.execute( 'CREATE TABLE IF NOT EXISTS state_events (' 'identifier integer primary key, source_statechange_id integer NOT NULL, ' 'block_number integer NOT NULL, data binary, ' 'FOREIGN KEY(source_statechange_id) REFERENCES state_changes(id)' ')' ) self.conn.commit() self.sanity_check() # When writting to a table where the primary key is the identifier and we want # to return said identifier we use cursor.lastrowid, which uses sqlite's last_insert_rowid # https://github.com/python/cpython/blob/2.7/Modules/_sqlite/cursor.c#L727-L732 # # According to the documentation (http://www.sqlite.org/c3ref/last_insert_rowid.html) # if a different thread tries to use the same connection to write into the table # while we query the last_insert_rowid, the result is unpredictable. For that reason # we have this write lock here. # # TODO (If possible): # Improve on this and find a better way to protect against this potential race # condition. self.write_lock = threading.Lock() def sanity_check(self): """ Ensures that NUL character can be safely inserted and recovered from the database. 
http://bugs.python.org/issue13676 """ data = '\x00a' self.conn.execute( 'INSERT INTO state_changes (id, data) VALUES (null,?)', (data, ), ) result = next(self.conn.execute('SELECT data FROM state_changes ORDER BY id DESC')) if result[0] != data: raise RuntimeError( 'Database cannot save NUL character, ensure python is at least 2.7.3' ) self.conn.rollback() def write_state_change(self, data): with self.write_lock: cursor = self.conn.cursor() cursor.execute( 'INSERT INTO state_changes(id, data) VALUES(null,?)', (data,) ) last_id = cursor.lastrowid self.conn.commit() return last_id def write_state_snapshot(self, statechange_id, data): # TODO: Snapshotting is not yet implemented. This is just skeleton code # Issue: https://github.com/raiden-network/raiden/issues/593 # This skeleton code assumes we only keep a single snapshot and overwrite it each time. with self.write_lock: cursor = self.conn.cursor() cursor.execute( 'INSERT OR REPLACE INTO state_snapshot(' 'identifier, statechange_id, data) VALUES(?,?,?)', (1, statechange_id, data) ) last_id = cursor.lastrowid self.conn.commit() return last_id def write_state_events(self, statechange_id, events_data): """Do an 'execute_many' write of state events. 
`events_data` should be a list of tuples of the form: (None, source_statechange_id, block_number, serialized_event_data) """ cursor = self.conn.cursor() cursor.executemany( 'INSERT INTO state_events(' 'identifier, source_statechange_id, block_number, data) VALUES(?,?,?,?)', events_data ) self.conn.commit() def get_state_snapshot(self): """ Return the last state snapshot as a tuple of (state_change_id, data)""" cursor = self.conn.cursor() result = cursor.execute('SELECT * from state_snapshot') result = result.fetchall() if result == list(): return None assert len(result) == 1 return (result[0][1], result[0][2]) def get_state_change_by_id(self, identifier): cursor = self.conn.cursor() result = cursor.execute( 'SELECT data from state_changes where id=?', (identifier,) ) result = result.fetchall() if result != list(): assert len(result) == 1 result = result[0][0] return result def get_events_in_range(self, from_block, to_block): cursor = self.conn.cursor() if from_block is None: from_block = 0 if to_block is None: result = cursor.execute( 'SELECT * from state_events WHERE block_number >= ?', (from_block,) ) else: result = cursor.execute( 'SELECT * from state_events WHERE block_number ' 'BETWEEN ? 
AND ?', (from_block, to_block) ) result = result.fetchall() return result def read(self): pass def __del__(self): self.conn.close() class StateChangeLog(object): def __init__( self, storage_instance, serializer_instance=PickleTransactionSerializer()): if not isinstance(serializer_instance, StateChangeLogSerializer): raise ValueError( 'serializer_instance must follow the StateChangeLogSerializer interface' ) self.serializer = serializer_instance if not isinstance(storage_instance, StateChangeLogStorageBackend): raise ValueError( 'storage_instance must follow the StateChangeLogStorageBackend interface' ) self.storage = storage_instance def log(self, state_change): """ Log a state change and return its identifier""" # TODO: Issue 587 # Implement a queue of state changes for batch writting serialized_data = self.serializer.serialize(state_change) return self.storage.write_state_change(serialized_data) def log_events(self, state_change_id, events, current_block_number): """ Log the events that were generated by `state_change_id` into the write ahead Log """ assert isinstance(events, list) self.storage.write_state_events( state_change_id, [(None, state_change_id, current_block_number, self.serializer.serialize(event)) for event in events] ) def get_events_in_block_range(self, from_block, to_block): """Get the raiden events in the period (inclusive) ranging from `from_block` to `to_block`. 
This function returns a list of tuples of the form: (identifier, generated_statechange_id, block_number, event_object) """ results = self.storage.get_events_in_range(from_block, to_block) return [ InternalEvent(res[0], res[1], res[2], self.serializer.deserialize(res[3])) for res in results ] def get_state_change_by_id(self, identifier): serialized_data = self.storage.get_state_change_by_id(identifier) return self.serializer.deserialize(serialized_data) def snapshot(self, state_change_id, state): serialized_data = self.serializer.serialize(state) self.storage.write_state_snapshot(state_change_id, serialized_data)<|fim▁end|>
import pickle import sqlite3 import threading
<|file_name|>opengl-graphics.cpp<|end_file_name|><|fim▁begin|>/* Cabal - Legacy Game Implementations * * Cabal is the legal property of its developers, whose names * are too numerous to list here. Please refer to the COPYRIGHT * file distributed with this source distribution. * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. * */ // Based on the ScummVM (GPLv2+) file of the same name #include "backends/graphics/opengl/opengl-graphics.h" #include "backends/graphics/opengl/texture.h" #include "backends/graphics/opengl/debug.h" #include "backends/graphics/opengl/extensions.h" #include "common/textconsole.h" #include "common/translation.h" #include "common/algorithm.h" #include "common/file.h" #ifdef USE_OSD #include "common/tokenizer.h" #include "common/rect.h" #endif #include "graphics/conversion.h" #ifdef USE_OSD #include "graphics/fontman.h" #include "graphics/font.h" #endif namespace OpenGL { OpenGLGraphicsManager::OpenGLGraphicsManager() : _currentState(), _oldState(), _transactionMode(kTransactionNone), _screenChangeID(1 << (sizeof(int) * 8 - 2)), _outputScreenWidth(0), _outputScreenHeight(0), _displayX(0), _displayY(0), _displayWidth(0), _displayHeight(0), _defaultFormat(), _defaultFormatAlpha(), _gameScreen(nullptr), _gameScreenShakeOffset(0), _overlay(nullptr), _overlayVisible(false), _cursor(nullptr), _cursorX(0), 
_cursorY(0), _cursorDisplayX(0),_cursorDisplayY(0), _cursorHotspotX(0), _cursorHotspotY(0), _cursorHotspotXScaled(0), _cursorHotspotYScaled(0), _cursorWidthScaled(0), _cursorHeightScaled(0), _cursorKeyColor(0), _cursorVisible(false), _cursorDontScale(false), _cursorPaletteEnabled(false), _forceRedraw(false), _scissorOverride(3) #ifdef USE_OSD , _osdAlpha(0), _osdFadeStartTime(0), _osd(nullptr) #endif { memset(_gamePalette, 0, sizeof(_gamePalette)); } OpenGLGraphicsManager::~OpenGLGraphicsManager() { delete _gameScreen; delete _overlay; delete _cursor; #ifdef USE_OSD delete _osd; #endif } bool OpenGLGraphicsManager::hasFeature(OSystem::Feature f) { switch (f) { case OSystem::kFeatureAspectRatioCorrection: case OSystem::kFeatureCursorPalette: return true; case OSystem::kFeatureOverlaySupportsAlpha: return _defaultFormatAlpha.aBits() > 3; default: return false; } } void OpenGLGraphicsManager::setFeatureState(OSystem::Feature f, bool enable) { switch (f) { case OSystem::kFeatureAspectRatioCorrection:<|fim▁hole|> break; case OSystem::kFeatureCursorPalette: _cursorPaletteEnabled = enable; updateCursorPalette(); break; default: break; } } bool OpenGLGraphicsManager::getFeatureState(OSystem::Feature f) { switch (f) { case OSystem::kFeatureAspectRatioCorrection: return _currentState.aspectRatioCorrection; case OSystem::kFeatureCursorPalette: return _cursorPaletteEnabled; default: return false; } } namespace { const OSystem::GraphicsMode glGraphicsModes[] = { { "opengl_linear", _s("OpenGL"), GFX_LINEAR }, { "opengl_nearest", _s("OpenGL (No filtering)"), GFX_NEAREST }, { nullptr, nullptr, 0 } }; } // End of anonymous namespace const OSystem::GraphicsMode *OpenGLGraphicsManager::getSupportedGraphicsModes() const { return glGraphicsModes; } int OpenGLGraphicsManager::getDefaultGraphicsMode() const { return GFX_LINEAR; } bool OpenGLGraphicsManager::setGraphicsMode(int mode) { assert(_transactionMode != kTransactionNone); switch (mode) { case GFX_LINEAR: case GFX_NEAREST: 
_currentState.graphicsMode = mode; if (_gameScreen) { _gameScreen->enableLinearFiltering(mode == GFX_LINEAR); } if (_cursor) { _cursor->enableLinearFiltering(mode == GFX_LINEAR); } return true; default: warning("OpenGLGraphicsManager::setGraphicsMode(%d): Unknown graphics mode", mode); return false; } } int OpenGLGraphicsManager::getGraphicsMode() const { return _currentState.graphicsMode; } Graphics::PixelFormat OpenGLGraphicsManager::getScreenFormat() const { return _currentState.gameFormat; } void OpenGLGraphicsManager::beginGFXTransaction() { assert(_transactionMode == kTransactionNone); // Start a transaction. _oldState = _currentState; _transactionMode = kTransactionActive; } OSystem::TransactionError OpenGLGraphicsManager::endGFXTransaction() { assert(_transactionMode == kTransactionActive); uint transactionError = OSystem::kTransactionSuccess; bool setupNewGameScreen = false; if ( _oldState.gameWidth != _currentState.gameWidth || _oldState.gameHeight != _currentState.gameHeight) { setupNewGameScreen = true; } if (_oldState.gameFormat != _currentState.gameFormat) { setupNewGameScreen = true; } // Check whether the requested format can actually be used. Common::List<Graphics::PixelFormat> supportedFormats = getSupportedFormats(); // In case the requested format is not usable we will fall back to CLUT8. 
if (Common::find(supportedFormats.begin(), supportedFormats.end(), _currentState.gameFormat) == supportedFormats.end()) { _currentState.gameFormat = Graphics::PixelFormat::createFormatCLUT8(); transactionError |= OSystem::kTransactionFormatNotSupported; } do { uint requestedWidth = _currentState.gameWidth; uint requestedHeight = _currentState.gameHeight; const uint desiredAspect = getDesiredGameScreenAspect(); requestedHeight = intToFrac(requestedWidth) / desiredAspect; if (!loadVideoMode(requestedWidth, requestedHeight, _currentState.gameFormat) // HACK: This is really nasty but we don't have any guarantees of // a context existing before, which means we don't know the maximum // supported texture size before this. Thus, we check whether the // requested game resolution is supported over here. || ( _currentState.gameWidth > (uint)Texture::getMaximumTextureSize() || _currentState.gameHeight > (uint)Texture::getMaximumTextureSize())) { if (_transactionMode == kTransactionActive) { // Try to setup the old state in case its valid and is // actually different from the new one. if (_oldState.valid && _oldState != _currentState) { // Give some hints on what failed to set up. if ( _oldState.gameWidth != _currentState.gameWidth || _oldState.gameHeight != _currentState.gameHeight) { transactionError |= OSystem::kTransactionSizeChangeFailed; } if (_oldState.gameFormat != _currentState.gameFormat) { transactionError |= OSystem::kTransactionFormatNotSupported; } if (_oldState.aspectRatioCorrection != _currentState.aspectRatioCorrection) { transactionError |= OSystem::kTransactionAspectRatioFailed; } if (_oldState.graphicsMode != _currentState.graphicsMode) { transactionError |= OSystem::kTransactionModeSwitchFailed; } // Roll back to the old state. _currentState = _oldState; _transactionMode = kTransactionRollback; // Try to set up the old state. 
continue; } } // DON'T use error(), as this tries to bring up the debug // console, which WON'T WORK now that we might no have a // proper screen. warning("OpenGLGraphicsManager::endGFXTransaction: Could not load any graphics mode!"); g_system->quit(); } // In case we reach this we have a valid state, yay. _transactionMode = kTransactionNone; _currentState.valid = true; } while (_transactionMode == kTransactionRollback); if (setupNewGameScreen) { delete _gameScreen; _gameScreen = nullptr; _gameScreen = createTexture(_currentState.gameFormat); assert(_gameScreen); if (_gameScreen->hasPalette()) { _gameScreen->setPalette(0, 256, _gamePalette); } _gameScreen->allocate(_currentState.gameWidth, _currentState.gameHeight); _gameScreen->enableLinearFiltering(_currentState.graphicsMode == GFX_LINEAR); // We fill the screen to all black or index 0 for CLUT8. if (_currentState.gameFormat.bytesPerPixel == 1) { _gameScreen->fill(0); } else { _gameScreen->fill(_gameScreen->getSurface()->getFormat().RGBToColor(0, 0, 0)); } } // Update our display area and cursor scaling. This makes sure we pick up // aspect ratio correction and game screen changes correctly. recalculateDisplayArea(); recalculateCursorScaling(); // Something changed, so update the screen change ID. ++_screenChangeID; // Since transactionError is a ORd list of TransactionErrors this is // clearly wrong. But our API is simply broken. 
return (OSystem::TransactionError)transactionError; } int OpenGLGraphicsManager::getScreenChangeID() const { return _screenChangeID; } void OpenGLGraphicsManager::initSize(uint width, uint height, const Graphics::PixelFormat *format) { Graphics::PixelFormat requestedFormat; if (!format) { requestedFormat = Graphics::PixelFormat::createFormatCLUT8(); } else { requestedFormat = *format; } _currentState.gameFormat = requestedFormat; _currentState.gameWidth = width; _currentState.gameHeight = height; } int16 OpenGLGraphicsManager::getWidth() { return _currentState.gameWidth; } int16 OpenGLGraphicsManager::getHeight() { return _currentState.gameHeight; } void OpenGLGraphicsManager::copyRectToScreen(const void *buf, int pitch, int x, int y, int w, int h) { _gameScreen->copyRectToTexture(x, y, w, h, buf, pitch); } void OpenGLGraphicsManager::fillScreen(uint32 col) { // FIXME: This does not conform to the OSystem specs because fillScreen // is always taking CLUT8 color values and use color indexed mode. This is, // however, plain odd and probably was a forgotten when we introduced // RGB support. Thus, we simply do the "sane" thing here and hope OSystem // gets fixed one day. _gameScreen->fill(col); } void OpenGLGraphicsManager::setShakePos(int shakeOffset) { if (_gameScreenShakeOffset != shakeOffset) { _gameScreenShakeOffset = shakeOffset; _forceRedraw = true; } } void OpenGLGraphicsManager::updateScreen() { if (!_gameScreen) { return; } // We only update the screen when there actually have been any changes. if ( !_forceRedraw && !_gameScreen->isDirty() && !(_overlayVisible && _overlay->isDirty()) && !(_cursorVisible && _cursor && _cursor->isDirty()) && _osdAlpha == 0) { return; } _forceRedraw = false; // Clear the screen buffer. if (_scissorOverride && !_overlayVisible) { // In certain cases we need to assure that the whole screen area is // cleared. 
For example, when switching from overlay visible to // invisible, we need to assure that all contents are cleared to // properly remove all overlay contents. GLCALL(glDisable(GL_SCISSOR_TEST)); GLCALL(glClear(GL_COLOR_BUFFER_BIT)); GLCALL(glEnable(GL_SCISSOR_TEST)); --_scissorOverride; } else { GLCALL(glClear(GL_COLOR_BUFFER_BIT)); } const GLfloat shakeOffset = _gameScreenShakeOffset * (GLfloat)_displayHeight / _gameScreen->getHeight(); // First step: Draw the (virtual) game screen. _gameScreen->draw(_displayX, _displayY + shakeOffset, _displayWidth, _displayHeight); // Second step: Draw the overlay if visible. if (_overlayVisible) { _overlay->draw(0, 0, _outputScreenWidth, _outputScreenHeight); } // Third step: Draw the cursor if visible. if (_cursorVisible && _cursor) { // Adjust game screen shake position, but only when the overlay is not // visible. const GLfloat cursorOffset = _overlayVisible ? 0 : shakeOffset; _cursor->draw(_cursorDisplayX - _cursorHotspotXScaled, _cursorDisplayY - _cursorHotspotYScaled + cursorOffset, _cursorWidthScaled, _cursorHeightScaled); } #ifdef USE_OSD // Fourth step: Draw the OSD. if (_osdAlpha > 0) { Common::StackLock lock(_osdMutex); // Update alpha value. const int diff = g_system->getMillis() - _osdFadeStartTime; if (diff > 0) { if (diff >= kOSDFadeOutDuration) { // Back to full transparency. _osdAlpha = 0; } else { // Do a fade out. _osdAlpha = kOSDInitialAlpha - diff * kOSDInitialAlpha / kOSDFadeOutDuration; } } // Set the OSD transparency. GLCALL(glColor4f(1.0f, 1.0f, 1.0f, _osdAlpha / 100.0f)); // Draw the OSD texture. _osd->draw(0, 0, _outputScreenWidth, _outputScreenHeight); // Reset color. 
GLCALL(glColor4f(1.0f, 1.0f, 1.0f, 1.0f)); } #endif refreshScreen(); } Graphics::Surface *OpenGLGraphicsManager::lockScreen() { return _gameScreen->getSurface(); } void OpenGLGraphicsManager::unlockScreen() { _gameScreen->flagDirty(); } void OpenGLGraphicsManager::setFocusRectangle(const Common::Rect& rect) { } void OpenGLGraphicsManager::clearFocusRectangle() { } int16 OpenGLGraphicsManager::getOverlayWidth() { if (_overlay) { return _overlay->getWidth(); } else { return 0; } } int16 OpenGLGraphicsManager::getOverlayHeight() { if (_overlay) { return _overlay->getHeight(); } else { return 0; } } void OpenGLGraphicsManager::showOverlay() { _overlayVisible = true; _forceRedraw = true; // Allow drawing inside full screen area. GLCALL(glDisable(GL_SCISSOR_TEST)); // Update cursor position. setMousePosition(_cursorX, _cursorY); } void OpenGLGraphicsManager::hideOverlay() { _overlayVisible = false; _forceRedraw = true; // Limit drawing to screen area. GLCALL(glEnable(GL_SCISSOR_TEST)); _scissorOverride = 3; // Update cursor position. setMousePosition(_cursorX, _cursorY); } Graphics::PixelFormat OpenGLGraphicsManager::getOverlayFormat() const { return _overlay->getFormat(); } void OpenGLGraphicsManager::copyRectToOverlay(const void *buf, int pitch, int x, int y, int w, int h) { _overlay->copyRectToTexture(x, y, w, h, buf, pitch); } void OpenGLGraphicsManager::clearOverlay() { _overlay->fill(0); } void OpenGLGraphicsManager::grabOverlay(void *buf, int pitch) { const Graphics::Surface *overlayData = _overlay->getSurface(); const byte *src = (const byte *)overlayData->getPixels(); byte *dst = (byte *)buf; for (uint h = overlayData->getHeight(); h > 0; --h) { memcpy(dst, src, overlayData->getWidth() * overlayData->getFormat().bytesPerPixel); dst += pitch; src += overlayData->getPitch(); } } bool OpenGLGraphicsManager::showMouse(bool visible) { // In case the mouse cursor visibility changed we need to redraw the whole // screen even when nothing else changed. 
if (_cursorVisible != visible) { _forceRedraw = true; } bool last = _cursorVisible; _cursorVisible = visible; return last; } void OpenGLGraphicsManager::warpMouse(int x, int y) { int16 currentX = _cursorX; int16 currentY = _cursorY; adjustMousePosition(currentX, currentY); // Check whether the (virtual) coordinate actually changed. If not, then // simply do nothing. This avoids ugly "jittering" due to the actual // output screen having a bigger resolution than the virtual coordinates. if (currentX == x && currentY == y) { return; } // Scale the virtual coordinates into actual physical coordinates. if (_overlayVisible) { if (!_overlay) { return; } // It might be confusing that we actually have to handle something // here when the overlay is visible. This is because for very small // resolutions we have a minimal overlay size and have to adjust // for that. x = (x * _outputScreenWidth) / _overlay->getWidth(); y = (y * _outputScreenHeight) / _overlay->getHeight(); } else { if (!_gameScreen) { return; } x = (x * _outputScreenWidth) / _gameScreen->getWidth(); y = (y * _outputScreenHeight) / _gameScreen->getHeight(); } setMousePosition(x, y); setInternalMousePosition(x, y); } namespace { template<typename DstPixel, typename SrcPixel> void applyColorKey(DstPixel *dst, const SrcPixel *src, uint w, uint h, uint dstPitch, uint srcPitch, SrcPixel keyColor, DstPixel alphaMask) { const uint srcAdd = srcPitch - w * sizeof(SrcPixel); const uint dstAdd = dstPitch - w * sizeof(DstPixel); while (h-- > 0) { for (uint x = w; x > 0; --x, ++dst, ++src) { if (*src == keyColor) { *dst &= ~alphaMask; } } dst = (DstPixel *)((byte *)dst + dstAdd); src = (const SrcPixel *)((const byte *)src + srcAdd); } } } // End of anonymous namespace void OpenGLGraphicsManager::setMouseCursor(const void *buf, uint w, uint h, int hotspotX, int hotspotY, uint32 keycolor, bool dontScale, const Graphics::PixelFormat *format) { Graphics::PixelFormat inputFormat; if (format) { inputFormat = *format; } else { 
inputFormat = Graphics::PixelFormat::createFormatCLUT8(); } // In case the color format has changed we will need to create the texture. if (!_cursor || _cursor->getFormat() != inputFormat) { delete _cursor; _cursor = nullptr; GLenum glIntFormat, glFormat, glType; Graphics::PixelFormat textureFormat; if (inputFormat.bytesPerPixel == 1 || (inputFormat.aBits() && getGLPixelFormat(inputFormat, glIntFormat, glFormat, glType))) { // There is two cases when we can use the cursor format directly. // The first is when it's CLUT8, here color key handling can // always be applied because we use the alpha channel of // _defaultFormatAlpha for that. // The other is when the input format has alpha bits and // furthermore is directly supported. textureFormat = inputFormat; } else { textureFormat = _defaultFormatAlpha; } _cursor = createTexture(textureFormat, true); assert(_cursor); _cursor->enableLinearFiltering(_currentState.graphicsMode == GFX_LINEAR); } _cursorKeyColor = keycolor; _cursorHotspotX = hotspotX; _cursorHotspotY = hotspotY; _cursorDontScale = dontScale; _cursor->allocate(w, h); if (inputFormat.bytesPerPixel == 1) { // For CLUT8 cursors we can simply copy the input data into the // texture. _cursor->copyRectToTexture(0, 0, w, h, buf, w * inputFormat.bytesPerPixel); } else { // Otherwise it is a bit more ugly because we have to handle a key // color properly. Graphics::Surface *dst = _cursor->getSurface(); const uint srcPitch = w * inputFormat.bytesPerPixel; // Copy the cursor data to the actual texture surface. This will make // sure that the data is also converted to the expected format. Graphics::crossBlit((byte *)dst->getPixels(), (const byte *)buf, dst->getPitch(), srcPitch, w, h, dst->getFormat(), inputFormat); // We apply the color key by setting the alpha bits of the pixels to // fully transparent. 
const uint32 aMask = (0xFF >> dst->getFormat().aLoss) << dst->getFormat().aShift; if (dst->getFormat().bytesPerPixel == 2) { if (inputFormat.bytesPerPixel == 2) { applyColorKey<uint16, uint16>((uint16 *)dst->getPixels(), (const uint16 *)buf, w, h, dst->getPitch(), srcPitch, keycolor, aMask); } else if (inputFormat.bytesPerPixel == 4) { applyColorKey<uint16, uint32>((uint16 *)dst->getPixels(), (const uint32 *)buf, w, h, dst->getPitch(), srcPitch, keycolor, aMask); } } else { if (inputFormat.bytesPerPixel == 2) { applyColorKey<uint32, uint16>((uint32 *)dst->getPixels(), (const uint16 *)buf, w, h, dst->getPitch(), srcPitch, keycolor, aMask); } else if (inputFormat.bytesPerPixel == 4) { applyColorKey<uint32, uint32>((uint32 *)dst->getPixels(), (const uint32 *)buf, w, h, dst->getPitch(), srcPitch, keycolor, aMask); } } // Flag the texture as dirty. _cursor->flagDirty(); } // In case we actually use a palette set that up properly. if (inputFormat.bytesPerPixel == 1) { updateCursorPalette(); } // Update the scaling. recalculateCursorScaling(); } void OpenGLGraphicsManager::setCursorPalette(const byte *colors, uint start, uint num) { // FIXME: For some reason client code assumes that usage of this function // automatically enables the cursor palette. _cursorPaletteEnabled = true; memcpy(_cursorPalette + start * 3, colors, num * 3); updateCursorPalette(); } void OpenGLGraphicsManager::displayMessageOnOSD(const char *msg) { #ifdef USE_OSD // HACK: Actually no client code should use graphics functions from // another thread. But the MT-32 emulator still does, thus we need to // make sure this doesn't happen while a updateScreen call is done. Common::StackLock lock(_osdMutex); // Slip up the lines. Common::Array<Common::String> osdLines; Common::StringTokenizer tokenizer(msg, "\n"); while (!tokenizer.empty()) { osdLines.push_back(tokenizer.nextToken()); } // Do the actual drawing like the SDL backend. 
const Graphics::Font *font = getFontOSD(); Graphics::Surface *dst = _osd->getSurface(); _osd->fill(0); _osd->flagDirty(); // Determine a rect which would contain the message string (clipped to the // screen dimensions). const int vOffset = 6; const int lineSpacing = 1; const int lineHeight = font->getFontHeight() + 2 * lineSpacing; int width = 0; int height = lineHeight * osdLines.size() + 2 * vOffset; for (uint i = 0; i < osdLines.size(); i++) { width = MAX(width, font->getStringWidth(osdLines[i]) + 14); } // Clip the rect width = MIN<int>(width, dst->getWidth()); height = MIN<int>(height, dst->getHeight()); int dstX = (dst->getWidth() - width) / 2; int dstY = (dst->getHeight() - height) / 2; // Draw a dark gray rect. const uint32 color = dst->getFormat().RGBToColor(40, 40, 40); dst->fillRect(Common::Rect(dstX, dstY, dstX + width, dstY + height), color); // Render the message, centered, and in white const uint32 white = dst->getFormat().RGBToColor(255, 255, 255); for (uint i = 0; i < osdLines.size(); ++i) { font->drawString(dst, osdLines[i], dstX, dstY + i * lineHeight + vOffset + lineSpacing, width, white, Graphics::kTextAlignCenter); } // Init the OSD display parameters. _osdAlpha = kOSDInitialAlpha; _osdFadeStartTime = g_system->getMillis() + kOSDFadeOutDelay; #endif } void OpenGLGraphicsManager::setPalette(const byte *colors, uint start, uint num) { assert(_gameScreen->hasPalette()); memcpy(_gamePalette + start * 3, colors, num * 3); _gameScreen->setPalette(start, num, colors); // We might need to update the cursor palette here. updateCursorPalette(); } void OpenGLGraphicsManager::grabPalette(byte *colors, uint start, uint num) { assert(_gameScreen->hasPalette()); memcpy(colors, _gamePalette + start * 3, num * 3); } void OpenGLGraphicsManager::setActualScreenSize(uint width, uint height) { _outputScreenWidth = width; _outputScreenHeight = height; // Setup coordinates system. 
GLCALL(glViewport(0, 0, _outputScreenWidth, _outputScreenHeight)); GLCALL(glMatrixMode(GL_PROJECTION)); GLCALL(glLoadIdentity()); #ifdef USE_GLES GLCALL(glOrthof(0, _outputScreenWidth, _outputScreenHeight, 0, -1, 1)); #else GLCALL(glOrtho(0, _outputScreenWidth, _outputScreenHeight, 0, -1, 1)); #endif GLCALL(glMatrixMode(GL_MODELVIEW)); GLCALL(glLoadIdentity()); uint overlayWidth = width; uint overlayHeight = height; // WORKAROUND: We can only support surfaces up to the maximum supported // texture size. Thus, in case we encounter a physical size bigger than // this maximum texture size we will simply use an overlay as big as // possible and then scale it to the physical display size. This sounds // bad but actually all recent chips should support full HD resolution // anyway. Thus, it should not be a real issue for modern hardware. if ( overlayWidth > (uint)Texture::getMaximumTextureSize() || overlayHeight > (uint)Texture::getMaximumTextureSize()) { const frac_t outputAspect = intToFrac(_outputScreenWidth) / _outputScreenHeight; if (outputAspect > (frac_t)FRAC_ONE) { overlayWidth = Texture::getMaximumTextureSize(); overlayHeight = intToFrac(overlayWidth) / outputAspect; } else { overlayHeight = Texture::getMaximumTextureSize(); overlayWidth = fracToInt(overlayHeight * outputAspect); } } // HACK: We limit the minimal overlay size to 256x200, which is the // minimum of the dimensions of the two resolutions 256x240 (NES) and // 320x200 (many DOS games use this). This hopefully assure that our // GUI has working layouts. overlayWidth = MAX<uint>(overlayWidth, 256); overlayHeight = MAX<uint>(overlayHeight, 200); if (!_overlay || _overlay->getFormat() != _defaultFormatAlpha) { delete _overlay; _overlay = nullptr; _overlay = createTexture(_defaultFormatAlpha); assert(_overlay); // We always filter the overlay with GL_LINEAR. This assures it's // readable in case it needs to be scaled and does not affect it // otherwise. 
_overlay->enableLinearFiltering(true); } _overlay->allocate(overlayWidth, overlayHeight); _overlay->fill(0); #ifdef USE_OSD if (!_osd || _osd->getFormat() != _defaultFormatAlpha) { delete _osd; _osd = nullptr; _osd = createTexture(_defaultFormatAlpha); assert(_osd); // We always filter the osd with GL_LINEAR. This assures it's // readable in case it needs to be scaled and does not affect it // otherwise. _osd->enableLinearFiltering(true); } _osd->allocate(_overlay->getWidth(), _overlay->getHeight()); _osd->fill(0); #endif // Re-setup the scaling for the screen and cursor recalculateDisplayArea(); recalculateCursorScaling(); // Something changed, so update the screen change ID. ++_screenChangeID; } void OpenGLGraphicsManager::notifyContextCreate(const Graphics::PixelFormat &defaultFormat, const Graphics::PixelFormat &defaultFormatAlpha) { // Initialize all extensions. initializeGLExtensions(); // Disable 3D properties. GLCALL(glDisable(GL_CULL_FACE)); GLCALL(glDisable(GL_DEPTH_TEST)); GLCALL(glDisable(GL_LIGHTING)); GLCALL(glDisable(GL_FOG)); GLCALL(glDisable(GL_DITHER)); GLCALL(glShadeModel(GL_FLAT)); GLCALL(glHint(GL_PERSPECTIVE_CORRECTION_HINT, GL_FASTEST)); // Default to black as clear color. GLCALL(glClearColor(0.0f, 0.0f, 0.0f, 0.0f)); GLCALL(glColor4f(1.0f, 1.0f, 1.0f, 1.0f)); // Setup alpha blend (for overlay and cursor). GLCALL(glEnable(GL_BLEND)); GLCALL(glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)); // Enable rendering with vertex and coord arrays. GLCALL(glEnableClientState(GL_VERTEX_ARRAY)); GLCALL(glEnableClientState(GL_TEXTURE_COORD_ARRAY)); GLCALL(glEnable(GL_TEXTURE_2D)); // Setup scissor state accordingly. if (_overlayVisible) { GLCALL(glDisable(GL_SCISSOR_TEST)); } else { GLCALL(glEnable(GL_SCISSOR_TEST)); } // Clear the whole screen for the first three frames to assure any // leftovers are cleared. 
_scissorOverride = 3; // We use a "pack" alignment (when reading from textures) to 4 here, // since the only place where we really use it is the BMP screenshot // code and that requires the same alignment too. GLCALL(glPixelStorei(GL_PACK_ALIGNMENT, 4)); // Query information needed by textures. Texture::queryTextureInformation(); // Refresh the output screen dimensions if some are set up. if (_outputScreenWidth != 0 && _outputScreenHeight != 0) { setActualScreenSize(_outputScreenWidth, _outputScreenHeight); } // TODO: Should we try to convert textures into one of those formats if // possible? For example, when _gameScreen is CLUT8 we might want to use // defaultFormat now. _defaultFormat = defaultFormat; _defaultFormatAlpha = defaultFormatAlpha; if (_gameScreen) { _gameScreen->recreateInternalTexture(); } if (_overlay) { _overlay->recreateInternalTexture(); } if (_cursor) { _cursor->recreateInternalTexture(); } #ifdef USE_OSD if (_osd) { _osd->recreateInternalTexture(); } #endif } void OpenGLGraphicsManager::notifyContextDestroy() { if (_gameScreen) { _gameScreen->releaseInternalTexture(); } if (_overlay) { _overlay->releaseInternalTexture(); } if (_cursor) { _cursor->releaseInternalTexture(); } #ifdef USE_OSD if (_osd) { _osd->releaseInternalTexture(); } #endif } void OpenGLGraphicsManager::adjustMousePosition(int16 &x, int16 &y) { if (_overlayVisible) { // It might be confusing that we actually have to handle something // here when the overlay is visible. This is because for very small // resolutions we have a minimal overlay size and have to adjust // for that. // This can also happen when the overlay is smaller than the actual // display size because of texture size limitations. 
if (_overlay) { x = (x * _overlay->getWidth()) / _outputScreenWidth; y = (y * _overlay->getHeight()) / _outputScreenHeight; } } else if (_gameScreen) { const int16 width = _gameScreen->getWidth(); const int16 height = _gameScreen->getHeight(); x = (x * width) / (int)_outputScreenWidth; y = (y * height) / (int)_outputScreenHeight; } } void OpenGLGraphicsManager::setMousePosition(int x, int y) { // Whenever the mouse position changed we force a screen redraw to reflect // changes properly. if (_cursorX != x || _cursorY != y) { _forceRedraw = true; } _cursorX = x; _cursorY = y; if (_overlayVisible) { _cursorDisplayX = x; _cursorDisplayY = y; } else { _cursorDisplayX = _displayX + (x * _displayWidth) / _outputScreenWidth; _cursorDisplayY = _displayY + (y * _displayHeight) / _outputScreenHeight; } } Texture *OpenGLGraphicsManager::createTexture(const Graphics::PixelFormat &format, bool wantAlpha) { GLenum glIntFormat, glFormat, glType; if (format.bytesPerPixel == 1) { const Graphics::PixelFormat &virtFormat = wantAlpha ? 
_defaultFormatAlpha : _defaultFormat; const bool supported = getGLPixelFormat(virtFormat, glIntFormat, glFormat, glType); if (!supported) { return nullptr; } else { return new TextureCLUT8(glIntFormat, glFormat, glType, virtFormat); } } else { const bool supported = getGLPixelFormat(format, glIntFormat, glFormat, glType); if (!supported) { return nullptr; } else { return new Texture(glIntFormat, glFormat, glType, format); } } } bool OpenGLGraphicsManager::getGLPixelFormat(const Graphics::PixelFormat &pixelFormat, GLenum &glIntFormat, GLenum &glFormat, GLenum &glType) const { #ifdef SCUMM_LITTLE_ENDIAN if (pixelFormat == Graphics::PixelFormat(4, 8, 8, 8, 8, 0, 8, 16, 24)) { // ABGR8888 #else if (pixelFormat == Graphics::PixelFormat(4, 8, 8, 8, 8, 24, 16, 8, 0)) { // RGBA8888 #endif glIntFormat = GL_RGBA; glFormat = GL_RGBA; glType = GL_UNSIGNED_BYTE; return true; } else if (pixelFormat == Graphics::PixelFormat(2, 5, 6, 5, 0, 11, 5, 0, 0)) { // RGB565 glIntFormat = GL_RGB; glFormat = GL_RGB; glType = GL_UNSIGNED_SHORT_5_6_5; return true; } else if (pixelFormat == Graphics::PixelFormat(2, 5, 5, 5, 1, 11, 6, 1, 0)) { // RGBA5551 glIntFormat = GL_RGBA; glFormat = GL_RGBA; glType = GL_UNSIGNED_SHORT_5_5_5_1; return true; } else if (pixelFormat == Graphics::PixelFormat(2, 4, 4, 4, 4, 12, 8, 4, 0)) { // RGBA4444 glIntFormat = GL_RGBA; glFormat = GL_RGBA; glType = GL_UNSIGNED_SHORT_4_4_4_4; return true; #ifndef USE_GLES #ifdef SCUMM_LITTLE_ENDIAN } else if (pixelFormat == Graphics::PixelFormat(4, 8, 8, 8, 8, 24, 16, 8, 0)) { // RGBA8888 glIntFormat = GL_RGBA; glFormat = GL_RGBA; glType = GL_UNSIGNED_INT_8_8_8_8; return true; #endif } else if (pixelFormat == Graphics::PixelFormat(2, 5, 5, 5, 0, 10, 5, 0, 0)) { // RGB555 // GL_BGRA does not exist in every GLES implementation so should not be configured if // USE_GLES is set. 
glIntFormat = GL_RGB; glFormat = GL_BGRA; glType = GL_UNSIGNED_SHORT_1_5_5_5_REV; return true; } else if (pixelFormat == Graphics::PixelFormat(4, 8, 8, 8, 8, 16, 8, 0, 24)) { // ARGB8888 glIntFormat = GL_RGBA; glFormat = GL_BGRA; glType = GL_UNSIGNED_INT_8_8_8_8_REV; return true; } else if (pixelFormat == Graphics::PixelFormat(2, 4, 4, 4, 4, 8, 4, 0, 12)) { // ARGB4444 glIntFormat = GL_RGBA; glFormat = GL_BGRA; glType = GL_UNSIGNED_SHORT_4_4_4_4_REV; return true; #ifdef SCUMM_BIG_ENDIAN } else if (pixelFormat == Graphics::PixelFormat(4, 8, 8, 8, 8, 0, 8, 16, 24)) { // ABGR8888 glIntFormat = GL_RGBA; glFormat = GL_RGBA; glType = GL_UNSIGNED_INT_8_8_8_8_REV; return true; #endif } else if (pixelFormat == Graphics::PixelFormat(4, 8, 8, 8, 8, 8, 16, 24, 0)) { // BGRA8888 glIntFormat = GL_RGBA; glFormat = GL_BGRA; glType = GL_UNSIGNED_INT_8_8_8_8; return true; } else if (pixelFormat == Graphics::PixelFormat(2, 5, 6, 5, 0, 0, 5, 11, 0)) { // BGR565 glIntFormat = GL_RGB; glFormat = GL_BGR; glType = GL_UNSIGNED_SHORT_5_6_5; return true; } else if (pixelFormat == Graphics::PixelFormat(2, 5, 5, 5, 1, 1, 6, 11, 0)) { // BGRA5551 glIntFormat = GL_RGBA; glFormat = GL_BGRA; glType = GL_UNSIGNED_SHORT_5_5_5_1; return true; } else if (pixelFormat == Graphics::PixelFormat(2, 4, 4, 4, 4, 0, 4, 8, 12)) { // ABGR4444 glIntFormat = GL_RGBA; glFormat = GL_RGBA; glType = GL_UNSIGNED_SHORT_4_4_4_4_REV; return true; } else if (pixelFormat == Graphics::PixelFormat(2, 4, 4, 4, 4, 4, 8, 12, 0)) { // BGRA4444 glIntFormat = GL_RGBA; glFormat = GL_BGRA; glType = GL_UNSIGNED_SHORT_4_4_4_4; return true; #endif } else { return false; } } frac_t OpenGLGraphicsManager::getDesiredGameScreenAspect() const { const uint width = _currentState.gameWidth; const uint height = _currentState.gameHeight; if (_currentState.aspectRatioCorrection) { // In case we enable aspect ratio correction we force a 4/3 ratio. // But just for 320x200 and 640x400 games, since other games do not need // this. 
if ((width == 320 && height == 200) || (width == 640 && height == 400)) { return intToFrac(4) / 3; } } return intToFrac(width) / height; } void OpenGLGraphicsManager::recalculateDisplayArea() { if (!_gameScreen || _outputScreenHeight == 0) { return; } const frac_t outputAspect = intToFrac(_outputScreenWidth) / _outputScreenHeight; const frac_t desiredAspect = getDesiredGameScreenAspect(); _displayWidth = _outputScreenWidth; _displayHeight = _outputScreenHeight; // Adjust one dimension for mantaining the aspect ratio. if (outputAspect < desiredAspect) { _displayHeight = intToFrac(_displayWidth) / desiredAspect; } else if (outputAspect > desiredAspect) { _displayWidth = fracToInt(_displayHeight * desiredAspect); } // We center the screen in the middle for now. _displayX = (_outputScreenWidth - _displayWidth ) / 2; _displayY = (_outputScreenHeight - _displayHeight) / 2; // Setup drawing limitation for game graphics. // This invovles some trickery because OpenGL's viewport coordinate system // is upside down compared to ours. GLCALL(glScissor(_displayX, _outputScreenHeight - _displayHeight - _displayY, _displayWidth, _displayHeight)); // Clear the whole screen for the first three frames to remove leftovers. _scissorOverride = 3; // Update the cursor position to adjust for new display area. setMousePosition(_cursorX, _cursorY); // Force a redraw to assure screen is properly redrawn. _forceRedraw = true; } void OpenGLGraphicsManager::updateCursorPalette() { if (!_cursor || !_cursor->hasPalette()) { return; } if (_cursorPaletteEnabled) { _cursor->setPalette(0, 256, _cursorPalette); } else { _cursor->setPalette(0, 256, _gamePalette); } // We remove all alpha bits from the palette entry of the color key. // This makes sure its properly handled as color key. 
const Graphics::PixelFormat &hardwareFormat = _cursor->getHardwareFormat(); const uint32 aMask = (0xFF >> hardwareFormat.aLoss) << hardwareFormat.aShift; if (hardwareFormat.bytesPerPixel == 2) { uint16 *palette = (uint16 *)_cursor->getPalette() + _cursorKeyColor; *palette &= ~aMask; } else if (hardwareFormat.bytesPerPixel == 4) { uint32 *palette = (uint32 *)_cursor->getPalette() + _cursorKeyColor; *palette &= ~aMask; } else { warning("OpenGLGraphicsManager::updateCursorPalette: Unsupported pixel depth %d", hardwareFormat.bytesPerPixel); } } void OpenGLGraphicsManager::recalculateCursorScaling() { if (!_cursor || !_gameScreen) { return; } // By default we use the unscaled versions. _cursorHotspotXScaled = _cursorHotspotX; _cursorHotspotYScaled = _cursorHotspotY; _cursorWidthScaled = _cursor->getWidth(); _cursorHeightScaled = _cursor->getHeight(); // In case scaling is actually enabled we will scale the cursor according // to the game screen. if (!_cursorDontScale) { const frac_t screenScaleFactorX = intToFrac(_displayWidth) / _gameScreen->getWidth(); const frac_t screenScaleFactorY = intToFrac(_displayHeight) / _gameScreen->getHeight(); _cursorHotspotXScaled = fracToInt(_cursorHotspotXScaled * screenScaleFactorX); _cursorWidthScaled = fracToInt(_cursorWidthScaled * screenScaleFactorX); _cursorHotspotYScaled = fracToInt(_cursorHotspotYScaled * screenScaleFactorY); _cursorHeightScaled = fracToInt(_cursorHeightScaled * screenScaleFactorY); } } #ifdef USE_OSD const Graphics::Font *OpenGLGraphicsManager::getFontOSD() { return FontMan.getFontByUsage(Graphics::FontManager::kLocalizedFont); } #endif void OpenGLGraphicsManager::saveScreenshot(const Common::String &filename) const { const uint width = _outputScreenWidth; const uint height = _outputScreenHeight; // A line of a BMP image must have a size divisible by 4. // We calculate the padding bytes needed here. // Since we use a 3 byte per pixel mode, we can use width % 4 here, since // it is equal to 4 - (width * 3) % 4. 
(4 - (width * Bpp) % 4, is the // usual way of computing the padding bytes required). const uint linePaddingSize = width % 4; const uint lineSize = width * 3 + linePaddingSize; // Allocate memory for screenshot uint8 *pixels = new uint8[lineSize * height]; // Get pixel data from OpenGL buffer GLCALL(glReadPixels(0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE, pixels)); // BMP stores as BGR. Since we can't assume that GL_BGR is supported we // will swap the components from the RGB we read to BGR on our own. for (uint y = height; y-- > 0;) { uint8 *line = pixels + y * lineSize; for (uint x = width; x > 0; --x, line += 3) { SWAP(line[0], line[2]); } } // Open file Common::DumpFile out; out.open(filename); // Write BMP header out.writeByte('B'); out.writeByte('M'); out.writeUint32LE(height * lineSize + 54); out.writeUint32LE(0); out.writeUint32LE(54); out.writeUint32LE(40); out.writeUint32LE(width); out.writeUint32LE(height); out.writeUint16LE(1); out.writeUint16LE(24); out.writeUint32LE(0); out.writeUint32LE(0); out.writeUint32LE(0); out.writeUint32LE(0); out.writeUint32LE(0); out.writeUint32LE(0); // Write pixel data to BMP out.write(pixels, lineSize * height); // Free allocated memory delete[] pixels; } } // End of namespace OpenGL<|fim▁end|>
assert(_transactionMode != kTransactionNone); _currentState.aspectRatioCorrection = enable;
<|file_name|>config.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 Ahmed Zaher Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package main import ( "fmt" "os" "strings" getopt "github.com/kesselborn/go-getopt" ) // GitCommit is the git commit hash string, // gets passed from the command line using a binary release of this tool. var GitCommit string // BuildTimestamp is the current timestamp in a string format, // gets passed from the command line using a binary release of this tool. var BuildTimestamp string // ReleaseVersion is the desired release version string that represents the version of this executable. // gets passed from the command line using a binary release of this tool. var ReleaseVersion string // GoVersion indicates which version of Go has been used to build this binary. // gets passed from the command line using a binary release of this tool. 
var GoVersion string type configurations struct { WorkDir string Tag string Release string IgnoreTagPrefix string Package string Commit string Branch string Verbose bool } func configure(conf *configurations) (string, error) { var e error var workDirectory string if workDirectory, e = os.Getwd(); e != nil { return "", e } parser := getopt.Options{ Description: "Builds and installs a binary release of a Golang source code while embedding its release information - through a group of exported public variables in the source - based on the current status of its Git repository, all the source files must be committed into the local repository before running this command or it will complain, this tool assumes that Golang (with a valid 'GOROOT' and 'GOPATH' environment variables) and Git source control are installed and fully working though shell.", Definitions: []getopt.Option{ { OptionDefinition: "work-directory|w|REGO_WORK_DIR", Description: "The working directory that contains the project source files and its Git repository", Flags: getopt.Optional | getopt.ExampleIsDefault, DefaultValue: workDirectory, }, { OptionDefinition: "branch|b|REGO_BRANCH", Description: "The branch name of where the binary release source is going to be taken from, the command automatically picks the most recent commit hash in the specified branch, the commit hash string is passed to the binary release while building through the public variable 'GitCommit'", Flags: getopt.Optional | getopt.ExampleIsDefault, DefaultValue: "develop", }, { OptionDefinition: "commit|c|REGO_COMMIT", Description: "The commit hash string of where the binary release source is going to be taken from, specifying this option causes the '--branch' option to be ignored since this option is more specific, the commit hash string is passed to the binary release while building through the public variable 'GitCommit'", Flags: getopt.Optional | getopt.ExampleIsDefault, DefaultValue: "", }, { OptionDefinition: "tag|t|REGO_TAG", 
Description: "The tag name of where the binary release source is going to be taken from, causes the '--branch' and '--commit' options to be ignored since this option is more specific, the commit hash string is passed to the binary release while building through the public variable 'GitCommit'", Flags: getopt.Optional | getopt.ExampleIsDefault, DefaultValue: "", }, { OptionDefinition: "release|r|REGO_RELEASE", Description: "The string that is meant to represent the final binary release version, if the '--tag' option is specified this option is automatically calculated with consideration of '--ignore-tag-prefix' option if specified to represent the tag name, the value of this option is passed to the binary release while building through the public variable 'ReleaseVersion'", Flags: getopt.Optional | getopt.ExampleIsDefault, DefaultValue: "SNAPSHOT", }, { OptionDefinition: "package|p|REGO_PACKAGE", Description: "The package name of which contains the declarations of the public variables" + " (GitCommit, BuildTimestamp, ReleaseVersion, GoVersion) which represent the commit hash of where the binary release source has been pulled from, the timestamp of when the build has be triggered, the release version string, the Golang version that has been used in the build, respectively",<|fim▁hole|> Flags: getopt.Optional | getopt.ExampleIsDefault, DefaultValue: "main", }, { OptionDefinition: "ignore-tag-prefix|i|REGO_IGNORE_TAG_PREFIX", Description: "If the '--tag' option is specified, this option trims the specified prefix off the tag name while calculating the release version string", Flags: getopt.Optional | getopt.ExampleIsDefault, DefaultValue: "", }, { OptionDefinition: "verbose", Description: "Shows more verbose output", Flags: getopt.Flag, DefaultValue: false, }, { OptionDefinition: "version|v", Description: "Prints the version and exits", Flags: getopt.Flag, DefaultValue: false, }, }, } var err *getopt.GetOptError var options map[string]getopt.OptionValue if options, _, 
_, err = parser.ParseCommandLine(); err != nil { return "", fmt.Errorf("failed with error code: %v, %v", err.ErrorCode, err.Error()) } else if help, wantsHelp := options["help"]; wantsHelp && help.String == "usage" { return parser.Usage(), nil } else if wantsHelp && help.String == "help" { return parser.Help(), nil } else if options["version"].Bool { return fmt.Sprintf("Release: %v%vCommit: %v%vBuild Time: %v%vBuilt with: %v", ReleaseVersion, NewLine(), GitCommit, NewLine(), BuildTimestamp, NewLine(), GoVersion), nil } conf.Verbose = options["verbose"].Bool conf.WorkDir = strings.TrimSpace(options["work-directory"].String) conf.Package = strings.TrimSpace(options["package"].String) conf.Branch = strings.TrimSpace(options["branch"].String) conf.Commit = strings.TrimSpace(options["commit"].String) conf.Tag = strings.TrimSpace(options["tag"].String) conf.IgnoreTagPrefix = strings.TrimSpace(options["ignore-tag-prefix"].String) conf.Release = strings.TrimSpace(options["release"].String) return "", nil }<|fim▁end|>
<|file_name|>smf_plot.py<|end_file_name|><|fim▁begin|>import astropy.cosmology as co aa=co.Planck15 import astropy.io.fits as fits import matplotlib import matplotlib matplotlib.rcParams['agg.path.chunksize'] = 2000000 matplotlib.rcParams.update({'font.size': 12}) matplotlib.use('Agg') import matplotlib.pyplot as p import numpy as n import os import sys # global cosmo quantities z_min = float(sys.argv[1]) z_max = float(sys.argv[2]) #imf = 'kroupa' lO2_min = float(sys.argv[3]) # 'salpeter' SNlimit = 5 out_dir = os.path.join(os.environ['OBS_REPO'], 'spm', 'results') #previous catalogs ll_dir = os.path.join(os.environ['OBS_REPO'], 'spm', 'literature') cosmos_dir = os.path.join(os.environ['OBS_REPO'], 'COSMOS', 'catalogs' )<|fim▁hole|># FIREFLY CATALOGS # SDSS data and catalogs sdss_dir = os.path.join(os.environ['OBS_REPO'], 'SDSS', 'dr14') path_2_spall_sdss_dr14_cat = os.path.join( sdss_dir, "specObj-SDSS-dr14.fits" ) path_2_spall_boss_dr14_cat = os.path.join( sdss_dir, "specObj-BOSS-dr14.fits" ) path_2_sdss_cat = os.path.join( sdss_dir, "FireflyGalaxySdss26.fits" ) path_2_eboss_cat = os.path.join( sdss_dir, "FireflyGalaxyEbossDR14.fits" ) # DEEP SURVEYS deep2_dir = os.path.join(os.environ['OBS_REPO'], 'DEEP2') path_2_deep2_cat = os.path.join( deep2_dir, "zcat.deep2.dr4.v4.LFcatalogTC.Planck13.spm.v2.fits" ) vipers_dir = os.path.join(os.environ['OBS_REPO'], 'VIPERS') path_2_vipers_cat = os.path.join( vipers_dir, "VIPERS_W14_summary_v2.1.linesFitted.spm.fits" ) vvds_dir = os.path.join(os.environ['OBS_REPO'], 'VVDS') path_2_vvdsW_cat = os.path.join( vvds_dir, "catalogs", "VVDS_WIDE_summary.v1.spm.fits" ) path_2_vvdsD_cat = os.path.join( vvds_dir, "catalogs", "VVDS_DEEP_summary.v1.spm.fits" ) # path_2_F16_cat = os.path.join( sdss_dir, "RA_DEC_z_w_fluxOII_Mstar_grcol_Mr_lumOII.dat" ) # OPENS THE CATALOGS deep2 = fits.open(path_2_deep2_cat)[1].data #vvdsD = fits.open(path_2_vvdsD_cat)[1].data #vvdsW = fits.open(path_2_vvdsW_cat)[1].data #vipers = 
fits.open(path_2_vipers_cat)[1].data #sdss = fits.open(path_2_sdss_cat)[1].data #boss = fits.open(path_2_eboss_cat)[1].data cosmos = fits.open(path_2_cosmos_cat)[1].data lineSelection = lambda catalog, lineName : (catalog[lineName+'_flux']>0.)& (catalog[lineName+'_fluxErr'] >0.) & (catalog[lineName+'_flux'] > SNlimit * catalog[lineName+'_fluxErr']) # & (catalog[lineName+'_luminosity']>0)& (catalog[lineName+'_luminosity']<1e50) out_dir = os.path.join('/data42s/comparat/firefly/v1_1_0/figures') smf_ilbert13 = lambda M, M_star, phi_1s, alpha_1s, phi_2s, alpha_2s : ( phi_1s * (M/M_star) ** alpha_1s + phi_2s * (M/M_star) ** alpha_2s ) * n.e ** (-M/M_star) * (M/ M_star) path_ilbert13_SMF = os.path.join(ll_dir, "ilbert_2013_mass_function_params.txt") zmin, zmax, N, M_comp, M_star, phi_1s, alpha_1s, phi_2s, alpha_2s, log_rho_s = n.loadtxt(os.path.join( ll_dir, "ilbert_2013_mass_function_params.txt"), unpack=True) #smfs_ilbert13 = n.array([lambda mass : smf_ilbert13( mass , 10**M_star[ii], phi_1s[ii]*10**(-3), alpha_1s[ii], phi_2s[ii]*10**(-3), alpha_2s[ii] ) for ii in range(len(M_star)) ]) smf01 = lambda mass : smf_ilbert13( mass , 10**M_star[0], phi_1s[0]*10**(-3), alpha_1s[0], phi_2s[0]*10**(-3), alpha_2s[0] ) #print 10**M_star[0], phi_1s[0]*10**(-3), alpha_1s[0], phi_2s[0]*10**(-3), alpha_2s[0] smf08 = lambda mass : smf_ilbert13( mass , 10**M_star[2], phi_1s[2]*10**(-3), alpha_1s[2], phi_2s[2]*10**(-3), alpha_2s[2] ) #print 10**M_star[2], phi_1s[2]*10**(-3), alpha_1s[2], phi_2s[2]*10**(-3), alpha_2s[2] volume_per_deg2 = ( aa.comoving_volume(z_max) - aa.comoving_volume(z_min) ) * n.pi / 129600. volume_per_deg2_val = volume_per_deg2.value # global spm quantities # stat functions ld = lambda selection : len(selection.nonzero()[0]) # stats about DEEP2 run area1=0.60 area2=0.62 area3=0.90 area4=0.66 if z_min>=0.7: area_deep2 = area1+area2+area3+area4 else : area_deep2 = 0.6 #area_vvdsD = 0.6 #area_vvdsW = 5.785 #area_vipers = 24. 
#area_cosmos = 1.52

def get_basic_stat(catalog, z_name, z_flg, name, zflg_min, prefix):
    """Build the selection masks and statistical weights for one survey.

    Parameters
    ----------
    catalog : FITS record array with redshift, TSR/SSR, emission-line and
        stellar-mass columns (mass columns prefixed by ``prefix``).
    z_name, z_flg : names of the redshift and redshift-quality columns.
    name : survey label (informational only; used by a removed debug print).
    zflg_min : minimum acceptable redshift-quality flag.
    prefix : stellar-mass column prefix, e.g. "Chabrier_ELODIE_".

    Returns
    -------
    (catalog_sel, m_catalog, w_catalog, l_o2, l_o3, l_hb) :
        good-mass selection mask, log10 stellar mass, 1/(TSR*SSR)
        statistical weights, and the [OII]/[OIII]/Hbeta detection masks
        restricted to the statistical sample.

    Relies on the module-level globals z_min, z_max and lineSelection.
    """
    # confident redshift above the lower bound
    catalog_zOk = (catalog[z_name] > z_min) & (catalog[z_flg] >= zflg_min)
    # statistical sample: z_min < z < z_max with physical sampling rates
    catalog_stat = (catalog_zOk) & (catalog[z_name] > z_min) & (catalog[z_name] < z_max) & (catalog['SSR'] > 0) & (catalog['TSR'] > 0) & (catalog['SSR'] <= 1.0001) & (catalog['TSR'] <= 1.0001)
    # reliable mass: 1e5 <= M* < 1e14, value inside its own error interval,
    # and a confidence interval narrower than 0.6 dex
    catalog_sel = (catalog_stat) & (catalog[prefix + 'stellar_mass'] < 10**14.) & (catalog[prefix + 'stellar_mass'] >= 10**5.) & (catalog[prefix + 'stellar_mass'] <= catalog[prefix + 'stellar_mass_up']) & (catalog[prefix + 'stellar_mass'] >= catalog[prefix + 'stellar_mass_low']) & (-n.log10(catalog[prefix + 'stellar_mass_low']) + n.log10(catalog[prefix + 'stellar_mass_up']) < 0.6)
    # emission-line detections (S/N > SNlimit) inside the statistical sample
    l_o2 = lineSelection(catalog, "O2_3728") & catalog_stat
    l_o3 = lineSelection(catalog, "O3_5007") & catalog_stat
    l_hb = lineSelection(catalog, "H1_4862") & catalog_stat
    m_catalog = n.log10(catalog[prefix + 'stellar_mass'])
    w_catalog = 1. / (catalog['TSR'] * catalog['SSR'])
    return catalog_sel, m_catalog, w_catalog, l_o2, l_o3, l_hb


def get_hist(masses, weights, mbins):
    """Weighted histogram with Poisson error bars.

    Parameters: masses (values to bin), weights (per-object weights),
    mbins (numpy array of bin edges).

    Returns (bin centers, weighted counts, weighted counts / sqrt(raw counts)).
    NOTE(review): empty bins have NN = 0, so the error term is +inf there;
    callers are expected to mask empty bins before using ratios.
    """
    NN = n.histogram(masses, mbins)[0]
    NW = n.histogram(masses, mbins, weights=weights)[0]
    xx = (mbins[1:] + mbins[:-1]) / 2.
    return xx, NW, NN**(-0.5) * NW


def plotMF_raw(prefix="Chabrier_ELODIE_"):
    """Make the DEEP2 figures for one stellar-population-model prefix.

    Produces three figures in out_dir:
      1. fraction of [OII] detections with a mass measurement vs L([OII]),
      2. the stellar mass function (all vs L([OII])-selected), with the
         Ilbert+13 Schechter fits overlaid,
      3. the ratio of the [OII]-selected SMF to the full SMF.

    (Commented-out VVDS / VIPERS / COSMOS overlays from earlier revisions
    were removed; see VCS history.)
    """
    deep2_sel, deep2_m, deep2_w, deep2_o2, deep2_o3, deep2_hb = get_basic_stat(deep2, 'ZBEST', 'ZQUALITY', 'DEEP2', 3., prefix)
    # [OII] luminosity bins (log10 erg/s)
    lbins = n.arange(40.5, 44, 0.25)
    x_lum = (lbins[1:] + lbins[:-1]) / 2.

    # --- figure 1: mass-measurement completeness vs [OII] luminosity ---
    p.figure(1, (4.5, 4.5))
    p.axes([0.19, 0.17, 0.74, 0.72])
    N_O2_all = n.histogram(deep2['O2_3728_luminosity'][deep2_o2], bins=10**lbins)[0]
    N_O2_mass = n.histogram(deep2['O2_3728_luminosity'][deep2_sel & deep2_o2], bins=10**lbins)[0]
    # density=True replaces the long-deprecated normed=True (removed in
    # NumPy >= 1.24); identical for these uniform bins.
    N_O2_all_normed = n.histogram(n.log10(deep2['O2_3728_luminosity'][deep2_o2]), bins=lbins, density=True)[0]
    ok_o2 = (N_O2_all > 0)  # mask empty bins before taking the ratio
    p.plot(x_lum, N_O2_all_normed / 2., label='normed hist')
    p.plot(x_lum[ok_o2], 1. * N_O2_mass[ok_o2] / N_O2_all[ok_o2], label='DEEP2')
    p.axvline(lO2_min)
    p.title(str(z_min) + '<z<' + str(z_max))
    p.xlabel('[OII] luminosity')
    p.ylabel('[OII] with mass measurement / all [OII] detections')
    p.legend(loc=0, frameon=False)
    p.ylim((-0.01, 1.01))
    p.xlim((40.5, 43.5))
    p.grid()
    p.savefig(os.path.join(out_dir, "SMF_" + prefix + "line_detection_raw_" + "_" + str(z_min) + '_z_' + str(z_max) + ".jpg"))
    p.clf()

    # --- figure 2: stellar mass functions ---
    dlog10m = 0.25
    mbins = n.arange(8, 12.5, dlog10m)
    # weight normalisation: per dex, per comoving volume of the survey area
    norm = dlog10m * n.log(10) * area_deep2 * volume_per_deg2_val
    p.figure(1, (4.5, 4.5))
    p.axes([0.19, 0.17, 0.74, 0.72])
    p.plot(mbins, smf01(10**mbins), label='Ilbert 13, 0.2<z<0.5', ls='dashed')
    p.plot(mbins, smf08(10**mbins), label='Ilbert 13, 0.8<z<1.1', ls='dashed')
    x, y, ye = get_hist(deep2_m[deep2_sel], weights=deep2_w[deep2_sel] / norm, mbins=mbins)
    p.errorbar(x, y, yerr=ye, label='DEEP2', lw=1)
    o2_bright = deep2_sel & deep2_o2 & (deep2['O2_3728_luminosity'] > 10**lO2_min)
    x, y, ye = get_hist(deep2_m[o2_bright], weights=deep2_w[o2_bright] / norm, mbins=mbins)
    p.errorbar(x, y, yerr=ye, label='DEEP2 L([OII])>' + str(lO2_min), lw=1)
    p.title(str(z_min) + '<z<' + str(z_max))
    p.xlabel(r'$\log_{10}$ (stellar mass ' + r" / $M_\odot$ )")
    p.ylabel(r'$\Phi(M)$ [Mpc$^{-3}$ dex$^{-1}$]')
    p.yscale('log')
    p.legend(loc=0, frameon=False)
    p.ylim((1e-8, 1e-2))
    p.xlim((9.5, 12.5))
    p.grid()
    # NOTE(review): the repeated "SMF_"+prefix segments reproduce the
    # original output filename exactly (looks like a historical copy-paste,
    # but existing pipelines may depend on the name).
    p.savefig(os.path.join(out_dir, "SMF_" + prefix + "SMF_" + prefix + "SMF_raw_" + "_" + str(z_min) + '_z_' + str(z_max) + ".jpg"))
    p.clf()

    # --- figure 3: ratio of the [OII]-selected SMF to the full SMF ---
    p.figure(1, (4.5, 4.5))
    p.axes([0.19, 0.17, 0.74, 0.72])
    x, y, ye = get_hist(deep2_m[deep2_sel], weights=deep2_w[deep2_sel] / norm, mbins=mbins)
    p.errorbar(x, y / smf08(10**x), yerr=ye / smf08(10**x), label='DEEP2', lw=1)
    x, y, ye = get_hist(deep2_m[o2_bright], weights=deep2_w[o2_bright] / norm, mbins=mbins)
    p.errorbar(x, y / smf08(10**x), yerr=ye / smf08(10**x), label='DEEP2 L([OII])>' + str(lO2_min), lw=1)
    p.title(str(z_min) + '<z<' + str(z_max))
    p.xlabel(r'$\log_{10}$ (stellar mass ' + r" / $M_\odot$ )")
    p.ylabel(r'$\Phi_{[OII]} / \Phi_{all}(M)$')
    p.yscale('log')
    p.legend(loc=0, frameon=False)
    p.ylim((1e-4, 2.))
    p.xlim((9.5, 12.5))
    p.grid()
    p.savefig(os.path.join(out_dir, "SMF_" + prefix + "ratio_SMF_" + "_" + str(z_min) + '_z_' + str(z_max) + ".jpg"))
    p.clf()


def plotMF_raw_many(prefixs=("Chabrier_ELODIE_",)):
    """Overlay the median DEEP2 SMF over several SPM prefixes.

    For each prefix in ``prefixs`` the full SMF and three [OII]-luminosity
    thresholded SMFs (lO2_min, +0.2 dex, +0.4 dex) are computed; the
    median over prefixes of the (lower+upper)/2 error envelope is plotted
    against the Ilbert+13 0.8<z<1.1 Schechter fit.

    Note: the default is a tuple (not a list) to avoid the shared
    mutable-default pitfall; it is only iterated, so callers passing
    lists are unaffected.
    """
    dlog10m = 0.2
    mbins = n.arange(8, 12.5, dlog10m)
    norm = dlog10m * n.log(10) * area_deep2 * volume_per_deg2_val
    p.figure(1, (4.5, 4.5))
    p.axes([0.19, 0.17, 0.74, 0.72])
    # upper/lower error envelopes, one entry per prefix
    ys_u, yso2_u = [], []
    ys_l, yso2_l = [], []
    yso2P_u, yso2P_l = [], []
    yso2D_u, yso2D_l = [], []
    for prefix in prefixs:
        deep2_sel, deep2_m, deep2_w, deep2_o2, deep2_o3, deep2_hb = get_basic_stat(deep2, 'ZBEST', 'ZQUALITY', 'DEEP2', 2., prefix)
        x, y, ye = get_hist(deep2_m[deep2_sel], weights=deep2_w[deep2_sel] / norm, mbins=mbins)
        ys_u.append(y + ye)
        ys_l.append(y - ye)
        sel0 = deep2_sel & deep2_o2 & (deep2['O2_3728_luminosity'] > 10**lO2_min)
        x, y, ye = get_hist(deep2_m[sel0], weights=deep2_w[sel0] / norm, mbins=mbins)
        yso2_u.append(y + ye)
        yso2_l.append(y - ye)
        selP = deep2_sel & deep2_o2 & (deep2['O2_3728_luminosity'] > 10**(lO2_min + 0.2))
        x, y, ye = get_hist(deep2_m[selP], weights=deep2_w[selP] / norm, mbins=mbins)
        yso2P_u.append(y + ye)
        yso2P_l.append(y - ye)
        selD = deep2_sel & deep2_o2 & (deep2['O2_3728_luminosity'] > 10**(lO2_min + 0.4))
        x, y, ye = get_hist(deep2_m[selD], weights=deep2_w[selD] / norm, mbins=mbins)
        yso2D_u.append(y + ye)
        yso2D_l.append(y - ye)
    # x is identical for every get_hist call (same mbins), so reusing the
    # last value below is safe.
    p.plot(x, (n.median(n.array(ys_l), axis=0) + n.median(n.array(ys_u), axis=0)) / 2., color='r', label='DEEP2')
    p.plot(x, (n.median(n.array(yso2_l), axis=0) + n.median(n.array(yso2_u), axis=0)) / 2., color='b', label='L[OII]>' + str(n.round(lO2_min, 1)))
    p.plot(x, (n.median(n.array(yso2P_l), axis=0) + n.median(n.array(yso2P_u), axis=0)) / 2., color='g', label='L[OII]>' + str(n.round(lO2_min + 0.2, 1)))
    p.plot(x, (n.median(n.array(yso2D_l), axis=0) + n.median(n.array(yso2D_u), axis=0)) / 2., color='m', label='L[OII]>' + str(n.round(lO2_min + 0.4, 1)))
    p.plot(mbins, smf08(10**mbins), label='Ilbert 13, 0.8<z<1.1', color='k')
    p.title(str(z_min) + '<z<' + str(z_max))
    p.xlabel(r'$\log_{10}$ (stellar mass ' + r" / $M_\odot$ )")
    p.ylabel(r'$\Phi(M)$ [Mpc$^{-3}$ dex$^{-1}$]')
    p.yscale('log')
    p.legend(loc=0, frameon=False, fontsize=12)
    p.ylim((1e-8, 1e-2))
    p.xlim((8.5, 12.))
    p.grid()
    p.savefig(os.path.join(out_dir, "all_contour_SMF_raw_" + str(lO2_min) + "_" + str(z_min) + '_z_' + str(z_max) + ".png"))
    p.clf()


if __name__ == "__main__":
    # Guarded so importing this module no longer triggers plotting and
    # sys.exit(); running it as a script behaves exactly as before.
    # plotMF_raw_many(["Chabrier_ELODIE_"])
    plotMF_raw_many(["Chabrier_ELODIE_", "Chabrier_MILES_", "Chabrier_STELIB_"])
    # plotMF_raw_many([...all nine IMF x library combinations...])
    sys.exit()
    # NOTE(review): everything below is unreachable because of sys.exit()
    # above -- kept from the original so single runs can be re-enabled by
    # moving or removing the exit call.
    plotMF_raw("Chabrier_ELODIE_")
    plotMF_raw("Chabrier_MILES_")
    plotMF_raw("Chabrier_STELIB_")
    plotMF_raw("Kroupa_ELODIE_")
    plotMF_raw("Kroupa_MILES_")
    plotMF_raw("Kroupa_STELIB_")
    plotMF_raw("Salpeter_ELODIE_")
    plotMF_raw("Salpeter_MILES_")
    plotMF_raw("Salpeter_STELIB_")
# (end-of-file FIM marker from the original dump removed; it is not Python)
path_2_cosmos_cat = os.path.join( cosmos_dir, "photoz-2.0", "photoz_vers2.0_010312.fits") #path_2_cosmos_cat = os.path.join( cosmos_dir, "COSMOS2015_Laigle+_v1.1.fits.gz")
<|file_name|>bucketlist.py<|end_file_name|><|fim▁begin|>import logging
import pprint

from flask import jsonify, make_response, request
from flask_restful import Resource, reqparse, fields, marshal

from app.models import BucketList
from app.common.db import save_record, delete_record
from app.common.auth.authorize import login_required

logger = logging.getLogger(__name__)

bucketlist_item_fields = {"id": fields.Integer,
                          "name": fields.String,
                          "done": fields.Boolean,
                          "bucketlist_id": fields.Integer,
                          "created_at": fields.DateTime,
                          "updated_at": fields.DateTime
                          }
# Field marshal for bucketlist item

bucketlist_fields = {"id": fields.Integer,
                     "name": fields.String,
                     "description": fields.String,
                     "created_at": fields.DateTime,
                     "updated_at": fields.DateTime,
                     "items": fields.List(fields.Nested(bucketlist_item_fields))
                     }
# Field marshal for bucketlist item (a bucketlist with its nested items)


class BucketListsResource(Resource):
    """
    This class handles creation and getting of bucketlists.

    GET  -> paginated (optionally name-filtered) list of the user's
            bucketlists.
    POST -> create a new bucketlist for the user.

    NOTE(review): `user_id` and `response` appear to be injected by the
    `login_required` decorator -- confirm against
    app.common.auth.authorize before relying on that.
    """
    method_decorators = [login_required]  # applies to all inherited resources

    def __init__(self):
        # JSON body parser used by POST; both fields are mandatory.
        self.parser = reqparse.RequestParser()
        self.parser.add_argument("name", type=str, required=True,
                                 help="bucketlist name is required",
                                 location="json")
        self.parser.add_argument("description", type=str, required=True,
                                 help="bucketlist description is required",
                                 location="json")

    def get(self, user_id=None, response=None):
        """
        This function handles get requests.

        Query-string args: page (default 1), limit (default 20) and q
        (optional case-insensitive name filter).  Returns pagination
        metadata plus the marshalled bucketlists (200), or the
        decorator-supplied error `response` when user_id is absent.
        """
        if user_id is not None:
            # Per-request parser for the pagination/search query string.
            self.reqparse = reqparse.RequestParser()
            self.reqparse.add_argument(
                'page', type=int, location='args', default=1
            )
            self.reqparse.add_argument(
                'limit', type=int, default=20, location='args'
            )
            self.reqparse.add_argument(
                'q', type=str, location='args'
            )
            args = self.reqparse.parse_args()
            q = args['q']
            page = args['page']
            limit = args['limit']
            # Pagination logic
            if q:
                bucketlist = BucketList.query.filter(
                    BucketList.name.\
                    ilike('%' + q + '%'),\
                    BucketList.user_id==user_id)\
                    .paginate(page, limit, False)
            else:
                bucketlist = BucketList.query.filter_by(user_id=user_id)\
                    .paginate(page, limit, False)
            # Build next/prev URLs.  NOTE(review): the literal string
            # 'Null' (not JSON null) marks a missing page -- clients must
            # compare against that exact string.
            if bucketlist.has_next:
                url = request.url.split("?limit")[0]
                next_page = url + '?limit=' + \
                    str(limit) + '&page=' + str(page + 1)
            else:
                next_page = 'Null'
            if bucketlist.has_prev:
                url = request.url.split("?limit")[0]
                prev_page = url + '?limit=' + \
                    str(limit) + '&page=' + str(page - 1)
            else:
                prev_page = 'Null'
            return {'meta': {'next_page': next_page,
                             'prev_page': prev_page,
                             'total_pages': bucketlist.pages
                             },
                    'bucketlists': marshal(bucketlist.items,
                                           bucketlist_fields)
                    }, 200
        return make_response(jsonify({
            "status": response[0],
            "message": response[1]
        }), response[2])

    def post(self, user_id=None, response=None):
        """
        This function handles post requests.

        Creates a bucketlist from the JSON body; 409 when the user already
        has one with the same name, 201 on success.
        """
        args = self.parser.parse_args()
        name = args["name"]
        description = args["description"]
        if user_id is not None:
            if BucketList.query.filter_by(user_id=user_id, name=name).first():
                response = ("failed",
                            "Bucketlist with a similar name exists", 409)
<|fim▁hole|>
                # NOTE(review): the FIM hole above hides the `else:` branch
                # header; it is shown after the end-of-file FIM marker at
                # the bottom of this dump.
                bucketlist = BucketList(name, description, user_id)
                save_record(bucketlist)
                response = ("success", "Bucketlist created successfully", 201)
        return make_response(jsonify({
            "status": response[0],
            "message": response[1]
        }), response[2])


class BucketListResource(Resource):
    """
    This class gets a single bucketlist.

    GET/PUT/DELETE for one bucketlist identified by `id`, scoped to the
    authenticated `user_id`.
    """
    method_decorators = [login_required]  # applies to all inherited resources

    def __init__(self):
        # JSON body parser used by PUT; both fields are mandatory.
        self.parser = reqparse.RequestParser()
        self.parser.add_argument("name", type=str, required=True,
                                 help="bucketlist name is required",
                                 location="json")
        self.parser.add_argument("description", type=str, required=True,
                                 help="bucketlist description is required",
                                 location="json")

    def get(self, id=None, user_id=None, response=None):
        """
        This function handles get requests.

        Returns the marshalled bucketlist (200), 404 when absent, 401 when
        not authenticated.
        """
        # NOTE(review): `user_id and id is not None` parses as
        # `user_id and (id is not None)` -- an authenticated request with
        # id=None falls into the 401 branch; confirm routes always pass id.
        if user_id and id is not None:
            bucketlist = BucketList.query.filter_by(id=id,
                                                    user_id=user_id).first()
            if bucketlist:
                return marshal(bucketlist, bucketlist_fields), 200
            else:
                response = ("failed","Bucketlist not found", 404)
        else:
            response = ("failed",
                        "Please login to access your bucketlists", 401)
        return make_response(jsonify({
            "status": response[0],
            "message": response[1]
        }), response[2])

    def put(self, id=None, user_id=None, response=None):
        """
        This function handles put requests.

        Updates name/description; 409 when the new name collides with
        another of the user's bucketlists, 404 when absent, 401 when not
        authenticated.
        """
        args = self.parser.parse_args()
        name = args["name"]
        description = args["description"]
        if user_id and id is not None:
            bucketlist = BucketList.query.filter_by(id=id,
                                                    user_id=user_id).first()
            if bucketlist:
                if BucketList.query.filter_by(user_id=user_id,
                                              name=name).first():
                    response = ("failed",
                                "Bucketlist with a similar name exists", 409)
                else:
                    bucketlist.name = name
                    bucketlist.description = description
                    # save the newly updated record
                    save_record(bucketlist)
                    response = ("success",
                                "bucketlist updated successfully", 200)
            else:
                response = ("failed", "Bucketlist not found", 404)
        else:
            response = ("failed",
                        "Please login to access your bucketlists", 401)
        return make_response(jsonify({
            "status": response[0],
            "message": response[1]
        }), response[2])

    def delete(self, id=None, user_id=None, response=None):
        """
        This function handles delete requests.

        Deletes the bucketlist; 404 when absent, 401 when not
        authenticated.
        """
        if user_id and id is not None:
            bucketlist = BucketList.query.filter_by(id=id,
                                                    user_id=user_id).first()
            if bucketlist:
                delete_record(bucketlist)
                response = ("success", "Bucketlist deleted successfully", 200)
            else:
                response = ("failed", "Bucketlist not found", 404)
        else:
            response = ("failed",
                        "Please login to access your bucketlists", 401)
        return make_response(jsonify({
            "status": response[0],
            "message": response[1]
        }), response[2])<|fim▁end|>
            # FIM completion for the hole inside BucketListsResource.post:
            else:
<|file_name|>test_M201.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import

from .MockPrinter import MockPrinter
import mock
from random import random


class M201_Tests(MockPrinter):
    """Tests for the M201 (set maximum acceleration) G-code handler.

    Each test sends one M201 line with a random value per axis and
    inspects the vector passed to
    path_planner.native_planner.setAcceleration.
    """

    def setUp(self):
        # Fresh mock per test so call_args reflects only this test's M201.
        self.printer.path_planner.native_planner.setAcceleration = mock.Mock()
        self.printer.axis_config = self.printer.AXIS_CONFIG_XY
        self.printer.speed_factor = 1.0

    def exercise(self):
        """Send one M201 with a random whole-number value per axis.

        Returns a dict with the values sent ("values", keyed by axis
        letter) and the vector the planner received ("call_args").
        """
        values = {}
        gcode = "M201"
        for i, v in enumerate(self.printer.acceleration):
            axis = self.printer.AXES[i]
            values[axis] = round(random() * 9000.0, 0)
            gcode += " {:s}{:.0f}".format(axis, values[axis])
        self.execute_gcode(gcode)
        return {
            "values": values,
            "call_args": self.printer.path_planner.native_planner.setAcceleration.call_args[0][0]
        }

    def test_gcodes_M201_all_axes_G21_mm(self):
        # mm mode: expects value * factor / 3600 per axis.
        # NOTE(review): the exact unit-conversion semantics come from the
        # M201 implementation -- confirm there.
        test_data = self.exercise()
        for i, axis in enumerate(self.printer.AXES):
            expected = round(test_data["values"][axis] * self.printer.factor / 3600.0, 4)
            result = test_data["call_args"][i]
            self.assertEqual(expected, result, axis + ": expected {:.0f} but got {:.0f}".format(expected, result))

    def test_gcodes_M201_all_axes_G20_inches(self):
        # inch mode: same check with factor = 25.4 (mm per inch).
        self.printer.factor = 25.4
        test_data = self.exercise()
        for i, axis in enumerate(self.printer.AXES):
            expected = round(test_data["values"][axis] * self.printer.factor / 3600.0, 4)
            result = test_data["call_args"][i]
            self.assertEqual(expected, result, axis + ": expected {:.0f} but got {:.0f}".format(expected, result))

<|fim▁hole|>
    def test_gcodes_M201_CoreXY(self):
        # CoreXY mechanics couple X and Y, so the planner must receive
        # identical X/Y accelerations regardless of the input values.
        self.printer.axis_config = self.printer.AXIS_CONFIG_CORE_XY
        while True:  # account for remote possibility of two equal random numbers for X and Y
            test_data = self.exercise()
            if test_data["values"]["X"] != test_data["values"]["Y"]:
                break
        self.assertEqual(
            test_data["call_args"][0], test_data["call_args"][1],
            "For CoreXY mechanics, X & Y values must match. But X={}, Y={} (mm/min / 3600)".format(
                test_data["call_args"][0], test_data["call_args"][1]))

    def test_gcodes_M201_H_belt(self):
        # H-belt mechanics also couple X and Y.
        self.printer.axis_config = self.printer.AXIS_CONFIG_H_BELT
        while True:  # account for remote possibility of two equal random numbers for X and Y
            test_data = self.exercise()
            if test_data["values"]["X"] != test_data["values"]["Y"]:
                break
        self.assertEqual(
            test_data["call_args"][0], test_data["call_args"][1],
            "For H-Belt mechanics, X & Y values must match. But X={}, Y={} (mm/min / 3600)".format(
                test_data["call_args"][0], test_data["call_args"][1]))

    def test_gcodes_M201_Delta(self):
        # Delta mechanics couple all three towers: X, Y and Z must match.
        self.printer.axis_config = self.printer.AXIS_CONFIG_DELTA
        while True:  # account for super, ultra-duper remote possibility of three equal random numbers for X , Y and Z
            test_data = self.exercise()
            # NOTE(review): "Y" is used twice below -- the second one was
            # presumably meant to be "Z"; flag for fix.
            if (test_data["values"]["X"] + test_data["values"]["Y"] + test_data["values"]["Y"]) != (
                    test_data["values"]["X"] * 3):
                break
        # NOTE(review): the message text and its two {} placeholders (for
        # three format args) look copy-pasted from the CoreXY test -- the
        # third argument is silently dropped by str.format; flag for fix.
        self.assertEqual(
            test_data["call_args"][0] + test_data["call_args"][1] + test_data["call_args"][2],
            test_data["call_args"][0] * 3,
            "For CoreXY mechanics, X & Y values must match. But X={}, Y={} (mm/min / 3600)".format(
                test_data["call_args"][0], test_data["call_args"][1], test_data["call_args"][2]))<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from sqlalchemy import Column, Integer, String, Sequence, ForeignKey, Enum, Float
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship

from . import Base
from .utils import ModelMixin


class Source(Base, ModelMixin):
    """A data source (provenance) from which records were imported."""
    __tablename__ = 'source'
    __repr_props__ = ['id', 'name']

    # internal id
    id = Column(Integer, Sequence('source_id_seq'), primary_key=True,
                unique=True, nullable=False)
<|fim▁hole|>
    # NOTE(review): the FIM hole above hides the name/description columns;
    # they are shown after the end-of-file FIM marker at the bottom of
    # this dump.


class Species(Base, ModelMixin):
    """An organism that targets belong to."""
    __tablename__ = 'species'
    __repr_props__ = ['id', 'external_id', 'name']

    # internal id
    id = Column(Integer, Sequence('species_id_seq'), primary_key=True,
                unique=True, nullable=False)

    # record origin
    external_id = Column(Integer, unique=True, nullable=False, index=True)
    source_id = Column(Integer, ForeignKey('source.id'), nullable=False)
    source = relationship('Source')

    # species name (not unique: the same name may recur across sources)
    name = Column(String(150), unique=False, nullable=False)


class Compound(Base, ModelMixin):
    """A chemical compound identified by its SMILES string."""
    __tablename__ = 'compound'
    __repr_props__ = ['id', 'external_id']

    # internal id
    id = Column(Integer, Sequence('compound_id_seq'), primary_key=True,
                unique=True, nullable=False)

    # record origin
    external_id = Column(String, unique=True, nullable=False, index=True)
    source_id = Column(Integer, ForeignKey('source.id'), nullable=False)
    source = relationship('Source')

    # structure as a SMILES string
    smiles = Column(String(750), nullable=False)


class Target(Base, ModelMixin):
    """A biological target (e.g. a protein) that activities refer to."""
    __tablename__ = 'target'
    __repr_props__ = ['id', 'external_id']

    # internal id
    id = Column(Integer, Sequence('target_id_seq'), primary_key=True,
                unique=True, nullable=False)

    # record origin
    external_id = Column(String, unique=True, nullable=False, index=True)
    source_id = Column(Integer, ForeignKey('source.id'), nullable=False)
    source = relationship('Source')

    # define species
    species_id = Column(Integer, ForeignKey('species.id'), nullable=False)
    species = relationship('Species', backref='targets')

    # define target sequence
    sequence = Column(String)


# Database-level Enum types shared by the Activity model below.
ASSAYS = Enum('ADMET', 'Binding', 'Functional', 'Property', 'Unassigned',
              name='assay_type')
ACTIVITIES = Enum('Kd', 'AC50', 'Potency', 'XC50', 'IC50', 'Ki', 'EC50',
                  name='activity_type')
RELATIONS = Enum('=', '>', '<', '<=', '>=', name='relation')


class Activity(Base, ModelMixin):
    """A measured bio-activity linking a compound to a target."""
    __tablename__ = 'activity'
    __repr_props__ = ['id', 'compound', 'relation', 'value']

    # internal id
    id = Column(Integer, Sequence('activity_id_seq'), primary_key=True,
                unique=True, nullable=False)

    # record origin (external_id is deliberately not unique: one source
    # record may yield several activity rows)
    external_id = Column(String, nullable=False)
    source_id = Column(Integer, ForeignKey('source.id'), nullable=False)
    source = relationship('Source')  # many to one, no map back

    # define the activity: "<relation> <value>" of the given type
    relation = Column(RELATIONS, nullable=False)
    value = Column(Float, nullable=False)
    assay_type = Column(ASSAYS, nullable=False)
    activity_type = Column(ACTIVITIES, nullable=False)
    confidence_score = Column(Integer, index=True)

    #Link to target
    target_id = Column(Integer, ForeignKey('target.id'), nullable=False)
    target = relationship('Target', backref='activities')

    #Link to compound
    compound_id = Column(Integer, ForeignKey('compound.id'), nullable=False)
    compound = relationship('Compound', backref='activities')

    def __repr__(self):
        # Custom repr including related external ids (note: touching
        # self.target / self.compound may trigger lazy loads).
        return '<Activity(id=\'{id}\' compound=\'{compound}\' '\
               'target=\'{target}\' relation=\'{relation}{value}\')>'\
            .format(id=self.id, relation=self.relation,
                    target=self.target.external_id,
                    compound=self.compound.external_id,
                    value=self.value)<|fim▁end|>
    # FIM completion for the hole inside Source above:
    # describe source
    name = Column(String(50), unique=True, nullable=False)
    description = Column(String(250))
<|file_name|>JsonpMainTemplatePlugin.js<|end_file_name|><|fim▁begin|>/* MIT License http://www.opensource.org/licenses/mit-license.php Author Tobias Koppers @sokra */ "use strict"; const { SyncWaterfallHook } = require("tapable"); const Template = require("../Template"); class JsonpMainTemplatePlugin { apply(mainTemplate) { const needChunkOnDemandLoadingCode = chunk => { for (const chunkGroup of chunk.groupsIterable) { if (chunkGroup.getNumberOfChildren() > 0) return true; } return false; }; const needChunkLoadingCode = chunk => { for (const chunkGroup of chunk.groupsIterable) { if (chunkGroup.chunks.length > 1) return true; if (chunkGroup.getNumberOfChildren() > 0) return true; } return false; }; const needEntryDeferringCode = chunk => { for (const chunkGroup of chunk.groupsIterable) { if (chunkGroup.chunks.length > 1) return true; } return false; }; const needPrefetchingCode = chunk => { const allPrefetchChunks = chunk.getChildIdsByOrdersMap(true).prefetch; return allPrefetchChunks && Object.keys(allPrefetchChunks).length; }; // TODO webpack 5, no adding to .hooks, use WeakMap and static methods ["jsonpScript", "linkPreload", "linkPrefetch"].forEach(hook => { if (!mainTemplate.hooks[hook]) { mainTemplate.hooks[hook] = new SyncWaterfallHook([ "source", "chunk", "hash" ]); } }); const getScriptSrcPath = (hash, chunk, chunkIdExpression) => { const chunkFilename = mainTemplate.outputOptions.chunkFilename; const chunkMaps = chunk.getChunkMaps(); return mainTemplate.getAssetPath(JSON.stringify(chunkFilename), { hash: `" + ${mainTemplate.renderCurrentHashCode(hash)} + "`, hashWithLength: length => `" + ${mainTemplate.renderCurrentHashCode(hash, length)} + "`, chunk: { id: `" + ${chunkIdExpression} + "`, hash: `" + ${JSON.stringify( chunkMaps.hash )}[${chunkIdExpression}] + "`, hashWithLength(length) { const shortChunkHashMap = Object.create(null); for (const chunkId of Object.keys(chunkMaps.hash)) { if (typeof chunkMaps.hash[chunkId] === "string") { 
shortChunkHashMap[chunkId] = chunkMaps.hash[chunkId].substr( 0, length ); } } return `" + ${JSON.stringify( shortChunkHashMap )}[${chunkIdExpression}] + "`; }, name: `" + (${JSON.stringify( chunkMaps.name )}[${chunkIdExpression}]||${chunkIdExpression}) + "`, contentHash: { javascript: `" + ${JSON.stringify( chunkMaps.contentHash.javascript )}[${chunkIdExpression}] + "` }, contentHashWithLength: { javascript: length => { const shortContentHashMap = {}; const contentHash = chunkMaps.contentHash.javascript; for (const chunkId of Object.keys(contentHash)) { if (typeof contentHash[chunkId] === "string") { shortContentHashMap[chunkId] = contentHash[chunkId].substr( 0, length ); } } return `" + ${JSON.stringify( shortContentHashMap )}[${chunkIdExpression}] + "`; } } }, contentHashType: "javascript" }); }; mainTemplate.hooks.localVars.tap( "JsonpMainTemplatePlugin", (source, chunk, hash) => { const extraCode = []; if (needChunkLoadingCode(chunk)) { extraCode.push( "", "// object to store loaded and loading chunks", "// undefined = chunk not loaded, null = chunk preloaded/prefetched", "// Promise = chunk loading, 0 = chunk loaded", "var installedChunks = {", Template.indent( chunk.ids.map(id => `${JSON.stringify(id)}: 0`).join(",\n") ), "};", "", needEntryDeferringCode(chunk) ? 
"var deferredModules = [];" : "" ); } if (needChunkOnDemandLoadingCode(chunk)) { extraCode.push( "", "// script path function", "function jsonpScriptSrc(chunkId) {", Template.indent([ `return ${mainTemplate.requireFn}.p + ${getScriptSrcPath( hash, chunk, "chunkId" )}` ]), "}" ); } if (extraCode.length === 0) return source; return Template.asString([source, ...extraCode]); } ); mainTemplate.hooks.jsonpScript.tap( "JsonpMainTemplatePlugin", (_, chunk, hash) => { const crossOriginLoading = mainTemplate.outputOptions.crossOriginLoading; const chunkLoadTimeout = mainTemplate.outputOptions.chunkLoadTimeout; const jsonpScriptType = mainTemplate.outputOptions.jsonpScriptType; return Template.asString([ "var script = document.createElement('script');", "var onScriptComplete;", jsonpScriptType ? `script.type = ${JSON.stringify(jsonpScriptType)};` : "", "script.charset = 'utf-8';", `script.timeout = ${chunkLoadTimeout / 1000};`, `if (${mainTemplate.requireFn}.nc) {`, Template.indent( `script.setAttribute("nonce", ${mainTemplate.requireFn}.nc);` ), "}", "script.src = jsonpScriptSrc(chunkId);", crossOriginLoading ? Template.asString([ "if (script.src.indexOf(window.location.origin + '/') !== 0) {", Template.indent( `script.crossOrigin = ${JSON.stringify(crossOriginLoading)};` ), "}" ]) : "", "onScriptComplete = function (event) {", Template.indent([ "// avoid mem leaks in IE.", "script.onerror = script.onload = null;", "clearTimeout(timeout);", "var chunk = installedChunks[chunkId];", "if(chunk !== 0) {", Template.indent([ "if(chunk) {", Template.indent([ "var errorType = event && (event.type === 'load' ? 
'missing' : event.type);", "var realSrc = event && event.target && event.target.src;", "var error = new Error('Loading chunk ' + chunkId + ' failed.\\n(' + errorType + ': ' + realSrc + ')');", "error.type = errorType;", "error.request = realSrc;", "chunk[1](error);" ]), "}", "installedChunks[chunkId] = undefined;" ]), "}" ]), "};", "var timeout = setTimeout(function(){", Template.indent([ "onScriptComplete({ type: 'timeout', target: script });" ]), `}, ${chunkLoadTimeout});`, "script.onerror = script.onload = onScriptComplete;" ]); } ); mainTemplate.hooks.linkPreload.tap( "JsonpMainTemplatePlugin", (_, chunk, hash) => { const crossOriginLoading = mainTemplate.outputOptions.crossOriginLoading; const jsonpScriptType = mainTemplate.outputOptions.jsonpScriptType; return Template.asString([ "var link = document.createElement('link');", jsonpScriptType ? `link.type = ${JSON.stringify(jsonpScriptType)};` : "", "link.charset = 'utf-8';", `if (${mainTemplate.requireFn}.nc) {`, Template.indent( `link.setAttribute("nonce", ${mainTemplate.requireFn}.nc);` ), "}", 'link.rel = "preload";', 'link.as = "script";', "link.href = jsonpScriptSrc(chunkId);", crossOriginLoading ? Template.asString([ "if (link.href.indexOf(window.location.origin + '/') !== 0) {", Template.indent( `link.crossOrigin = ${JSON.stringify(crossOriginLoading)};` ), "}" ]) : "" ]); } ); mainTemplate.hooks.linkPrefetch.tap( "JsonpMainTemplatePlugin", (_, chunk, hash) => { const crossOriginLoading = mainTemplate.outputOptions.crossOriginLoading; return Template.asString([ "var link = document.createElement('link');", crossOriginLoading ? 
`link.crossOrigin = ${JSON.stringify(crossOriginLoading)};` : "", `if (${mainTemplate.requireFn}.nc) {`, Template.indent( `link.setAttribute("nonce", ${mainTemplate.requireFn}.nc);` ), "}", 'link.rel = "prefetch";', 'link.as = "script";', "link.href = jsonpScriptSrc(chunkId);" ]); } ); mainTemplate.hooks.requireEnsure.tap( "JsonpMainTemplatePlugin load", (source, chunk, hash) => { return Template.asString([ source, "", "// JSONP chunk loading for javascript", "", "var installedChunkData = installedChunks[chunkId];", 'if(installedChunkData !== 0) { // 0 means "already installed".', Template.indent([ "", '// a Promise means "currently loading".', "if(installedChunkData) {", Template.indent(["promises.push(installedChunkData[2]);"]), "} else {", Template.indent([ "// setup Promise in chunk cache", "var promise = new Promise(function(resolve, reject) {", Template.indent([ "installedChunkData = installedChunks[chunkId] = [resolve, reject];" ]), "});", "promises.push(installedChunkData[2] = promise);", "", "// start chunk loading", mainTemplate.hooks.jsonpScript.call("", chunk, hash), "document.head.appendChild(script);" ]), "}" ]), "}" ]); } ); mainTemplate.hooks.requireEnsure.tap( { name: "JsonpMainTemplatePlugin preload", stage: 10 }, (source, chunk, hash) => { const chunkMap = chunk.getChildIdsByOrdersMap().preload; if (!chunkMap || Object.keys(chunkMap).length === 0) return source; return Template.asString([ source, "", "// chunk preloadng for javascript", "", `var chunkPreloadMap = ${JSON.stringify(chunkMap, null, "\t")};`, "", "var chunkPreloadData = chunkPreloadMap[chunkId];", "if(chunkPreloadData) {", Template.indent([ "chunkPreloadData.forEach(function(chunkId) {", Template.indent([ "if(installedChunks[chunkId] === undefined) {", Template.indent([ "installedChunks[chunkId] = null;", mainTemplate.hooks.linkPreload.call("", chunk, hash), "document.head.appendChild(link);" ]), "}" ]), "});" ]), "}" ]); } ); mainTemplate.hooks.requireExtensions.tap( 
"JsonpMainTemplatePlugin", (source, chunk) => { if (!needChunkOnDemandLoadingCode(chunk)) return source; return Template.asString([ source, "", "// on error function for async loading", `${ mainTemplate.requireFn }.oe = function(err) { console.error(err); throw err; };` ]); } ); mainTemplate.hooks.bootstrap.tap( "JsonpMainTemplatePlugin", (source, chunk, hash) => { if (needChunkLoadingCode(chunk)) { const withDefer = needEntryDeferringCode(chunk); const withPrefetch = needPrefetchingCode(chunk); return Template.asString([ source, "", "// install a JSONP callback for chunk loading", "function webpackJsonpCallback(data) {", Template.indent([ "var chunkIds = data[0];", "var moreModules = data[1];", withDefer ? "var executeModules = data[2];" : "", withPrefetch ? "var prefetchChunks = data[3] || [];" : "", '// add "moreModules" to the modules object,', '// then flag all "chunkIds" as loaded and fire callback', "var moduleId, chunkId, i = 0, resolves = [];", "for(;i < chunkIds.length; i++) {", Template.indent([ "chunkId = chunkIds[i];", "if(installedChunks[chunkId]) {", Template.indent("resolves.push(installedChunks[chunkId][0]);"), "}", "installedChunks[chunkId] = 0;" ]), "}", "for(moduleId in moreModules) {", Template.indent([ "if(Object.prototype.hasOwnProperty.call(moreModules, moduleId)) {", Template.indent( mainTemplate.renderAddModule( hash, chunk, "moduleId", "moreModules[moduleId]" ) ), "}" ]), "}", "if(parentJsonpFunction) parentJsonpFunction(data);", withPrefetch ? Template.asString([ "// chunk prefetching for javascript", "prefetchChunks.forEach(function(chunkId) {", Template.indent([ "if(installedChunks[chunkId] === undefined) {", Template.indent([ "installedChunks[chunkId] = null;", mainTemplate.hooks.linkPrefetch.call("", chunk, hash), "document.head.appendChild(link);" ]), "}" ]), "});" ]) : "", "while(resolves.length) {", Template.indent("resolves.shift()();"), "}", withDefer ? 
Template.asString([ "", "// add entry modules from loaded chunk to deferred list", "deferredModules.push.apply(deferredModules, executeModules || []);", "", "// run deferred modules when all chunks ready", "return checkDeferredModules();" ]) : "" ]), "};", withDefer ? Template.asString([ "function checkDeferredModules() {", Template.indent([ "var result;", "for(var i = 0; i < deferredModules.length; i++) {", Template.indent([ "var deferredModule = deferredModules[i];", "var fulfilled = true;", "for(var j = 1; j < deferredModule.length; j++) {", Template.indent([ "var depId = deferredModule[j];", "if(installedChunks[depId] !== 0) fulfilled = false;" ]), "}", "if(fulfilled) {", Template.indent([ "deferredModules.splice(i--, 1);", "result = " + mainTemplate.requireFn + "(" + mainTemplate.requireFn + ".s = deferredModule[0]);" ]), "}"<|fim▁hole|> "}" ]) : "" ]); } return source; } ); mainTemplate.hooks.beforeStartup.tap( "JsonpMainTemplatePlugin", (source, chunk, hash) => { if (needChunkLoadingCode(chunk)) { var jsonpFunction = mainTemplate.outputOptions.jsonpFunction; var globalObject = mainTemplate.outputOptions.globalObject; return Template.asString([ `var jsonpArray = ${globalObject}[${JSON.stringify( jsonpFunction )}] = ${globalObject}[${JSON.stringify(jsonpFunction)}] || [];`, "var oldJsonpFunction = jsonpArray.push.bind(jsonpArray);", "jsonpArray.push = webpackJsonpCallback;", "jsonpArray = jsonpArray.slice();", "for(var i = 0; i < jsonpArray.length; i++) webpackJsonpCallback(jsonpArray[i]);", "var parentJsonpFunction = oldJsonpFunction;", "", source ]); } return source; } ); mainTemplate.hooks.beforeStartup.tap( "JsonpMainTemplatePlugin", (source, chunk, hash) => { const prefetchChunks = chunk.getChildIdsByOrders().prefetch; if ( needChunkLoadingCode(chunk) && prefetchChunks && prefetchChunks.length ) { return Template.asString([ source, `webpackJsonpCallback([[], {}, 0, ${JSON.stringify( prefetchChunks )}]);` ]); } return source; } ); 
mainTemplate.hooks.startup.tap( "JsonpMainTemplatePlugin", (source, chunk, hash) => { if (needEntryDeferringCode(chunk)) { if (chunk.hasEntryModule()) { const entries = [chunk.entryModule].filter(Boolean).map(m => [m.id].concat( Array.from(chunk.groupsIterable)[0] .chunks.filter(c => c !== chunk) .map(c => c.id) ) ); return Template.asString([ "// add entry module to deferred list", `deferredModules.push(${entries .map(e => JSON.stringify(e)) .join(", ")});`, "// run deferred modules when ready", "return checkDeferredModules();" ]); } else { return Template.asString([ "// run deferred modules from other chunks", "checkDeferredModules();" ]); } } return source; } ); mainTemplate.hooks.hotBootstrap.tap( "JsonpMainTemplatePlugin", (source, chunk, hash) => { const globalObject = mainTemplate.outputOptions.globalObject; const hotUpdateChunkFilename = mainTemplate.outputOptions.hotUpdateChunkFilename; const hotUpdateMainFilename = mainTemplate.outputOptions.hotUpdateMainFilename; const crossOriginLoading = mainTemplate.outputOptions.crossOriginLoading; const hotUpdateFunction = mainTemplate.outputOptions.hotUpdateFunction; const currentHotUpdateChunkFilename = mainTemplate.getAssetPath( JSON.stringify(hotUpdateChunkFilename), { hash: `" + ${mainTemplate.renderCurrentHashCode(hash)} + "`, hashWithLength: length => `" + ${mainTemplate.renderCurrentHashCode(hash, length)} + "`, chunk: { id: '" + chunkId + "' } } ); const currentHotUpdateMainFilename = mainTemplate.getAssetPath( JSON.stringify(hotUpdateMainFilename), { hash: `" + ${mainTemplate.renderCurrentHashCode(hash)} + "`, hashWithLength: length => `" + ${mainTemplate.renderCurrentHashCode(hash, length)} + "` } ); const runtimeSource = Template.getFunctionContent( require("./JsonpMainTemplate.runtime") ) .replace(/\/\/\$semicolon/g, ";") .replace(/\$require\$/g, mainTemplate.requireFn) .replace( /\$crossOriginLoading\$/g, crossOriginLoading ? 
JSON.stringify(crossOriginLoading) : "null" ) .replace(/\$hotMainFilename\$/g, currentHotUpdateMainFilename) .replace(/\$hotChunkFilename\$/g, currentHotUpdateChunkFilename) .replace(/\$hash\$/g, JSON.stringify(hash)); return `${source} function hotDisposeChunk(chunkId) { delete installedChunks[chunkId]; } var parentHotUpdateCallback = ${globalObject}[${JSON.stringify( hotUpdateFunction )}]; ${globalObject}[${JSON.stringify(hotUpdateFunction)}] = ${runtimeSource}`; } ); mainTemplate.hooks.hash.tap("JsonpMainTemplatePlugin", hash => { hash.update("jsonp"); hash.update("6"); }); } } module.exports = JsonpMainTemplatePlugin;<|fim▁end|>
]), "}", "return result;" ]),
<|file_name|>test_airflow.py<|end_file_name|><|fim▁begin|># Copyright The Cloud Custodian Authors. # SPDX-License-Identifier: Apache-2.0 from .common import BaseTest import jmespath class TestApacheAirflow(BaseTest): def test_airflow_environment_value_filter(self): session_factory = self.replay_flight_data('test_airflow_environment_value_filter') p = self.load_policy( { "name": "airflow-name-filter", "resource": "airflow", "filters": [ { "type": "value", "key": "Name", "op": "eq", "value": "testEnvironment", } ] }, session_factory=session_factory, ) resources = p.run() self.assertEqual(len(resources), 1) self.assertEqual(resources[0]['Name'], 'testEnvironment') self.assertEqual(resources[0]['c7n:MatchedFilters'], ['Name']) <|fim▁hole|> kms = session_factory().client('kms') expression = 'KmsKey' p = self.load_policy( { "name": "airflow-kms-filter", "resource": "airflow", "filters": [ { "type": "kms-key", "key": "c7n:AliasName", "value": "alias/mwaa", } ] }, session_factory=session_factory, ) resources = p.run() self.assertTrue(len(resources), 1) aliases = kms.list_aliases(KeyId=(jmespath.search(expression, resources[0]))) self.assertEqual(aliases['Aliases'][0]['AliasName'], 'alias/mwaa') def test_airflow_environment_tag(self): session_factory = self.replay_flight_data('test_airflow_environment_tag') new_tag = {'env': 'dev'} p = self.load_policy( { 'name': 'airflow-tag', 'resource': 'airflow', 'filters': [{ 'tag:env': 'absent' }], 'actions': [{ 'type': 'tag', 'tags': new_tag }] }, session_factory=session_factory ) resources = p.run() self.assertEqual(1, len(resources)) name = resources[0].get('Name') airflow = session_factory().client('mwaa') call = airflow.get_environment(Name=name) self.assertEqual(new_tag, call['Environment'].get('Tags')) def test_airflow_environment_untag(self): session_factory = self.replay_flight_data('test_airflow_environment_untag') p = self.load_policy( { 'name': 'airflow-untag', 'resource': 'airflow', 'filters': [{ 'tag:env': 'dev' }], 
'actions': [{ 'type': 'remove-tag', 'tags': ['env'] }] }, session_factory=session_factory ) resources = p.run() self.assertEqual(1, len(resources)) name = resources[0].get('Name') airflow = session_factory().client('mwaa') call = airflow.get_environment(Name=name) self.assertEqual({}, call['Environment'].get('Tags'))<|fim▁end|>
def test_airflow_environment_kms_filter(self): session_factory = self.replay_flight_data('test_airflow_environment_kms_filter')
<|file_name|>mcts.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>use mcts::{statistics, SearchSettings}; use rand::Rng; use search_graph; use std::{cmp, mem}; #[derive(Clone, Debug)] pub struct Game {} impl statistics::two_player::PlayerMapping for Role { fn player_one() -> Self { Role::Dwarf } fn player_two() -> Self { Role::Troll } fn resolve_player(&self) -> statistics::two_player::Player { match *self { Role::Dwarf => statistics::two_player::Player::One, Role::Troll => statistics::two_player::Player::Two, } } } impl mcts::game::State for crate::state::State { type Action = Action; type PlayerId = Role; fn active_player(&self) -> &Role { &self.active_role() } fn actions<'s>(&'s self) -> Box<dyn Iterator<Item = Action> + 's> { Box::new(self.actions()) } fn do_action(&mut self, action: &Action) { self.do_action(action); } } impl mcts::game::Game for Game { type Action = Action; type PlayerId = Role; type Payoff = statistics::two_player::ScoredPayoff; type State = crate::state::State; type Statistics = statistics::two_player::ScoredStatistics<Role>; fn payoff_of(state: &Self::State) -> Option<Self::Payoff> { if state.terminated() { Some(statistics::two_player::ScoredPayoff { visits: 1, score_one: state.score(Role::Dwarf) as u32, score_two: state.score(Role::Troll) as u32, }) } else { None } } } /// Controls how a game action is selected by the [MCTS /// agent](struct.Agent.html) after MCTS search has terminated and all /// statistics have been gathered. #[derive(Debug, Clone, Copy)] pub enum ActionSelect { /// Select the action that was visited the most times. VisitCount, /// Select the action with the best UCB score. Ucb, } /// Controls how graph compaction is done by the [MCTS agent](struct.Agent.html) /// before each round of MCTS search. 
#[derive(Debug, Clone, Copy)] pub enum GraphCompact { /// Prune the search graph so that the current game state and all its /// descendants are retained, but game states that are not reachable from the /// current game state are removed. Prune, /// Clear the entire search graph. Clear, /// Retain the entire contents of the search graph. Retain, } type SearchGraph = search_graph::Graph<crate::state::State, mcts::graph::VertexData, mcts::graph::EdgeData<Game>>; pub struct Agent<R: Rng> { settings: SearchSettings, iterations: u32, rng: R, action_select: ActionSelect, graph_compact: GraphCompact, graph: SearchGraph, } impl<R: Rng> Agent<R> { pub fn new( settings: SearchSettings, iterations: u32, rng: R, action_select: ActionSelect, graph_compact: GraphCompact, ) -> Self { Agent { settings, iterations, rng, action_select, graph_compact, graph: SearchGraph::new(), } } } fn find_most_visited_child<'a, 'id, R: Rng>( view: &search_graph::view::View< 'a, 'id, crate::state::State, mcts::graph::VertexData, mcts::graph::EdgeData<Game>, >, root: search_graph::view::NodeRef<'id>, mut rng: R, ) -> search_graph::view::EdgeRef<'id> { let mut children = view.children(root); let mut best_child = children.next().unwrap(); let mut best_child_visits = view[best_child].statistics.visits(); let mut reservoir_count = 1u32; for child in children { let visits = view[child].statistics.visits(); match visits.cmp(&best_child_visits) { cmp::Ordering::Less => continue, cmp::Ordering::Equal => { reservoir_count += 1; if !rng.gen_bool(1.0f64 / (reservoir_count as f64)) { continue; } } cmp::Ordering::Greater => reservoir_count = 1, } best_child = child; best_child_visits = visits; } best_child } impl<R: Rng + Send> crate::agent::Agent for Agent<R> { fn propose_action(&mut self, state: &crate::state::State) -> crate::agent::Result { match self.graph_compact { GraphCompact::Prune => { if let Some(node) = self.graph.find_node_mut(state) { search_graph::view::of_node(node, |view, node| { 
view.retain_reachable_from(Some(node).into_iter()); }); } else { mem::swap(&mut self.graph, &mut SearchGraph::new()); } } GraphCompact::Clear => mem::swap(&mut self.graph, &mut SearchGraph::new()), GraphCompact::Retain => (), } // Borrow/copy stuff out of self because the closure passed to of_graph // can't borrow self. let (rng, graph, settings, iterations, action_select) = ( &mut self.rng, &mut self.graph, self.settings.clone(), self.iterations, self.action_select, ); search_graph::view::of_graph(graph, |view| -> crate::agent::Result { let mut rollout = mcts::RolloutPhase::initialize(rng, settings, state.clone(), view); for _ in 0..iterations { let scoring = match rollout.rollout::<mcts::ucb::Rollout>() { Ok(s) => s, Err(e) => return Err(Box::new(e)), }; let backprop = match scoring.score::<mcts::simulation::RandomSimulator>() { Ok(b) => b, Err(e) => return Err(Box::new(e)), }; rollout = backprop .backprop::<mcts::ucb::BestParentBackprop>() .expand(); } let (rng, view) = rollout.recover_components(); let root = view.find_node(state).unwrap(); let child_edge = match action_select { ActionSelect::Ucb => { match mcts::ucb::find_best_child(&view, root, settings.explore_bias, rng) { Ok(child) => child, Err(e) => return Err(Box::new(e)), } } ActionSelect::VisitCount => find_most_visited_child(&view, root, rng), }; // Because search graph de-duplication maps each set of equivalent game // states to a single "canonical" game state, the state in the search graph // that corresponds to `state` may not actually be the game state at `root`. As // a result, actions on the root game state need to be mapped back into the // set of actions on `state`. let transposed_to_state = view.node_state(view.edge_target(child_edge)); for action in state.actions() { let mut actual_to_state = state.clone(); actual_to_state.do_action(&action); if actual_to_state == *transposed_to_state { return Ok(action); } } unreachable!() }) } }<|fim▁end|>
use crate::actions::Action; use crate::Role;
<|file_name|>aws_test.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 The Kubernetes Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package aws import ( "fmt" "io" "reflect" "strings" "testing" "github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/service/ec2" "github.com/aws/aws-sdk-go/service/elb" "github.com/aws/aws-sdk-go/service/autoscaling" "github.com/golang/glog" "k8s.io/kubernetes/pkg/api" "k8s.io/kubernetes/pkg/types" ) const TestClusterId = "clusterid.test" func TestReadAWSCloudConfig(t *testing.T) { tests := []struct { name string reader io.Reader aws AWSServices expectError bool zone string }{ { "No config reader", nil, nil, true, "", }, { "Empty config, no metadata", strings.NewReader(""), nil, true, "", }, { "No zone in config, no metadata", strings.NewReader("[global]\n"), nil, true, "", }, { "Zone in config, no metadata", strings.NewReader("[global]\nzone = eu-west-1a"), nil, false, "eu-west-1a", }, { "No zone in config, metadata does not have zone", strings.NewReader("[global]\n"), NewFakeAWSServices().withAz(""), true, "", }, { "No zone in config, metadata has zone", strings.NewReader("[global]\n"), NewFakeAWSServices(), false, "us-east-1a", }, { "Zone in config should take precedence over metadata", strings.NewReader("[global]\nzone = eu-west-1a"), NewFakeAWSServices(), false, "eu-west-1a", }, } for _, test := range tests { t.Logf("Running test case %s", test.name) var metadata EC2Metadata if test.aws != nil { metadata, _ = test.aws.Metadata() } 
cfg, err := readAWSCloudConfig(test.reader, metadata) if test.expectError { if err == nil { t.Errorf("Should error for case %s (cfg=%v)", test.name, cfg) } } else { if err != nil { t.Errorf("Should succeed for case: %s", test.name) } if cfg.Global.Zone != test.zone { t.Errorf("Incorrect zone value (%s vs %s) for case: %s", cfg.Global.Zone, test.zone, test.name) } } } } type FakeAWSServices struct { availabilityZone string instances []*ec2.Instance instanceId string privateDnsName string networkInterfacesMacs []string networkInterfacesVpcIDs []string internalIP string externalIP string ec2 *FakeEC2 elb *FakeELB asg *FakeASG metadata *FakeMetadata } func NewFakeAWSServices() *FakeAWSServices { s := &FakeAWSServices{} s.availabilityZone = "us-east-1a" s.ec2 = &FakeEC2{aws: s} s.elb = &FakeELB{aws: s} s.asg = &FakeASG{aws: s} s.metadata = &FakeMetadata{aws: s} s.networkInterfacesMacs = []string{"aa:bb:cc:dd:ee:00", "aa:bb:cc:dd:ee:01"} s.networkInterfacesVpcIDs = []string{"vpc-mac0", "vpc-mac1"} s.instanceId = "i-self" s.privateDnsName = "ip-172-20-0-100.ec2.internal" s.internalIP = "192.168.0.1" s.externalIP = "1.2.3.4" var selfInstance ec2.Instance selfInstance.InstanceId = &s.instanceId selfInstance.PrivateDnsName = &s.privateDnsName s.instances = []*ec2.Instance{&selfInstance} var tag ec2.Tag tag.Key = aws.String(TagNameKubernetesCluster) tag.Value = aws.String(TestClusterId) selfInstance.Tags = []*ec2.Tag{&tag} return s } func (s *FakeAWSServices) withAz(az string) *FakeAWSServices { s.availabilityZone = az return s } func (s *FakeAWSServices) withInstances(instances []*ec2.Instance) *FakeAWSServices { s.instances = instances return s } func (s *FakeAWSServices) Compute(region string) (EC2, error) { return s.ec2, nil } func (s *FakeAWSServices) LoadBalancing(region string) (ELB, error) { return s.elb, nil } func (s *FakeAWSServices) Autoscaling(region string) (ASG, error) { return s.asg, nil } func (s *FakeAWSServices) Metadata() (EC2Metadata, error) { return 
s.metadata, nil } func TestFilterTags(t *testing.T) { awsServices := NewFakeAWSServices() c, err := newAWSCloud(strings.NewReader("[global]"), awsServices) if err != nil { t.Errorf("Error building aws cloud: %v", err) return } if len(c.filterTags) != 1 { t.Errorf("unexpected filter tags: %v", c.filterTags) return } if c.filterTags[TagNameKubernetesCluster] != TestClusterId { t.Errorf("unexpected filter tags: %v", c.filterTags) } } func TestNewAWSCloud(t *testing.T) { tests := []struct { name string reader io.Reader awsServices AWSServices expectError bool zone string }{ { "No config reader", nil, NewFakeAWSServices().withAz(""), true, "", }, { "Config specified invalid zone", strings.NewReader("[global]\nzone = blahonga"), NewFakeAWSServices(), true, "", }, { "Config specifies valid zone", strings.NewReader("[global]\nzone = eu-west-1a"), NewFakeAWSServices(), false, "eu-west-1a", }, { "Gets zone from metadata when not in config", strings.NewReader("[global]\n"), NewFakeAWSServices(), false, "us-east-1a", }, { "No zone in config or metadata", strings.NewReader("[global]\n"), NewFakeAWSServices().withAz(""),<|fim▁hole|> } for _, test := range tests { t.Logf("Running test case %s", test.name) c, err := newAWSCloud(test.reader, test.awsServices) if test.expectError { if err == nil { t.Errorf("Should error for case %s", test.name) } } else { if err != nil { t.Errorf("Should succeed for case: %s, got %v", test.name, err) } else if c.availabilityZone != test.zone { t.Errorf("Incorrect zone value (%s vs %s) for case: %s", c.availabilityZone, test.zone, test.name) } } } } type FakeEC2 struct { aws *FakeAWSServices Subnets []*ec2.Subnet DescribeSubnetsInput *ec2.DescribeSubnetsInput } func contains(haystack []*string, needle string) bool { for _, s := range haystack { // (deliberately panic if s == nil) if needle == *s { return true } } return false } func instanceMatchesFilter(instance *ec2.Instance, filter *ec2.Filter) bool { name := *filter.Name if name == 
"private-dns-name" { if instance.PrivateDnsName == nil { return false } return contains(filter.Values, *instance.PrivateDnsName) } panic("Unknown filter name: " + name) } func (self *FakeEC2) DescribeInstances(request *ec2.DescribeInstancesInput) ([]*ec2.Instance, error) { matches := []*ec2.Instance{} for _, instance := range self.aws.instances { if request.InstanceIds != nil { if instance.InstanceId == nil { glog.Warning("Instance with no instance id: ", instance) continue } found := false for _, instanceId := range request.InstanceIds { if *instanceId == *instance.InstanceId { found = true break } } if !found { continue } } if request.Filters != nil { allMatch := true for _, filter := range request.Filters { if !instanceMatchesFilter(instance, filter) { allMatch = false break } } if !allMatch { continue } } matches = append(matches, instance) } return matches, nil } type FakeMetadata struct { aws *FakeAWSServices } func (self *FakeMetadata) GetMetadata(key string) (string, error) { networkInterfacesPrefix := "network/interfaces/macs/" if key == "placement/availability-zone" { return self.aws.availabilityZone, nil } else if key == "instance-id" { return self.aws.instanceId, nil } else if key == "local-hostname" { return self.aws.privateDnsName, nil } else if key == "local-ipv4" { return self.aws.internalIP, nil } else if key == "public-ipv4" { return self.aws.externalIP, nil } else if strings.HasPrefix(key, networkInterfacesPrefix) { if key == networkInterfacesPrefix { return strings.Join(self.aws.networkInterfacesMacs, "/\n") + "/\n", nil } else { keySplit := strings.Split(key, "/") macParam := keySplit[3] if len(keySplit) == 5 && keySplit[4] == "vpc-id" { for i, macElem := range self.aws.networkInterfacesMacs { if macParam == macElem { return self.aws.networkInterfacesVpcIDs[i], nil } } } return "", nil } } else { return "", nil } } func (ec2 *FakeEC2) AttachVolume(request *ec2.AttachVolumeInput) (resp *ec2.VolumeAttachment, err error) { panic("Not implemented") 
} func (ec2 *FakeEC2) DetachVolume(request *ec2.DetachVolumeInput) (resp *ec2.VolumeAttachment, err error) { panic("Not implemented") } func (ec2 *FakeEC2) DescribeVolumes(request *ec2.DescribeVolumesInput) ([]*ec2.Volume, error) { panic("Not implemented") } func (ec2 *FakeEC2) CreateVolume(request *ec2.CreateVolumeInput) (resp *ec2.Volume, err error) { panic("Not implemented") } func (ec2 *FakeEC2) DeleteVolume(request *ec2.DeleteVolumeInput) (resp *ec2.DeleteVolumeOutput, err error) { panic("Not implemented") } func (ec2 *FakeEC2) DescribeSecurityGroups(request *ec2.DescribeSecurityGroupsInput) ([]*ec2.SecurityGroup, error) { panic("Not implemented") } func (ec2 *FakeEC2) CreateSecurityGroup(*ec2.CreateSecurityGroupInput) (*ec2.CreateSecurityGroupOutput, error) { panic("Not implemented") } func (ec2 *FakeEC2) DeleteSecurityGroup(*ec2.DeleteSecurityGroupInput) (*ec2.DeleteSecurityGroupOutput, error) { panic("Not implemented") } func (ec2 *FakeEC2) AuthorizeSecurityGroupIngress(*ec2.AuthorizeSecurityGroupIngressInput) (*ec2.AuthorizeSecurityGroupIngressOutput, error) { panic("Not implemented") } func (ec2 *FakeEC2) RevokeSecurityGroupIngress(*ec2.RevokeSecurityGroupIngressInput) (*ec2.RevokeSecurityGroupIngressOutput, error) { panic("Not implemented") } func (ec2 *FakeEC2) DescribeSubnets(request *ec2.DescribeSubnetsInput) ([]*ec2.Subnet, error) { ec2.DescribeSubnetsInput = request return ec2.Subnets, nil } func (ec2 *FakeEC2) CreateTags(*ec2.CreateTagsInput) (*ec2.CreateTagsOutput, error) { panic("Not implemented") } func (s *FakeEC2) DescribeRouteTables(request *ec2.DescribeRouteTablesInput) ([]*ec2.RouteTable, error) { panic("Not implemented") } func (s *FakeEC2) CreateRoute(request *ec2.CreateRouteInput) (*ec2.CreateRouteOutput, error) { panic("Not implemented") } func (s *FakeEC2) DeleteRoute(request *ec2.DeleteRouteInput) (*ec2.DeleteRouteOutput, error) { panic("Not implemented") } func (s *FakeEC2) ModifyInstanceAttribute(request 
*ec2.ModifyInstanceAttributeInput) (*ec2.ModifyInstanceAttributeOutput, error) { panic("Not implemented") } type FakeELB struct { aws *FakeAWSServices } func (ec2 *FakeELB) CreateLoadBalancer(*elb.CreateLoadBalancerInput) (*elb.CreateLoadBalancerOutput, error) { panic("Not implemented") } func (ec2 *FakeELB) DeleteLoadBalancer(*elb.DeleteLoadBalancerInput) (*elb.DeleteLoadBalancerOutput, error) { panic("Not implemented") } func (ec2 *FakeELB) DescribeLoadBalancers(*elb.DescribeLoadBalancersInput) (*elb.DescribeLoadBalancersOutput, error) { panic("Not implemented") } func (ec2 *FakeELB) RegisterInstancesWithLoadBalancer(*elb.RegisterInstancesWithLoadBalancerInput) (*elb.RegisterInstancesWithLoadBalancerOutput, error) { panic("Not implemented") } func (ec2 *FakeELB) DeregisterInstancesFromLoadBalancer(*elb.DeregisterInstancesFromLoadBalancerInput) (*elb.DeregisterInstancesFromLoadBalancerOutput, error) { panic("Not implemented") } func (ec2 *FakeELB) DetachLoadBalancerFromSubnets(*elb.DetachLoadBalancerFromSubnetsInput) (*elb.DetachLoadBalancerFromSubnetsOutput, error) { panic("Not implemented") } func (ec2 *FakeELB) AttachLoadBalancerToSubnets(*elb.AttachLoadBalancerToSubnetsInput) (*elb.AttachLoadBalancerToSubnetsOutput, error) { panic("Not implemented") } func (ec2 *FakeELB) CreateLoadBalancerListeners(*elb.CreateLoadBalancerListenersInput) (*elb.CreateLoadBalancerListenersOutput, error) { panic("Not implemented") } func (ec2 *FakeELB) DeleteLoadBalancerListeners(*elb.DeleteLoadBalancerListenersInput) (*elb.DeleteLoadBalancerListenersOutput, error) { panic("Not implemented") } func (ec2 *FakeELB) ApplySecurityGroupsToLoadBalancer(*elb.ApplySecurityGroupsToLoadBalancerInput) (*elb.ApplySecurityGroupsToLoadBalancerOutput, error) { panic("Not implemented") } func (elb *FakeELB) ConfigureHealthCheck(*elb.ConfigureHealthCheckInput) (*elb.ConfigureHealthCheckOutput, error) { panic("Not implemented") } type FakeASG struct { aws *FakeAWSServices } func (a *FakeASG) 
UpdateAutoScalingGroup(*autoscaling.UpdateAutoScalingGroupInput) (*autoscaling.UpdateAutoScalingGroupOutput, error) { panic("Not implemented") } func (a *FakeASG) DescribeAutoScalingGroups(*autoscaling.DescribeAutoScalingGroupsInput) (*autoscaling.DescribeAutoScalingGroupsOutput, error) { panic("Not implemented") } func mockInstancesResp(instances []*ec2.Instance) (*AWSCloud, *FakeAWSServices) { awsServices := NewFakeAWSServices().withInstances(instances) return &AWSCloud{ ec2: awsServices.ec2, availabilityZone: awsServices.availabilityZone, metadata: &FakeMetadata{aws: awsServices}, }, awsServices } func mockAvailabilityZone(region string, availabilityZone string) *AWSCloud { awsServices := NewFakeAWSServices().withAz(availabilityZone) return &AWSCloud{ ec2: awsServices.ec2, availabilityZone: awsServices.availabilityZone, region: region, } } func TestList(t *testing.T) { // TODO this setup is not very clean and could probably be improved var instance0 ec2.Instance var instance1 ec2.Instance var instance2 ec2.Instance var instance3 ec2.Instance //0 tag0 := ec2.Tag{ Key: aws.String("Name"), Value: aws.String("foo"), } instance0.Tags = []*ec2.Tag{&tag0} instance0.InstanceId = aws.String("instance0") instance0.PrivateDnsName = aws.String("instance0.ec2.internal") state0 := ec2.InstanceState{ Name: aws.String("running"), } instance0.State = &state0 //1 tag1 := ec2.Tag{ Key: aws.String("Name"), Value: aws.String("bar"), } instance1.Tags = []*ec2.Tag{&tag1} instance1.InstanceId = aws.String("instance1") instance1.PrivateDnsName = aws.String("instance1.ec2.internal") state1 := ec2.InstanceState{ Name: aws.String("running"), } instance1.State = &state1 //2 tag2 := ec2.Tag{ Key: aws.String("Name"), Value: aws.String("baz"), } instance2.Tags = []*ec2.Tag{&tag2} instance2.InstanceId = aws.String("instance2") instance2.PrivateDnsName = aws.String("instance2.ec2.internal") state2 := ec2.InstanceState{ Name: aws.String("running"), } instance2.State = &state2 //3 tag3 := ec2.Tag{ 
Key: aws.String("Name"), Value: aws.String("quux"), } instance3.Tags = []*ec2.Tag{&tag3} instance3.InstanceId = aws.String("instance3") instance3.PrivateDnsName = aws.String("instance3.ec2.internal") state3 := ec2.InstanceState{ Name: aws.String("running"), } instance3.State = &state3 instances := []*ec2.Instance{&instance0, &instance1, &instance2, &instance3} aws, _ := mockInstancesResp(instances) table := []struct { input string expect []string }{ {"blahonga", []string{}}, {"quux", []string{"instance3.ec2.internal"}}, {"a", []string{"instance1.ec2.internal", "instance2.ec2.internal"}}, } for _, item := range table { result, err := aws.List(item.input) if err != nil { t.Errorf("Expected call with %v to succeed, failed with %s", item.input, err) } if e, a := item.expect, result; !reflect.DeepEqual(e, a) { t.Errorf("Expected %v, got %v", e, a) } } } func testHasNodeAddress(t *testing.T, addrs []api.NodeAddress, addressType api.NodeAddressType, address string) { for _, addr := range addrs { if addr.Type == addressType && addr.Address == address { return } } t.Errorf("Did not find expected address: %s:%s in %v", addressType, address, addrs) } func TestNodeAddresses(t *testing.T) { // Note these instances have the same name // (we test that this produces an error) var instance0 ec2.Instance var instance1 ec2.Instance var instance2 ec2.Instance //0 instance0.InstanceId = aws.String("i-self") instance0.PrivateDnsName = aws.String("instance-same.ec2.internal") instance0.PrivateIpAddress = aws.String("192.168.0.1") instance0.PublicIpAddress = aws.String("1.2.3.4") instance0.InstanceType = aws.String("c3.large") state0 := ec2.InstanceState{ Name: aws.String("running"), } instance0.State = &state0 //1 instance1.InstanceId = aws.String("i-self") instance1.PrivateDnsName = aws.String("instance-same.ec2.internal") instance1.PrivateIpAddress = aws.String("192.168.0.2") instance1.InstanceType = aws.String("c3.large") state1 := ec2.InstanceState{ Name: aws.String("running"), } 
instance1.State = &state1 //2 instance2.InstanceId = aws.String("i-self") instance2.PrivateDnsName = aws.String("instance-other.ec2.internal") instance2.PrivateIpAddress = aws.String("192.168.0.1") instance2.PublicIpAddress = aws.String("1.2.3.4") instance2.InstanceType = aws.String("c3.large") state2 := ec2.InstanceState{ Name: aws.String("running"), } instance2.State = &state2 instances := []*ec2.Instance{&instance0, &instance1, &instance2} aws1, _ := mockInstancesResp([]*ec2.Instance{}) _, err1 := aws1.NodeAddresses("instance-mismatch.ec2.internal") if err1 == nil { t.Errorf("Should error when no instance found") } aws2, _ := mockInstancesResp(instances) _, err2 := aws2.NodeAddresses("instance-same.ec2.internal") if err2 == nil { t.Errorf("Should error when multiple instances found") } aws3, _ := mockInstancesResp(instances[0:1]) addrs3, err3 := aws3.NodeAddresses("instance-same.ec2.internal") if err3 != nil { t.Errorf("Should not error when instance found") } if len(addrs3) != 3 { t.Errorf("Should return exactly 3 NodeAddresses") } testHasNodeAddress(t, addrs3, api.NodeInternalIP, "192.168.0.1") testHasNodeAddress(t, addrs3, api.NodeLegacyHostIP, "192.168.0.1") testHasNodeAddress(t, addrs3, api.NodeExternalIP, "1.2.3.4") aws4, fakeServices := mockInstancesResp([]*ec2.Instance{}) fakeServices.externalIP = "2.3.4.5" fakeServices.internalIP = "192.168.0.2" aws4.selfAWSInstance = &awsInstance{nodeName: fakeServices.instanceId} addrs4, err4 := aws4.NodeAddresses(fakeServices.instanceId) if err4 != nil { t.Errorf("unexpected error: %v", err4) } testHasNodeAddress(t, addrs4, api.NodeInternalIP, "192.168.0.2") testHasNodeAddress(t, addrs4, api.NodeExternalIP, "2.3.4.5") } func TestGetRegion(t *testing.T) { aws := mockAvailabilityZone("us-west-2", "us-west-2e") zones, ok := aws.Zones() if !ok { t.Fatalf("Unexpected missing zones impl") } zone, err := zones.GetZone() if err != nil { t.Fatalf("unexpected error %v", err) } if zone.Region != "us-west-2" { 
t.Errorf("Unexpected region: %s", zone.Region) } if zone.FailureDomain != "us-west-2e" { t.Errorf("Unexpected FailureDomain: %s", zone.FailureDomain) } } func TestFindVPCID(t *testing.T) { awsServices := NewFakeAWSServices() c, err := newAWSCloud(strings.NewReader("[global]"), awsServices) if err != nil { t.Errorf("Error building aws cloud: %v", err) return } vpcID, err := c.findVPCID() if err != nil { t.Errorf("Unexpected error:", err) } if vpcID != "vpc-mac0" { t.Errorf("Unexpected vpcID: %s", vpcID) } } func TestLoadBalancerMatchesClusterRegion(t *testing.T) { awsServices := NewFakeAWSServices() c, err := newAWSCloud(strings.NewReader("[global]"), awsServices) if err != nil { t.Errorf("Error building aws cloud: %v", err) return } badELBRegion := "bad-elb-region" errorMessage := fmt.Sprintf("requested load balancer region '%s' does not match cluster region '%s'", badELBRegion, c.region) _, _, err = c.GetLoadBalancer("elb-name", badELBRegion) if err == nil || err.Error() != errorMessage { t.Errorf("Expected GetLoadBalancer region mismatch error.") } serviceName := types.NamespacedName{Namespace: "foo", Name: "bar"} _, err = c.EnsureLoadBalancer("elb-name", badELBRegion, nil, nil, nil, serviceName, api.ServiceAffinityNone, nil) if err == nil || err.Error() != errorMessage { t.Errorf("Expected EnsureLoadBalancer region mismatch error.") } err = c.EnsureLoadBalancerDeleted("elb-name", badELBRegion) if err == nil || err.Error() != errorMessage { t.Errorf("Expected EnsureLoadBalancerDeleted region mismatch error.") } err = c.UpdateLoadBalancer("elb-name", badELBRegion, nil) if err == nil || err.Error() != errorMessage { t.Errorf("Expected UpdateLoadBalancer region mismatch error.") } } func constructSubnets(subnetsIn map[int]map[string]string) (subnetsOut []*ec2.Subnet) { for i := range subnetsIn { subnetsOut = append( subnetsOut, constructSubnet( subnetsIn[i]["id"], subnetsIn[i]["az"], ), ) } return } func constructSubnet(id string, az string) *ec2.Subnet { return 
&ec2.Subnet{ SubnetId: &id, AvailabilityZone: &az, } } func TestSubnetIDsinVPC(t *testing.T) { awsServices := NewFakeAWSServices() c, err := newAWSCloud(strings.NewReader("[global]"), awsServices) if err != nil { t.Errorf("Error building aws cloud: %v", err) return } vpcID := "vpc-deadbeef" // test with 3 subnets from 3 different AZs subnets := make(map[int]map[string]string) subnets[0] = make(map[string]string) subnets[0]["id"] = "subnet-a0000001" subnets[0]["az"] = "af-south-1a" subnets[1] = make(map[string]string) subnets[1]["id"] = "subnet-b0000001" subnets[1]["az"] = "af-south-1b" subnets[2] = make(map[string]string) subnets[2]["id"] = "subnet-c0000001" subnets[2]["az"] = "af-south-1c" awsServices.ec2.Subnets = constructSubnets(subnets) result, err := c.listSubnetIDsinVPC(vpcID) if err != nil { t.Errorf("Error listing subnets: %v", err) return } if len(result) != 3 { t.Errorf("Expected 3 subnets but got %d", len(result)) return } result_set := make(map[string]bool) for _, v := range result { result_set[v] = true } for i := range subnets { if !result_set[subnets[i]["id"]] { t.Errorf("Expected subnet%d '%s' in result: %v", i, subnets[i]["id"], result) return } } // test with 4 subnets from 3 different AZs // add duplicate az subnet subnets[3] = make(map[string]string) subnets[3]["id"] = "subnet-c0000002" subnets[3]["az"] = "af-south-1c" awsServices.ec2.Subnets = constructSubnets(subnets) result, err = c.listSubnetIDsinVPC(vpcID) if err != nil { t.Errorf("Error listing subnets: %v", err) return } if len(result) != 3 { t.Errorf("Expected 3 subnets but got %d", len(result)) return } } func TestIpPermissionExistsHandlesMultipleGroupIds(t *testing.T) { oldIpPermission := ec2.IpPermission{ UserIdGroupPairs: []*ec2.UserIdGroupPair{ {GroupId: aws.String("firstGroupId")}, {GroupId: aws.String("secondGroupId")}, {GroupId: aws.String("thirdGroupId")}, }, } existingIpPermission := ec2.IpPermission{ UserIdGroupPairs: []*ec2.UserIdGroupPair{ {GroupId: 
aws.String("secondGroupId")}, }, } newIpPermission := ec2.IpPermission{ UserIdGroupPairs: []*ec2.UserIdGroupPair{ {GroupId: aws.String("fourthGroupId")}, }, } equals := ipPermissionExists(&existingIpPermission, &oldIpPermission, false) if !equals { t.Errorf("Should have been considered equal since first is in the second array of groups") } equals = ipPermissionExists(&newIpPermission, &oldIpPermission, false) if equals { t.Errorf("Should have not been considered equal since first is not in the second array of groups") } } func TestIpPermissionExistsHandlesRangeSubsets(t *testing.T) { // Two existing scenarios we'll test against emptyIpPermission := ec2.IpPermission{} oldIpPermission := ec2.IpPermission{ IpRanges: []*ec2.IpRange{ {CidrIp: aws.String("10.0.0.0/8")}, {CidrIp: aws.String("192.168.1.0/24")}, }, } // Two already existing ranges and a new one existingIpPermission := ec2.IpPermission{ IpRanges: []*ec2.IpRange{ {CidrIp: aws.String("10.0.0.0/8")}, }, } existingIpPermission2 := ec2.IpPermission{ IpRanges: []*ec2.IpRange{ {CidrIp: aws.String("192.168.1.0/24")}, }, } newIpPermission := ec2.IpPermission{ IpRanges: []*ec2.IpRange{ {CidrIp: aws.String("172.16.0.0/16")}, }, } exists := ipPermissionExists(&emptyIpPermission, &emptyIpPermission, false) if !exists { t.Errorf("Should have been considered existing since we're comparing a range array against itself") } exists = ipPermissionExists(&oldIpPermission, &oldIpPermission, false) if !exists { t.Errorf("Should have been considered existing since we're comparing a range array against itself") } exists = ipPermissionExists(&existingIpPermission, &oldIpPermission, false) if !exists { t.Errorf("Should have been considered existing since 10.* is in oldIpPermission's array of ranges") } exists = ipPermissionExists(&existingIpPermission2, &oldIpPermission, false) if !exists { t.Errorf("Should have been considered existing since 192.* is in oldIpPermission2's array of ranges") } exists = 
ipPermissionExists(&newIpPermission, &emptyIpPermission, false) if exists { t.Errorf("Should have not been considered existing since we compared against a missing array of ranges") } exists = ipPermissionExists(&newIpPermission, &oldIpPermission, false) if exists { t.Errorf("Should have not been considered existing since 172.* is not in oldIpPermission's array of ranges") } } func TestIpPermissionExistsHandlesMultipleGroupIdsWithUserIds(t *testing.T) { oldIpPermission := ec2.IpPermission{ UserIdGroupPairs: []*ec2.UserIdGroupPair{ {GroupId: aws.String("firstGroupId"), UserId: aws.String("firstUserId")}, {GroupId: aws.String("secondGroupId"), UserId: aws.String("secondUserId")}, {GroupId: aws.String("thirdGroupId"), UserId: aws.String("thirdUserId")}, }, } existingIpPermission := ec2.IpPermission{ UserIdGroupPairs: []*ec2.UserIdGroupPair{ {GroupId: aws.String("secondGroupId"), UserId: aws.String("secondUserId")}, }, } newIpPermission := ec2.IpPermission{ UserIdGroupPairs: []*ec2.UserIdGroupPair{ {GroupId: aws.String("secondGroupId"), UserId: aws.String("anotherUserId")}, }, } equals := ipPermissionExists(&existingIpPermission, &oldIpPermission, true) if !equals { t.Errorf("Should have been considered equal since first is in the second array of groups") } equals = ipPermissionExists(&newIpPermission, &oldIpPermission, true) if equals { t.Errorf("Should have not been considered equal since first is not in the second array of groups") } }<|fim▁end|>
true, "", },
<|file_name|>logging.rs<|end_file_name|><|fim▁begin|>use jack_sys as j; use lazy_static::lazy_static; use std::ffi; use std::io::{stderr, Write}; use std::sync::{Mutex, Once}; lazy_static! { static ref INFO_FN: Mutex<Option<fn(&str)>> = Mutex::new(None); static ref ERROR_FN: Mutex<Option<fn(&str)>> = Mutex::new(None); } unsafe extern "C" fn error_wrapper(msg: *const libc::c_char) { let msg = ffi::CStr::from_ptr(msg) .to_str() .unwrap_or("rust failed to interpret error message"); let f = ERROR_FN.lock().unwrap(); match *f { Some(f) => f(msg), None => writeln!(&mut stderr(), "{}", msg).unwrap(), } } unsafe extern "C" fn info_wrapper(msg: *const libc::c_char) { let msg = ffi::CStr::from_ptr(msg) .to_str() .unwrap_or("rust failed to interpret info message"); let f = INFO_FN.lock().unwrap(); match *f { Some(f) => f(msg), None => println!("{}", msg), } } static IS_INFO_CALLBACK_SET: Once = Once::new(); /// Set the global JACK info callback. It is recommended to specify a callback that uses the [log /// crate](https://cratse.io/crates/log). pub fn set_info_callback(info: fn(&str)) { *INFO_FN.lock().unwrap() = Some(info); IS_INFO_CALLBACK_SET.call_once(|| unsafe { j::jack_set_info_function(Some(info_wrapper)) }) } /// Resets the JACK info callback to use stdio. /// Get the info callback that was set using `set_info_callback`. This corresponds to the one set /// using rust-jack, not JACK itself. `None` is returned if rust-jack hasn't set a callback or has /// reset it to use stdout. pub fn info_callback() -> Option<fn(&str)> { *INFO_FN.lock().unwrap() } /// Restores the JACK info callback to the JACK default, which is to write to /// stdout. pub fn reset_info_callback() { *INFO_FN.lock().unwrap() = None; } static IS_ERROR_CALLBACK_SET: Once = Once::new(); /// Set the global JACK info callback. It is recommended to specify a callback that uses the [log /// crate](https://cratse.io/crates/log). 
pub fn set_error_callback(error: fn(&str)) { *ERROR_FN.lock().unwrap() = Some(error); IS_ERROR_CALLBACK_SET.call_once(|| unsafe { j::jack_set_error_function(Some(error_wrapper)) }) } /// Get the error callback that was set using `set_error_callback`. This corresponds to the one set /// using rust-jack, not JACK itself. `None` is returned if rust-jack hasn't set a callback or has<|fim▁hole|> *ERROR_FN.lock().unwrap() } /// Restores the JACK info callback to the JACK default, which is to write to /// stderr. pub fn reset_error_callback() { *ERROR_FN.lock().unwrap() = None; } #[cfg(test)] mod test { use super::*; fn null_log_fn(_: &str) {} #[test] fn logging_can_set_info() { // initial state reset_info_callback(); assert!(info_callback().is_none()); // set set_info_callback(null_log_fn); assert!(info_callback().is_some()); info_callback().unwrap()("Using info callback!."); // reset reset_info_callback(); assert!(info_callback().is_none()); } #[test] fn logging_can_set_error() { // initial state reset_error_callback(); assert!(error_callback().is_none()); // set set_error_callback(null_log_fn); assert!(error_callback().is_some()); error_callback().unwrap()("Using error callback!."); // reset reset_error_callback(); assert!(error_callback().is_none()); } }<|fim▁end|>
/// reset it to use stderr. pub fn error_callback() -> Option<fn(&str)> {
<|file_name|>core_test.py<|end_file_name|><|fim▁begin|>from pytest import fixture from functional.core import ( builder, PreparedImagesOutputChecker, PDFDocumentChecker, DjVuDocumentChecker) @fixture() def checker_classes(): """ Run all checkers in one test for optimization reason. """ return [ PreparedImagesOutputChecker, PDFDocumentChecker, DjVuDocumentChecker] @fixture() def toc_checker_classes(): return [PDFDocumentChecker, DjVuDocumentChecker] def put_transform_contents(builder, directory): builder.save_transform_ini( directory, "[transform]\n" + "justconvert: yes\n") def check_all_valid(builder, checkers): for class_ in checkers: assert builder.valid(class_) def check_all_invalid(builder, checkers): for class_ in checkers: assert not builder.valid(class_) def test_checker_valid_page(builder, checker_classes): builder.create_unused_image("000-001", "0001.jpg") builder.create_used_image("001-002", "0001.jpg") builder.save_images() builder.save_toc([]) put_transform_contents(builder, "000-001") put_transform_contents(builder, "001-002") builder.run_program() check_all_valid(builder, checker_classes) def test_checker_invalid_page(builder, checker_classes): builder.create_used_image("000-001", "0001.jpg") builder.create_unused_image("001-002", "0001.jpg") builder.save_images() builder.save_toc([]) put_transform_contents(builder, "000-001") put_transform_contents(builder, "001-002") builder.run_program() check_all_invalid(builder, checker_classes)<|fim▁hole|> builder.create_used_image("000-001", "0000.jpg") builder.create_used_image("000-001", "0001.jpg") builder.save_images() builder.save_toc([]) put_transform_contents(builder, "000-001") builder.run_program() check_all_valid(builder, checker_classes) def test_checker_valid_reference_override(builder, checker_classes): builder.create_used_image("000-001", "0000.jpg") builder.override_reference_image() builder.save_images() builder.save_toc([]) put_transform_contents(builder, "000-001") builder.run_program() 
check_all_valid(builder, checker_classes) def test_checker_invalid_reference_override(builder, checker_classes): (builder.create_used_image("000-001", "0000.jpg") .add_border(20, 20, 20, 20, (0, 0, 0))) (builder.override_reference_image() .add_border(50, 50, 50, 50, (0, 0, 0))) builder.save_images() builder.save_toc([]) put_transform_contents(builder, "000-001") builder.run_program() check_all_invalid(builder, checker_classes) def test_checker_invalid_order(builder, checker_classes): builder.create_used_image("000-001", "0001.jpg") builder.create_used_image("000-001", "0000.jpg") builder.save_images() builder.save_toc([]) put_transform_contents(builder, "000-001") builder.run_program() check_all_invalid(builder, checker_classes) def test_checker_invalid_count(builder, checker_classes): builder.create_used_image("000-002", "0000.jpg") builder.create_used_image("000-002", "0001.jpg") builder.create_unused_image("000-002", "0002.jpg") builder.save_images() builder.save_toc([]) put_transform_contents(builder, "000-002") builder.run_program() check_all_invalid(builder, checker_classes) def prepare_three_images(builder): for i in range(1, 4): builder.create_used_image("001-003", "%04d.jpg" % i) builder.save_images() put_transform_contents(builder, "001-003") def test_checker_valid_toc(builder, toc_checker_classes): prepare_three_images(builder) builder.save_toc([ [0, 1, "Page 1"], [1, 2, "Page 2"], [0, 3, "Page 3"] ]) builder.run_program() check_all_valid(builder, toc_checker_classes) def test_checker_invalid_level_toc(builder, toc_checker_classes): prepare_three_images(builder) builder.save_toc([ [0, 1, "Page 1"], [1, 2, "Page 2"] ]) builder.run_program() builder.save_toc([ [0, 1, "Page 1"], [0, 2, "Page 2"] ]) check_all_invalid(builder, toc_checker_classes) def test_checker_invalid_pagenum_toc(builder, toc_checker_classes): prepare_three_images(builder) builder.save_toc([ [0, 1, "Page 1"], [1, 2, "Page 2"] ]) builder.run_program() builder.save_toc([ [0, 1, "Page 1"], 
[1, 3, "Page 2"] ]) check_all_invalid(builder, toc_checker_classes) def test_checker_invalid_description_toc(builder, toc_checker_classes): prepare_three_images(builder) builder.save_toc([ [0, 1, "Page 1"], [1, 2, "Page 2"] ]) builder.run_program() builder.save_toc([ [0, 1, "Page 1"], [1, 2, "Page 2 2 2"] ]) check_all_invalid(builder, toc_checker_classes) def test_checker_invalid_toc_extra_line(builder, toc_checker_classes): prepare_three_images(builder) builder.save_toc([ [0, 1, "Page 1"], [1, 2, "Page 2"] ]) builder.run_program() builder.save_toc([ [0, 1, "Page 1"], [1, 2, "Page 2"], [2, 3, "Page 3"] ]) check_all_invalid(builder, toc_checker_classes)<|fim▁end|>
def test_checker_valid_order(builder, checker_classes):
<|file_name|>path_utils.rs<|end_file_name|><|fim▁begin|>use crate::borrow_set::{BorrowData, BorrowSet, TwoPhaseActivation}; use crate::places_conflict; use crate::AccessDepth; use crate::BorrowIndex; use crate::Upvar; use rustc_data_structures::graph::dominators::Dominators; use rustc_middle::mir::BorrowKind; use rustc_middle::mir::{BasicBlock, Body, Field, Location, Place, PlaceRef, ProjectionElem}; use rustc_middle::ty::TyCtxt; /// Returns `true` if the borrow represented by `kind` is /// allowed to be split into separate Reservation and /// Activation phases. pub(super) fn allow_two_phase_borrow(kind: BorrowKind) -> bool { kind.allows_two_phase_borrow() } /// Control for the path borrow checking code #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub(super) enum Control { Continue, Break, } /// Encapsulates the idea of iterating over every borrow that involves a particular path pub(super) fn each_borrow_involving_path<'tcx, F, I, S>( s: &mut S, tcx: TyCtxt<'tcx>, body: &Body<'tcx>, _location: Location, access_place: (AccessDepth, Place<'tcx>), borrow_set: &BorrowSet<'tcx>, candidates: I, mut op: F, ) where F: FnMut(&mut S, BorrowIndex, &BorrowData<'tcx>) -> Control, I: Iterator<Item = BorrowIndex>, { let (access, place) = access_place; // FIXME: analogous code in check_loans first maps `place` to // its base_path. // check for loan restricting path P being used. Accounts for // borrows of P, P.a.b, etc. for i in candidates { let borrowed = &borrow_set[i]; if places_conflict::borrow_conflicts_with_place( tcx, body, borrowed.borrowed_place, borrowed.kind, place.as_ref(), access, places_conflict::PlaceConflictBias::Overlap, ) { debug!( "each_borrow_involving_path: {:?} @ {:?} vs. 
{:?}/{:?}", i, borrowed, place, access ); let ctrl = op(s, i, borrowed); if ctrl == Control::Break { return; } } } } pub(super) fn is_active<'tcx>( dominators: &Dominators<BasicBlock>, borrow_data: &BorrowData<'tcx>, location: Location, ) -> bool { debug!("is_active(borrow_data={:?}, location={:?})", borrow_data, location); let activation_location = match borrow_data.activation_location { // If this is not a 2-phase borrow, it is always active. TwoPhaseActivation::NotTwoPhase => return true, // And if the unique 2-phase use is not an activation, then it is *never* active. TwoPhaseActivation::NotActivated => return false, // Otherwise, we derive info from the activation point `loc`: TwoPhaseActivation::ActivatedAt(loc) => loc, }; // Otherwise, it is active for every location *except* in between // the reservation and the activation: // // X // / // R <--+ Except for this // / \ | diamond // \ / | // A <------+ // | // Z // // Note that we assume that: // - the reservation R dominates the activation A // - the activation A post-dominates the reservation R (ignoring unwinding edges). // // This means that there can't be an edge that leaves A and // comes back into that diamond unless it passes through R. // // Suboptimal: In some cases, this code walks the dominator // tree twice when it only has to be walked once. I am // lazy. -nmatsakis // If dominated by the activation A, then it is active. The // activation occurs upon entering the point A, so this is // also true if location == activation_location. if activation_location.dominates(location, dominators) { return true; } // The reservation starts *on exiting* the reservation block, // so check if the location is dominated by R.successor. If so, // this point falls in between the reservation and location. 
let reserve_location = borrow_data.reserve_location.successor_within_block(); if reserve_location.dominates(location, dominators) { false } else { // Otherwise, this point is outside the diamond, so // consider the borrow active. This could happen for // example if the borrow remains active around a loop (in // which case it would be active also for the point R,<|fim▁hole|>} /// Determines if a given borrow is borrowing local data /// This is called for all Yield expressions on movable generators pub(super) fn borrow_of_local_data(place: Place<'_>) -> bool { // Reborrow of already borrowed data is ignored // Any errors will be caught on the initial borrow !place.is_indirect() } /// If `place` is a field projection, and the field is being projected from a closure type, /// then returns the index of the field being projected. Note that this closure will always /// be `self` in the current MIR, because that is the only time we directly access the fields /// of a closure type. pub(crate) fn is_upvar_field_projection( tcx: TyCtxt<'tcx>, upvars: &[Upvar<'tcx>], place_ref: PlaceRef<'tcx>, body: &Body<'tcx>, ) -> Option<Field> { let mut place_ref = place_ref; let mut by_ref = false; if let Some((place_base, ProjectionElem::Deref)) = place_ref.last_projection() { place_ref = place_base; by_ref = true; } match place_ref.last_projection() { Some((place_base, ProjectionElem::Field(field, _ty))) => { let base_ty = place_base.ty(body, tcx).ty; if (base_ty.is_closure() || base_ty.is_generator()) && (!by_ref || upvars[field.index()].by_ref) { Some(field) } else { None } } _ => None, } }<|fim▁end|>
// which would generate an error). true }
<|file_name|>IMethodRefForm.java<|end_file_name|><|fim▁begin|>/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.harmony.unpack200.bytecode.forms; import org.apache.harmony.unpack200.SegmentConstantPool; import org.apache.harmony.unpack200.bytecode.ByteCode; import org.apache.harmony.unpack200.bytecode.CPInterfaceMethodRef; import org.apache.harmony.unpack200.bytecode.OperandManager; /** <|fim▁hole|> * IMethod references (and only IMethod references). 
*/ public class IMethodRefForm extends ReferenceForm { public IMethodRefForm(int opcode, String name, int[] rewrite) { super(opcode, name, rewrite); } protected int getOffset(OperandManager operandManager) { return operandManager.nextIMethodRef(); } protected int getPoolID() { return SegmentConstantPool.CP_IMETHOD; } /* * (non-Javadoc) * * @see org.apache.harmony.unpack200.bytecode.forms.ByteCodeForm#setByteCodeOperands(org.apache.harmony.unpack200.bytecode.ByteCode, * org.apache.harmony.unpack200.bytecode.OperandTable, * org.apache.harmony.unpack200.Segment) */ public void setByteCodeOperands(ByteCode byteCode, OperandManager operandManager, int codeLength) { super.setByteCodeOperands(byteCode, operandManager, codeLength); final int count = ((CPInterfaceMethodRef) byteCode .getNestedClassFileEntries()[0]).invokeInterfaceCount(); byteCode.getRewrite()[3] = count; } }<|fim▁end|>
* This class implements the byte code form for those bytecodes which have
<|file_name|>people.py<|end_file_name|><|fim▁begin|>from scrapelib import HTTPError<|fim▁hole|>from openstates.utils import LXMLMixin from pupa.scrape import Person, Scraper class UTPersonScraper(Scraper, LXMLMixin): def scrape(self): PARTIES = {"R": "Republican", "D": "Democratic"} representative_url = "http://house.utah.gov/rep/{}" senator_url = "http://senate.utah.gov/senators/district{}.html" json_link = "http://le.utah.gov/data/legislators.json" person_json = self.get(json_link).json() for info in person_json["legislators"]: chamber = "lower" if info["house"] == "H" else "upper" person = Person( name=info["formatName"], district=info["district"], party=PARTIES[info["party"]], image=info["image"], primary_org=chamber, ) person.add_source(json_link) if chamber == "lower": link = representative_url.format(info["id"]) else: link = senator_url.format(info["district"]) try: self.head(link) except HTTPError: self.logger.warning("Bad URL for {}".format(info["formatName"])) else: person.add_link(link) address = info.get("address") email = info.get("email") fax = info.get("fax") # Work phone seems to be the person's non-legislative # office phone, and thus a last option # For example, we called one and got the firm # where he's a lawyer. We're picking # them in order of how likely we think they are # to actually get us to the person we care about. 
phone = info.get("cell") or info.get("homePhone") or info.get("workPhone") if address: person.add_contact_detail( type="address", value=address, note="District Office" ) if phone: person.add_contact_detail( type="voice", value=phone, note="District Office" ) if email: person.add_contact_detail( type="email", value=email, note="District Office" ) if fax: person.add_contact_detail(type="fax", value=fax, note="District Office") BASE_FINANCE_URL = "http://www.disclosures.utah.gov/Search/PublicSearch" conflicts_of_interest = info.get("CofI") or [] finance_reports = info.get("FinanceReport") or [] extra_links = [] for conflict in conflicts_of_interest: extra_links.append(conflict["url"]) for finance in finance_reports: # Some links are just to the base disclosure website # Presumably, these members don't yet have their forms up if finance != BASE_FINANCE_URL: extra_links.append(finance["url"]) if extra_links: person.extras["links"] = extra_links yield person<|fim▁end|>
<|file_name|>Serializer.java<|end_file_name|><|fim▁begin|><|fim▁hole|> * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.tango.logstash.flume.redis.sink.serializer; import org.apache.flume.Event; import org.apache.flume.conf.Configurable; import org.apache.flume.conf.ConfigurableComponent; public interface Serializer extends Configurable, ConfigurableComponent { /** * Serialize an event for storage in Redis * * @param event * Event to serialize * @return Serialized data */ byte[] serialize(Event event) throws RedisSerializerException; }<|fim▁end|>
/** * Copyright 2014 TangoMe Inc. *
<|file_name|>test_dns_relay.py<|end_file_name|><|fim▁begin|># Copyright 2016 Cisco Systems, Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # from binascii import hexlify import mock import socket import unittest from networking_cisco.plugins.cisco.cpnr.cpnr_client import UnexpectedError from networking_cisco.plugins.cisco.cpnr.cpnr_dns_relay_agent import ( DnsRelayAgent) from networking_cisco.plugins.cisco.cpnr.cpnr_dns_relay_agent import cfg from networking_cisco.plugins.cisco.cpnr.cpnr_dns_relay_agent import DnsPacket from networking_cisco.plugins.cisco.cpnr.cpnr_dns_relay_agent import OPTS class TestDnsRelayAgent(unittest.TestCase): @mock.patch('networking_cisco.plugins.cisco.' 
'cpnr.cpnr_dns_relay_agent.netns') @mock.patch('socket.socket') def test_open_dns_ext_socket(self, mock_socket, mock_netns): cfg.CONF.register_opts(OPTS, 'cisco_pnr') relay = DnsRelayAgent() mock_netns.iflist.return_value = [] mock_netns.iflist.return_value.append(('lo', '127.0.0.1', '255.0.0.0')) sock = mock_socket.return_value sock.getsockname.return_value = ('127.0.0.1', 123456) sock, addr, port = relay._open_dns_ext_socket() mock_socket.assert_has_calls([ mock.call(socket.AF_INET, socket.SOCK_DGRAM), mock.call().bind(('127.0.0.1', 0)), mock.call().getsockname(), mock.call().connect(('127.0.0.1', 53))] ) # check exception thrown if no interfaces with self.assertRaises(UnexpectedError): mock_netns.iflist.return_value = [] sock, addr, port = relay._open_dns_ext_socket() # check exception thrown if no matching interfaces with self.assertRaises(UnexpectedError): mock_netns.iflist.return_value = [] mock_netns.iflist.return_value.append(('eth0', '10.0.0.10', '255.255.255.0')) sock, addr, port = relay._open_dns_ext_socket() # check matching interface found if not first in list mock_netns.iflist.return_value = [] mock_netns.iflist.return_value.append(('eth0', '10.0.0.10', '255.255.255.0')) mock_netns.iflist.return_value.append(('lo', '127.0.0.1', '255.0.0.0'))<|fim▁hole|> @mock.patch('networking_cisco.plugins.cisco.' 
'cpnr.cpnr_dns_relay_agent.netns') @mock.patch('socket.socket') def test_open_dns_int_socket(self, mock_socket, mock_netns): cfg.CONF.register_opts(OPTS, 'cisco_pnr') relay = DnsRelayAgent() mock_netns.iflist.return_value = [] mock_netns.iflist.return_value.append(('eth0', '10.21.1.13', '255.255.255.0')) sock, addr, port = relay._open_dns_int_socket() self.assertTrue(mock_netns.iflist.called, "Failed to call iflist.") mock_socket.assert_has_calls([ mock.call(socket.AF_INET, socket.SOCK_DGRAM), mock.call().setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1), mock.call().bind(('10.21.1.13', 53))] ) # check exception thrown if no interfaces with self.assertRaises(UnexpectedError): mock_netns.iflist.return_value = [] sock, addr, port = relay._open_dns_int_socket() def test_convert_namespace_to_viewid(self): cfg.CONF.register_opts(OPTS, 'cisco_pnr') relay = DnsRelayAgent() namespace = 'qdhcp-d7c31f74-5d9e-47b7-86f2-64879023c04d' viewid = relay._convert_namespace_to_viewid(namespace) tmp = 0x64879023c04d & 0x7fffffff self.assertEqual(viewid, str(tmp)) class TestDnsPacket(unittest.TestCase): def test_parse(self): # test regular DNS request line = ('84 a5 01 00 00 01 00 00 00 00 00 00 06 72 ' '65 64 68 61 74 03 63 6f 6d 00 00 01 00 01') buf = bytearray.fromhex(line) pkt = DnsPacket.parse(buf, 28) self.assertEqual(0x84a5, pkt.get_msgid()) self.assertTrue(pkt.isreq) self.assertEqual(0, pkt.arcnt) self.assertEqual(0, pkt.optlen) self.assertEqual(28, pkt.txt_insert_pos) # test DNS request with EDNS0 line = ('81 71 01 20 00 01 00 00 00 00 00 01 06 72 65 ' '64 68 61 74 03 63 6f 6d 00 00 01 00 01 00 00 ' '29 10 00 00 00 00 00 00 00') buf = bytearray.fromhex(line) pkt = DnsPacket.parse(buf, 38) self.assertEqual(0x8171, pkt.get_msgid()) self.assertTrue(pkt.isreq) self.assertEqual(1, pkt.arcnt) self.assertEqual(10, pkt.optlen) self.assertEqual(28, pkt.txt_insert_pos) # test regular DNS response line = ('b6 5e 81 80 00 01 00 01 00 00 00 00 06 72 65 ' '64 68 61 74 03 63 6f 6d 00 00 
01 00 01 c0 0c ' '00 01 00 01 00 00 00 08 00 04 d1 84 b7 69') buf = bytearray.fromhex(line) pkt = DnsPacket.parse(buf, 44) self.assertEqual(0xb65e, pkt.get_msgid()) self.assertFalse(pkt.isreq) self.assertEqual(0, pkt.arcnt) self.assertEqual(0, pkt.optlen) self.assertEqual(-1, pkt.txt_insert_pos) def test_set_viewid(self): pkt = DnsPacket() pkt.set_viewid('123456789') self.assertEqual(pkt.viewid, '123456789') def test_data(self): # call with regular DNS request line = ('84 a5 01 00 00 01 00 00 00 00 00 00 06 72 ' '65 64 68 61 74 03 63 6f 6d 00 00 01 00 01') buf = bytearray.fromhex(line) pktbuf = bytearray(4096) pktbuf[0:len(buf)] = buf pkt = DnsPacket.parse(pktbuf, 28) pkt.set_viewid('123456') mod_buf = pkt.data() self.assertEqual(pkt.arcnt, 1) hextxtstr = hexlify(DnsPacket.TXT_RR) hexstr = hexlify(mod_buf) self.assertNotEqual(-1, hexstr.find(hextxtstr)) # call with DNS request with EDNS0 line = ('81 71 01 20 00 01 00 00 00 00 00 01 06 72 65 ' '64 68 61 74 03 63 6f 6d 00 00 01 00 01 00 00 ' '29 10 00 00 00 00 00 00 00') buf = bytearray.fromhex(line) pktbuf = bytearray(4096) pktbuf[0:len(buf)] = buf pkt = DnsPacket.parse(pktbuf, 38) pkt.set_viewid('123456') mod_buf = pkt.data() self.assertEqual(2, pkt.arcnt) hexstr = hexlify(mod_buf) self.assertNotEqual(-1, hexstr.find(hextxtstr)) def test_skip_over_domain_name(self): # test skip over name at beginning, end up on ^ # 4test5cisco3com0^ bytes = bytearray(b'\x04\x74\x65\x73\x74\x05\x63\x69\x73\x63' b'\x6f\x03\x63\x6f\x6d\x00\x5e') pos = DnsPacket.skip_over_domain_name(bytes, 0) self.assertEqual(16, pos) self.assertEqual('^', chr(bytes[pos])) # test skip over name in the middle, end up on ^ # 2552552552554test5cisco3com0^ bytes = bytearray(b'\xff\xff\xff\xff\x04\x74\x65\x73\x74\x05\x63' b'\x69\x73\x63\x6f\x03\x63\x6f\x6d\x00\x5e') pos = DnsPacket.skip_over_domain_name(bytes, 4) self.assertEqual(20, pos) self.assertEqual('^', chr(bytes[pos])) # test skip over length and pointer at beginning, end up on ^ bytes = 
bytearray(b'\xc0\x55\x5e') pos = DnsPacket.skip_over_domain_name(bytes, 0) self.assertEqual(2, pos) self.assertEqual('^', chr(bytes[pos])) # test skip over length and pointer in the middle, end up on ^ bytes = bytearray(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xc0\x55\x5e') pos = DnsPacket.skip_over_domain_name(bytes, 9) self.assertEqual(11, pos) self.assertEqual('^', chr(bytes[pos]))<|fim▁end|>
sock, addr, port = relay._open_dns_ext_socket()
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: UTF-8 -*- # YaBlog # (c) Regis FLORET # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # * Neither the name of the <organization> nor the # names of its contributors may be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL Regis FLORET BE LIABLE FOR ANY<|fim▁hole|># (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND # ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.<|fim▁end|>
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
<|file_name|>grpc.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # import warnings from typing import Callable, Dict, Optional, Sequence, Tuple from google.api_core import grpc_helpers # type: ignore from google.api_core import gapic_v1 # type: ignore import google.auth # type: ignore<|fim▁hole|> from google.ads.googleads.v8.resources.types import gender_view from google.ads.googleads.v8.services.types import gender_view_service from .base import GenderViewServiceTransport, DEFAULT_CLIENT_INFO class GenderViewServiceGrpcTransport(GenderViewServiceTransport): """gRPC backend transport for GenderViewService. Service to manage gender views. This class defines the same methods as the primary client, so the primary client can load the underlying transport implementation and call it. It sends protocol buffers over the wire using gRPC (which is built on top of HTTP/2); the ``grpcio`` package must be installed. """ def __init__( self, *, host: str = "googleads.googleapis.com", credentials: ga_credentials.Credentials = None, credentials_file: str = None, scopes: Sequence[str] = None, channel: grpc.Channel = None, api_mtls_endpoint: str = None, client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, ssl_channel_credentials: grpc.ChannelCredentials = None, quota_project_id: Optional[str] = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiate the transport. 
Args: host (Optional[str]): The hostname to connect to. credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. This argument is ignored if ``channel`` is provided. credentials_file (Optional[str]): A file with credentials that can be loaded with :func:`google.auth.load_credentials_from_file`. This argument is ignored if ``channel`` is provided. scopes (Optional(Sequence[str])): A list of scopes. This argument is ignored if ``channel`` is provided. channel (Optional[grpc.Channel]): A ``Channel`` instance through which to make calls. api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. If provided, it overrides the ``host`` argument and tries to create a mutual TLS channel with client SSL credentials from ``client_cert_source`` or applicatin default SSL credentials. client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): Deprecated. A callback to provide client SSL certificate bytes and private key bytes, both in PEM format. It is ignored if ``api_mtls_endpoint`` is None. ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials for grpc channel. It is ignored if ``channel`` is provided. quota_project_id (Optional[str]): An optional project to use for billing and quota. client_info (google.api_core.gapic_v1.client_info.ClientInfo): The client info used to send a user-agent string along with API requests. If ``None``, then default info will be used. Generally, you only need to set this if you're developing your own client library. Raises: google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport creation failed for any reason. """ self._ssl_channel_credentials = ssl_channel_credentials if channel: # Sanity check: Ensure that channel and credentials are not both # provided. 
credentials = False # If a channel was explicitly provided, set it. self._grpc_channel = channel self._ssl_channel_credentials = None elif api_mtls_endpoint: warnings.warn( "api_mtls_endpoint and client_cert_source are deprecated", DeprecationWarning, ) host = ( api_mtls_endpoint if ":" in api_mtls_endpoint else api_mtls_endpoint + ":443" ) if credentials is None: credentials, _ = google.auth.default( scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id ) # Create SSL credentials with client_cert_source or application # default SSL credentials. if client_cert_source: cert, key = client_cert_source() ssl_credentials = grpc.ssl_channel_credentials( certificate_chain=cert, private_key=key ) else: ssl_credentials = SslCredentials().ssl_credentials # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, credentials_file=credentials_file, ssl_credentials=ssl_credentials, scopes=scopes or self.AUTH_SCOPES, quota_project_id=quota_project_id, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) self._ssl_channel_credentials = ssl_credentials else: host = host if ":" in host else host + ":443" if credentials is None: credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES) # create a new channel. The provided one is ignored. self._grpc_channel = type(self).create_channel( host, credentials=credentials, ssl_credentials=ssl_channel_credentials, scopes=self.AUTH_SCOPES, options=[ ("grpc.max_send_message_length", -1), ("grpc.max_receive_message_length", -1), ], ) self._stubs = {} # type: Dict[str, Callable] # Run the base constructor. super().__init__( host=host, credentials=credentials, client_info=client_info, ) @classmethod def create_channel( cls, host: str = "googleads.googleapis.com", credentials: ga_credentials.Credentials = None, scopes: Optional[Sequence[str]] = None, **kwargs, ) -> grpc.Channel: """Create and return a gRPC channel object. 
Args: address (Optionsl[str]): The host for the channel to use. credentials (Optional[~.Credentials]): The authorization credentials to attach to requests. These credentials identify this application to the service. If none are specified, the client will attempt to ascertain the credentials from the environment. scopes (Optional[Sequence[str]]): A optional list of scopes needed for this service. These are only used when credentials are not specified and are passed to :func:`google.auth.default`. kwargs (Optional[dict]): Keyword arguments, which are passed to the channel creation. Returns: grpc.Channel: A gRPC channel object. """ return grpc_helpers.create_channel( host, credentials=credentials, scopes=scopes or cls.AUTH_SCOPES, **kwargs, ) @property def grpc_channel(self) -> grpc.Channel: """Return the channel designed to connect to this service. """ return self._grpc_channel @property def get_gender_view( self, ) -> Callable[ [gender_view_service.GetGenderViewRequest], gender_view.GenderView ]: r"""Return a callable for the get gender view method over gRPC. Returns the requested gender view in full detail. List of thrown errors: `AuthenticationError <>`__ `AuthorizationError <>`__ `HeaderError <>`__ `InternalError <>`__ `QuotaError <>`__ `RequestError <>`__ Returns: Callable[[~.GetGenderViewRequest], ~.GenderView]: A function that, when called, will call the underlying RPC on the server. """ # Generate a "stub function" on-the-fly which will actually make # the request. # gRPC handles serialization and deserialization, so we just need # to pass in the functions for each. if "get_gender_view" not in self._stubs: self._stubs["get_gender_view"] = self.grpc_channel.unary_unary( "/google.ads.googleads.v8.services.GenderViewService/GetGenderView", request_serializer=gender_view_service.GetGenderViewRequest.serialize, response_deserializer=gender_view.GenderView.deserialize, ) return self._stubs["get_gender_view"] __all__ = ("GenderViewServiceGrpcTransport",)<|fim▁end|>
from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore import grpc # type: ignore
<|file_name|>OCAPI.js<|end_file_name|><|fim▁begin|>var Client = require('node-rest-client').Client; //REST server properties var host_url = "http://sapient5-evaluation-dw.demandware.net"; var api_path = "/s/SiteGenesis/dw/shop/v17_2/"; var server_url = host_url+api_path; var client_id = "5a40714c-52c3-44df-a00d-9d3bb2dc8ea8"; var max_suggestion = 2; var getURL = function(method){ return server_url+method; }; var client = new Client(); client.registerMethod("products",getURL('products')+"/${id}?all_images=true&expand=images,prices&client_id="+client_id, "GET"); client.registerMethod("productimage",getURL('products')+"/${id}/images?all_images=true&client_id="+client_id, "GET"); client.registerMethod("searchsuggestion",getURL('search_suggestion')+"?q=${arg1}&count=${arg2}&client_id="+client_id, "GET"); client.registerMethod("getCategories",getURL('categories')+"/${id}?level=${2}&client_id="+client_id, "GET"); client.registerMethod("searchproducts",getURL('product_search')+"?q=${arg1}&refine1=cgid=${arg2}&count=100&expand=images,prices&client_id="+client_id, "GET"); module.exports = { getProduct : function(product_id, callbackmethod) { var args = { path: { "id": product_id } // path substitution var }; client.methods.products(args,callbackmethod); }, getSuggestion : function(query,callbackmethod) { var args = { path: { "arg1" : query, "arg2" : max_suggestion } }; client.methods.searchsuggestion(args, callbackmethod); //http://hostname:port/dw/shop/v17_2/search_suggestion?q={String}&count={Integer}&currency={String}&locale={String} }, getProductImages : function(product_id,callbackmethod) { var args = { path: { "id": product_id } // path substitution var }; client.methods.productimage(args,callbackmethod); //http://hostname:port/dw/shop/v17_2/search_suggestion?q={String}&count={Integer}&currency={String}&locale={String} }, searchProducts : function(callbackmethod,query,refine) { var args = { path: { "arg1" : query, "arg2" : ((refine) ? 
refine : 'root')} }; console.log(JSON.stringify(args)); client.methods.searchproducts(args, callbackmethod); }, getCategories : function(callbackmethod,cgid) { var args = { path: { "id" : ((cgid) ? cgid : 'root'), "arg1" : 1} }; client.methods.getCategories(args, callbackmethod);<|fim▁hole|> //http://sapient3-evaluation-dw.demandware.net/s/SiteGenesis/dw/shop/v17_2/product_search?q=shirt&client_id=5a40714c-52c3-44df-a00d-9d3bb2dc8ea8&expand=images,prices&refine_1=cgid=mens}, };<|fim▁end|>
}
<|file_name|>sql_func.py<|end_file_name|><|fim▁begin|>from django.db import connection from laboratory.settings import TIME_ZONE from utils.db import namedtuplefetchall def get_history_dir(d_s, d_e, card_id, who_create_dir, services, is_serv, iss_pk, is_parent, for_slave_hosp): with connection.cursor() as cursor: cursor.execute( """WITH t_iss AS (SELECT directions_issledovaniya.id as iss_id, directions_napravleniya.client_id, directory_researches.title as res_title, directory_researches.id as res_id, directory_researches.code, directory_researches.is_hospital, directory_researches.is_slave_hospital, directory_researches.is_treatment, directory_researches.is_stom, directory_researches.is_doc_refferal, directory_researches.is_paraclinic, directory_researches.is_form, directory_researches.is_microbiology, directory_researches.podrazdeleniye_id, directions_napravleniya.parent_id, directions_napravleniya.data_sozdaniya, directions_napravleniya.doc_who_create_id, directions_issledovaniya.napravleniye_id, directions_napravleniya.cancel, directions_issledovaniya.time_confirmation, directions_issledovaniya.maybe_onco, to_char(directions_issledovaniya.time_save AT TIME ZONE %(tz)s, 'DD.MM.YYYY-HH24:MI:SS') as ch_time_save, directions_issledovaniya.study_instance_uid, directions_napravleniya.parent_slave_hosp_id, directory_researches.is_application, directory_researches.is_expertise, person_contract.id as person_contract_id, person_contract.dir_list as contract_dirs FROM directions_issledovaniya LEFT JOIN directory_researches ON directions_issledovaniya.research_id = directory_researches.Id LEFT JOIN directions_napravleniya ON directions_issledovaniya.napravleniye_id = directions_napravleniya.id LEFT JOIN directions_personcontract person_contract on directions_napravleniya.num_contract = person_contract.num_contract WHERE directions_napravleniya.data_sozdaniya AT TIME ZONE %(tz)s BETWEEN %(d_start)s AND %(d_end)s AND NOT directory_researches.is_expertise AND CASE WHEN 
%(is_parent)s = TRUE AND %(for_slave_hosp)s = FALSE THEN directions_napravleniya.parent_id = %(iss_pk)s WHEN %(is_parent)s = TRUE AND %(for_slave_hosp)s = TRUE THEN directions_napravleniya.parent_slave_hosp_id = %(iss_pk)s when %(card_id)s > -1 THEN directions_napravleniya.client_id = %(card_id)s when %(who_create)s > -1 THEN directions_napravleniya.doc_who_create_id = %(who_create)s END), t_tubes AS (SELECT tubesregistration_id, issledovaniya_id as tubes_iss_id FROM directions_issledovaniya_tubes WHERE issledovaniya_id IN (SELECT iss_id FROM t_iss)), t_iss_tubes AS (SELECT * from t_iss LEFT JOIN t_tubes ON t_iss.iss_id = t_tubes.tubes_iss_id), t_recive AS (SELECT time_recive, id as id_t_recive FROM directions_tubesregistration WHERE directions_tubesregistration.id in (SELECT tubesregistration_id FROM t_tubes)), t_podrazdeleniye AS (SELECT id AS podr_id, can_has_pacs, title AS podr_title FROM podrazdeleniya_podrazdeleniya) SELECT napravleniye_id, cancel, iss_id, tubesregistration_id, <|fim▁hole|> to_char(time_recive AT TIME ZONE %(tz)s, 'DD.MM.YY HH24:MI:SS.US'), ch_time_save, podr_title, is_hospital, maybe_onco, can_has_pacs, is_slave_hospital, is_treatment, is_stom, is_doc_refferal, is_paraclinic, is_microbiology, parent_id, study_instance_uid, parent_slave_hosp_id, is_form, is_application, is_expertise, person_contract_id, contract_dirs FROM t_iss_tubes LEFT JOIN t_recive ON t_iss_tubes.tubesregistration_id = t_recive.id_t_recive LEFT JOIN t_podrazdeleniye ON t_iss_tubes.podrazdeleniye_id = t_podrazdeleniye.podr_id WHERE CASE WHEN %(is_serv)s = TRUE THEN res_id = ANY(ARRAY[%(services_p)s]) WHEN %(is_serv)s = FALSE THEN EXISTS (SELECT res_id FROM t_iss) END ORDER BY napravleniye_id DESC""", params={ 'd_start': d_s, 'd_end': d_e, 'card_id': card_id, 'who_create': who_create_dir, 'services_p': services, 'is_serv': is_serv, 'tz': TIME_ZONE, 'iss_pk': iss_pk, 'is_parent': is_parent, 'for_slave_hosp': for_slave_hosp, }, ) row = cursor.fetchall() return row def 
get_patient_contract(d_s, d_e, card_pk,): with connection.cursor() as cursor: cursor.execute( """ SELECT directions_napravleniya.num_contract, directions_personcontract.id, directions_personcontract.cancel, directions_personcontract.create_at, directions_personcontract.sum_contract, to_char(directions_personcontract.create_at AT TIME ZONE %(tz)s, 'DD.MM.YY') as date_create, directions_issledovaniya.napravleniye_id, directions_issledovaniya.coast, directions_issledovaniya.discount, directory_researches.title, directions_personcontract.dir_list FROM directions_issledovaniya LEFT JOIN directory_researches ON directory_researches.id=directions_issledovaniya.research_id LEFT JOIN directions_napravleniya ON directions_napravleniya.id=directions_issledovaniya.napravleniye_id LEFT JOIN directions_personcontract ON directions_personcontract.num_contract=directions_napravleniya.num_contract WHERE directions_issledovaniya.napravleniye_id::varchar in ( select regexp_split_to_table(directions_personcontract.dir_list, ',') from directions_personcontract where directions_personcontract.patient_card_id=%(card_pk)s and directions_personcontract.create_at AT TIME ZONE %(tz)s BETWEEN %(d_start)s AND %(d_end)s ) order by directions_personcontract.create_at DESC """, params={ 'd_start': d_s, 'd_end': d_e, 'tz': TIME_ZONE, 'card_pk': card_pk, }, ) rows = namedtuplefetchall(cursor) return rows def get_lab_podr(): with connection.cursor() as cursor: cursor.execute( """ SELECT id FROM public.podrazdeleniya_podrazdeleniya WHERE p_type=2 """ ) row = cursor.fetchall() return row def get_confirm_direction(d_s, d_e, lab_podr, is_lab=False, is_paraclinic=False, is_doc_refferal=False): with connection.cursor() as cursor: cursor.execute( """ SELECT DISTINCT ON (napravleniye_id) napravleniye_id FROM public.directions_issledovaniya WHERE time_confirmation AT TIME ZONE %(tz)s BETWEEN %(d_start)s AND %(d_end)s AND research_id IN (SELECT id FROM directory_researches WHERE CASE WHEN %(is_lab)s = FALSE 
AND %(is_paraclinic)s = TRUE AND %(is_doc_refferal)s = FALSE THEN is_paraclinic = TRUE WHEN %(is_lab)s = FALSE AND %(is_paraclinic)s = FALSE AND %(is_doc_refferal)s = TRUE THEN is_doc_refferal = TRUE WHEN %(is_lab)s = FALSE AND %(is_paraclinic)s = TRUE AND %(is_doc_refferal)s = TRUE THEN is_paraclinic = TRUE or is_doc_refferal = TRUE WHEN %(is_lab)s = TRUE AND %(is_paraclinic)s = FALSE AND %(is_doc_refferal)s = FALSE THEN podrazdeleniye_id = ANY(ARRAY[%(lab_podr)s]) WHEN %(is_lab)s = TRUE AND %(is_paraclinic)s = TRUE AND %(is_doc_refferal)s = FALSE THEN is_paraclinic = TRUE and is_doc_refferal = FALSE or podrazdeleniye_id = ANY(ARRAY[%(lab_podr)s]) WHEN %(is_lab)s = TRUE AND %(is_paraclinic)s = FALSE AND %(is_doc_refferal)s = TRUE THEN is_paraclinic = FALSE and is_doc_refferal = TRUE or podrazdeleniye_id = ANY(ARRAY[%(lab_podr)s]) WHEN %(is_lab)s = TRUE AND %(is_paraclinic)s = TRUE AND %(is_doc_refferal)s = TRUE THEN is_paraclinic = TRUE or is_doc_refferal = TRUE or podrazdeleniye_id = ANY(ARRAY[%(lab_podr)s]) END ) """, params={'d_start': d_s, 'd_end': d_e, 'tz': TIME_ZONE, 'is_lab': is_lab, 'is_paraclinic': is_paraclinic, 'is_doc_refferal': is_doc_refferal, 'lab_podr': lab_podr}, ) row = cursor.fetchall() return row def filter_direction_department(list_dirs, podrazdeleniye_id): with connection.cursor() as cursor: cursor.execute( """ SELECT DISTINCT ON (id) id FROM public.directions_napravleniya WHERE id = ANY(ARRAY[%(num_dirs)s]) AND doc_id IN (SELECT id from users_doctorprofile WHERE podrazdeleniye_id = %(podrazdeleniye_id)s) """, params={'num_dirs': list_dirs, 'podrazdeleniye_id': podrazdeleniye_id}, ) row = cursor.fetchall() return row def filter_direction_doctor(list_dirs, doc_id): with connection.cursor() as cursor: cursor.execute( """ SELECT DISTINCT ON (id) id FROM public.directions_napravleniya WHERE id = ANY(ARRAY[%(num_dirs)s]) AND doc_id = %(doc_id)s """, params={'num_dirs': list_dirs, 'doc_id': doc_id}, ) row = cursor.fetchall() return row def 
get_confirm_direction_pathology(d_s, d_e): with connection.cursor() as cursor: cursor.execute( """ SELECT DISTINCT ON (napravleniye_id) napravleniye_id FROM public.directions_issledovaniya WHERE time_confirmation AT TIME ZONE %(tz)s BETWEEN %(d_start)s AND %(d_end)s AND research_id IN (SELECT id FROM public.directory_researches where title ILIKE '%%профпатолог%%') """, params={'d_start': d_s, 'd_end': d_e, 'tz': TIME_ZONE}, ) row = cursor.fetchall() return row def get_confirm_direction_patient_year(d_s, d_e, lab_podr, card_pk1, is_lab=False, is_paraclinic=False, is_doc_refferal=False): with connection.cursor() as cursor: cursor.execute( """ SELECT directions_napravleniya.id as direction, directions_issledovaniya.time_confirmation, to_char(directions_issledovaniya.time_confirmation AT TIME ZONE %(tz)s, 'DD.MM.YYYY') as ch_time_confirmation, directions_issledovaniya.research_id, directory_researches.title as research_title FROM directions_napravleniya INNER JOIN directions_issledovaniya ON (directions_napravleniya.id = directions_issledovaniya.napravleniye_id) AND directions_issledovaniya.research_id IN (SELECT directory_researches.id FROM directory_researches WHERE CASE WHEN %(is_lab)s = TRUE THEN directory_researches.podrazdeleniye_id = ANY(ARRAY[%(lab_podr)s]) WHEN %(is_doc_refferal)s = TRUE THEN is_doc_refferal = TRUE WHEN %(is_paraclinic)s = TRUE THEN is_paraclinic = TRUE END ) LEFT JOIN directory_researches ON directions_issledovaniya.research_id=directory_researches.id WHERE directions_issledovaniya.time_confirmation IS NOT NULL AND directions_issledovaniya.time_confirmation AT TIME ZONE 'ASIA/Irkutsk' BETWEEN %(d_start)s AND %(d_end)s AND NOT EXISTS (SELECT directions_issledovaniya.napravleniye_id FROM directions_issledovaniya WHERE time_confirmation IS NULL AND directions_issledovaniya.napravleniye_id = directions_napravleniya.id) AND client_id=%(card_pk)s ORDER BY directions_issledovaniya.time_confirmation DESC, directions_napravleniya.id """, params={ 
'd_start': d_s, 'd_end': d_e, 'tz': TIME_ZONE, 'is_lab': is_lab, 'is_paraclinic': is_paraclinic, 'is_doc_refferal': is_doc_refferal, 'lab_podr': lab_podr, 'card_pk': card_pk1, }, ) rows = namedtuplefetchall(cursor) return rows def direction_by_card(d_s, d_e, card_id): with connection.cursor() as cursor: cursor.execute( """ SELECT directions_issledovaniya.id as iss_id, directions_issledovaniya.napravleniye_id, directions_issledovaniya.time_confirmation, to_char(directions_issledovaniya.time_confirmation AT TIME ZONE %(tz)s, 'DD.MM.YYYY') date_confirm, to_char(directions_issledovaniya.time_save AT TIME ZONE %(tz)s, 'DD.MM.YYYY-HH24:MI:SS') as ch_time_save, directions_issledovaniya.study_instance_uid, directory_researches.title as research_title, directory_researches.id as research_id, directory_researches.is_hospital, directory_researches.is_slave_hospital, directory_researches.is_treatment, directory_researches.is_stom, directory_researches.is_doc_refferal, directory_researches.is_paraclinic, directory_researches.is_form, directory_researches.is_microbiology, directory_researches.is_application, directory_researches.is_expertise, directory_researches.podrazdeleniye_id, directions_napravleniya.parent_slave_hosp_id, directions_napravleniya.client_id, directions_napravleniya.parent_id, directions_napravleniya.data_sozdaniya, to_char(data_sozdaniya AT TIME ZONE %(tz)s, 'DD.MM.YY') as date_create, directions_napravleniya.cancel FROM directions_issledovaniya LEFT JOIN directory_researches ON directions_issledovaniya.research_id = directory_researches.id LEFT JOIN directions_napravleniya ON directions_issledovaniya.napravleniye_id = directions_napravleniya.id WHERE directions_napravleniya.data_sozdaniya AT TIME ZONE %(tz)s BETWEEN %(d_start)s AND %(d_end)s AND directions_napravleniya.client_id = %(card_id)s AND NOT directory_researches.is_expertise AND NOT directory_researches.is_hospital AND NOT directory_researches.is_slave_hospital AND NOT 
directory_researches.is_application ORDER BY directions_issledovaniya.napravleniye_id DESC""", params={ 'd_start': d_s, 'd_end': d_e, 'card_id': card_id, 'tz': TIME_ZONE, }, ) rows = namedtuplefetchall(cursor) return rows def get_type_confirm_direction(directions_tuple): if not directions_tuple: return [] with connection.cursor() as cursor: cursor.execute( """ SELECT DISTINCT (directions_issledovaniya.napravleniye_id) as napravleniye_id, directory_researches.podrazdeleniye_id, directory_researches.is_stom, directory_researches.is_doc_refferal, directory_researches.is_paraclinic, directory_researches.is_form, directory_researches.is_microbiology, directory_researches.is_application FROM directions_issledovaniya LEFT JOIN directory_researches ON directions_issledovaniya.research_id = directory_researches.id WHERE directions_issledovaniya.napravleniye_id in %(directions_tuple)s ORDER BY directions_issledovaniya.napravleniye_id DESC""", params={ 'directions_tuple': directions_tuple, }, ) rows = namedtuplefetchall(cursor) return rows<|fim▁end|>
res_id, res_title, to_char(data_sozdaniya AT TIME ZONE %(tz)s, 'DD.MM.YY') as date_create, time_confirmation,
<|file_name|>auction_collection.js<|end_file_name|><|fim▁begin|>/** * Auction collection */ 'use strict'; var Model = require('../models/auction_model.js');<|fim▁hole|> model: Model }); module.exports = AuctionCollection;<|fim▁end|>
var Collection = require('tungstenjs/adaptors/backbone').Collection; var AuctionCollection = Collection.extend({
<|file_name|>compiler.hh<|end_file_name|><|fim▁begin|>// Copyright (c) 2019 ASMlover. All rights reserved. //<|fim▁hole|>// are met: // // * Redistributions of source code must retain the above copyright // notice, this list ofconditions and the following disclaimer. // // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in // the documentation and/or other materialsprovided with the // distribution. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS // FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE // COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, // INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, // BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; // LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER // CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT // LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN // ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. #pragma once #include "value.hh" namespace wrencc { class Compiler; FunctionObject* compile(WrenVM& vm, ModuleObject* module, const str_t& source_bytes, bool is_expression = false, bool print_errors = false); void mark_compiler(WrenVM& vm, Compiler* compiler); }<|fim▁end|>
// Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions
<|file_name|>flow_label.py<|end_file_name|><|fim▁begin|># =============================================================================== # Copyright 2013 Jake Ross # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # =============================================================================== # ============= enthought library imports ======================= from chaco.data_label import DataLabel from chaco.plot_label import PlotLabel # ============= standard library imports ======================== from numpy import max from traits.api import Bool, Str # ============= local library imports ========================== from pychron.pipeline.plot.overlays.mean_indicator_overlay import MovableMixin try: class FlowPlotLabel(PlotLabel, MovableMixin): def overlay(self, component, gc, *args, **kw): if self.ox: self.x = self.ox - self.offset_x self.y = self.oy - self.offset_y super(FlowPlotLabel, self).overlay(component, gc, *args, **kw) def hittest(self, pt): x, y = pt w, h = self.get_preferred_size() return abs(x - self.x) < w and abs(y - self.y) < h except TypeError: # documentation auto doc hack class FlowPlotLabel: pass class FlowDataLabel(DataLabel): """ this label repositions itself if doesn't fit within the its component bounds. 
<|fim▁hole|> constrain_x = Bool(True) constrain_y = Bool(True) # position_event=Event id = Str # _ox=None # def _draw(self, gc, **kw): # self.font='modern 18' # gc.set_font(self.font) # print 'draw', self.font # super(FlowDataLabel, self)._draw(gc,**kw) # def _set_x(self, val): # super(FlowDataLabel, self)._set_x(val) # if self._ox is None: # self._ox = val # elif self._ox != val: # self.position_event=(self.x, self.y) # # def _set_y(self, val): # super(FlowDataLabel, self)._set_y(val) # if val>0: # self.position_event = (self.x, self.y) def overlay(self, component, gc, *args, **kw): # face name was getting set to "Helvetica" by reportlab during pdf generation # set face_name back to "" to prevent font display issue. see issue #72 self.font.face_name = "" super(FlowDataLabel, self).overlay(component, gc, *args, **kw) def do_layout(self, **kw): DataLabel.do_layout(self, **kw) ws, hs = self._cached_line_sizes.T if self.constrain_x: w = max(ws) d = self.component.x2 - (self.x + w + 3 * self.border_padding) if d < 0: self.x += d self.x = max((self.x, 0)) if self.constrain_y: h = max(hs) self.y = max((self.y, 0)) yd = self.component.y2 - h - 2 * self.border_padding - self.line_spacing self.y = min((self.y, yd)) # ============= EOF =============================================<|fim▁end|>
"""
<|file_name|>bitcoin_ca_ES.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="ca_ES" version="2.1"> <context> <name>AboutDialog</name> <message> <location filename="../forms/aboutdialog.ui" line="+14"/> <source>About BitSeeds</source> <translation>Sobre BitSeeds</translation> </message> <message> <location line="+39"/> <source>&lt;b&gt;BitSeeds&lt;/b&gt; version</source> <translation>versió &lt;b&gt;BitSeeds&lt;/b&gt;</translation> </message> <message> <location line="+41"/> <source>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The BitSeeds developers</source> <translation>Copyright © 2009-2014 The Bitcoin developers Copyright © 2012-2014 The NovaCoin developers Copyright © 2014 The BitSeeds developers</translation> </message> <message> <location line="+15"/> <source> This is experimental software. Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php. 
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young ([email protected]) and UPnP software written by Thomas Bernard.</source> <translation>\n Aquest és software experimental.\n\n Distribuït sota llicència de software MIT/11, veure l&apos;arxiu COPYING o http://www.opensource.org/licenses/mit-license.php.\n\nAquest producte inclou software desarrollat pel projecte OpenSSL per a l&apos;ús de OppenSSL Toolkit (http://www.openssl.org/) i de software criptogràfic escrit per l&apos;Eric Young ([email protected]) i software UPnP escrit per en Thomas Bernard.</translation> </message> </context> <context> <name>AddressBookPage</name> <message> <location filename="../forms/addressbookpage.ui" line="+14"/> <source>Address Book</source> <translation>Llibreta d&apos;adreces</translation> </message> <message> <location line="+22"/> <source>Double-click to edit address or label</source> <translation>Feu doble clic per editar l&apos;adreça o l&apos;etiqueta</translation> </message> <message> <location line="+27"/> <source>Create a new address</source> <translation>Crear una nova adreça</translation> </message> <message> <location line="+14"/> <source>Copy the currently selected address to the system clipboard</source> <translation>Copiar l&apos;adreça seleccionada al porta-retalls del sistema</translation> </message> <message> <location line="-11"/> <source>&amp;New Address</source> <translation>&amp;Nova adreça</translation> </message> <message> <location line="-46"/> <source>These are your BitSeeds addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source> <translation>Aquestes són les teves adreces de BitSeeds per rebre els pagaments. 
És possible que vulgueu donar una diferent a cada remitent per a poder realitzar un seguiment de qui li está pagant.</translation> </message> <message> <location line="+60"/> <source>&amp;Copy Address</source> <translation>&amp;Copiar adreça</translation> </message> <message> <location line="+11"/> <source>Show &amp;QR Code</source> <translation>Mostra el códi &amp;QR</translation> </message> <message> <location line="+11"/> <source>Sign a message to prove you own a BitSeeds address</source> <translation>Signar un missatge per demostrar que és propietari d&apos;una adreça BitSeeds</translation> </message> <message> <location line="+3"/> <source>Sign &amp;Message</source> <translation>Signar &amp;Message</translation> </message> <message> <location line="+25"/> <source>Delete the currently selected address from the list</source> <translation>Esborrar l&apos;adreça sel·leccionada</translation> </message> <message> <location line="-14"/> <source>Verify a message to ensure it was signed with a specified BitSeeds address</source> <translation>Comproveu el missatge per assegurar-se que es va signar amb una adreça BitSeeds especificada.</translation> </message> <message> <location line="+3"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar el missatge</translation> </message> <message> <location line="+14"/> <source>&amp;Delete</source> <translation>&amp;Esborrar</translation> </message> <message> <location filename="../addressbookpage.cpp" line="+65"/> <source>Copy &amp;Label</source> <translation>Copiar &amp;Etiqueta</translation> </message> <message> <location line="+2"/> <source>&amp;Edit</source> <translation>&amp;Editar</translation> </message> <message> <location line="+250"/> <source>Export Address Book Data</source> <translation>Exportar dades de la llibreta d&apos;adreces </translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arxiu de separació per comes (*.csv)</translation> 
</message> <message> <location line="+13"/> <source>Error exporting</source> <translation>Error a l&apos;exportar</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No s&apos;ha pogut escriure al fitxer %1.</translation> </message> </context> <context> <name>AddressTableModel</name> <message> <location filename="../addresstablemodel.cpp" line="+144"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Adreça</translation> </message> <message> <location line="+36"/> <source>(no label)</source> <translation>(sense etiqueta)</translation> </message> </context> <context> <name>AskPassphraseDialog</name> <message> <location filename="../forms/askpassphrasedialog.ui" line="+26"/> <source>Passphrase Dialog</source> <translation>Dialeg de contrasenya</translation> </message> <message> <location line="+21"/> <source>Enter passphrase</source> <translation>Introdueix contrasenya</translation> </message> <message> <location line="+14"/> <source>New passphrase</source> <translation>Nova contrasenya</translation> </message> <message> <location line="+14"/> <source>Repeat new passphrase</source> <translation>Repeteix la nova contrasenya</translation> </message> <message> <location line="+33"/> <source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source> <translation>Serveix per desactivar l&apos;enviament trivial de diners quan el compte del sistema operatiu ha estat compromès. 
No ofereix seguretat real.</translation> </message> <message> <location line="+3"/> <source>For staking only</source> <translation>Només per a fer &quot;stake&quot;</translation> </message> <message> <location filename="../askpassphrasedialog.cpp" line="+35"/> <source>Enter the new passphrase to the wallet.&lt;br/&gt;Please use a passphrase of &lt;b&gt;10 or more random characters&lt;/b&gt;, or &lt;b&gt;eight or more words&lt;/b&gt;.</source> <translation>Introdueixi la nova contrasenya al moneder&lt;br/&gt;Si us plau useu una contrasenya de &lt;b&gt;10 o més caracters aleatoris&lt;/b&gt;, o &lt;b&gt;vuit o més paraules&lt;/b&gt;.</translation> </message> <message> <location line="+1"/> <source>Encrypt wallet</source> <translation>Xifrar el moneder</translation> </message> <message> <location line="+7"/> <source>This operation needs your wallet passphrase to unlock the wallet.</source> <translation>Aquesta operació requereix la seva contrasenya del moneder per a desbloquejar-lo.</translation> </message> <message> <location line="+5"/> <source>Unlock wallet</source> <translation>Desbloqueja el moneder</translation> </message> <message> <location line="+3"/> <source>This operation needs your wallet passphrase to decrypt the wallet.</source> <translation>Aquesta operació requereix la seva contrasenya del moneder per a desencriptar-lo.</translation> </message> <message> <location line="+5"/> <source>Decrypt wallet</source> <translation>Desencripta el moneder</translation> </message> <message> <location line="+3"/> <source>Change passphrase</source> <translation>Canviar la contrasenya</translation> </message> <message> <location line="+1"/> <source>Enter the old and new passphrase to the wallet.</source> <translation>Introdueixi tant l&apos;antiga com la nova contrasenya de moneder.</translation> </message> <message> <location line="+46"/> <source>Confirm wallet encryption</source> <translation>Confirmar l&apos;encriptació del moneder</translation> </message> <message> 
<location line="+1"/> <source>Warning: If you encrypt your wallet and lose your passphrase, you will &lt;b&gt;LOSE ALL OF YOUR COINS&lt;/b&gt;!</source> <translation>Avís: Si xifra el seu moneder i perd la contrasenya, podrà &lt;b&gt; PERDRE TOTES LES SEVES MONEDES &lt;/ b&gt;!</translation> </message> <message> <location line="+0"/> <source>Are you sure you wish to encrypt your wallet?</source> <translation>Esteu segur que voleu encriptar el vostre moneder?</translation> </message> <message> <location line="+15"/> <source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source> <translation>IMPORTANT: Tota copia de seguretat que hagis realitzat hauria de ser reemplaçada pel, recentment generat, arxiu encriptat del moneder.</translation> </message> <message> <location line="+103"/> <location line="+24"/> <source>Warning: The Caps Lock key is on!</source> <translation>Advertència: Les lletres majúscules estàn activades!</translation> </message> <message> <location line="-133"/> <location line="+60"/> <source>Wallet encrypted</source> <translation>Moneder encriptat</translation> </message> <message> <location line="-58"/> <source>BitSeeds will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source> <translation>BitSeeds tancarà ara per acabar el procés de xifrat. 
Recordeu que l&apos;encriptació del seu moneder no pot protegir completament les seves monedes de ser robades pel malware que pugui infectar al seu equip.</translation> </message> <message> <location line="+13"/> <location line="+7"/> <location line="+44"/> <location line="+6"/> <source>Wallet encryption failed</source> <translation>L&apos;encriptació del moneder ha fallat</translation> </message> <message> <location line="-56"/> <source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source> <translation>L&apos;encriptació del moneder ha fallat per un error intern. El seu moneder no ha estat encriptat.</translation> </message> <message> <location line="+7"/> <location line="+50"/> <source>The supplied passphrases do not match.</source> <translation>La contrasenya introduïda no coincideix.</translation> </message> <message> <location line="-38"/> <source>Wallet unlock failed</source> <translation>El desbloqueig del moneder ha fallat</translation> </message> <message> <location line="+1"/> <location line="+12"/> <location line="+19"/> <source>The passphrase entered for the wallet decryption was incorrect.</source> <translation>La contrasenya introduïda per a desencriptar el moneder és incorrecte.</translation> </message> <message> <location line="-20"/> <source>Wallet decryption failed</source> <translation>La desencriptació del moneder ha fallat</translation> </message> <message> <location line="+14"/> <source>Wallet passphrase was successfully changed.</source> <translation>La contrasenya del moneder ha estat modificada correctament.</translation> </message> </context> <context> <name>BitcoinGUI</name> <message> <location filename="../bitcoingui.cpp" line="+280"/> <source>Sign &amp;message...</source> <translation>Signar &amp;missatge...</translation> </message> <message> <location line="+242"/> <source>Synchronizing with network...</source> <translation>Sincronitzant amb la xarxa ...</translation> </message> <message> <location 
line="-308"/> <source>&amp;Overview</source> <translation>&amp;Panorama general</translation> </message> <message> <location line="+1"/> <source>Show general overview of wallet</source> <translation>Mostra panorama general del moneder</translation> </message> <message> <location line="+17"/> <source>&amp;Transactions</source> <translation>&amp;Transaccions</translation> </message> <message> <location line="+1"/> <source>Browse transaction history</source> <translation>Cerca a l&apos;historial de transaccions</translation> </message> <message> <location line="+5"/> <source>&amp;Address Book</source> <translation>&amp;Llibreta d&apos;adreces</translation> </message> <message> <location line="+1"/> <source>Edit the list of stored addresses and labels</source> <translation>Edició de la llista d&apos;adreces i etiquetes emmagatzemades</translation> </message> <message> <location line="-13"/> <source>&amp;Receive coins</source> <translation>&amp;Rebre monedes</translation> </message> <message> <location line="+1"/> <source>Show the list of addresses for receiving payments</source> <translation>Mostra la llista d&apos;adreces per rebre pagaments</translation> </message> <message> <location line="-7"/> <source>&amp;Send coins</source> <translation>&amp;Enviar monedes</translation> </message> <message> <location line="+35"/> <source>E&amp;xit</source> <translation>S&amp;ortir</translation> </message> <message> <location line="+1"/> <source>Quit application</source> <translation>Sortir de l&apos;aplicació</translation> </message> <message> <location line="+4"/> <source>Show information about BitSeeds</source> <translation>Mostra informació sobre BitSeeds</translation> </message> <message> <location line="+2"/> <source>About &amp;Qt</source> <translation>Sobre &amp;Qt</translation> </message> <message> <location line="+1"/> <source>Show information about Qt</source> <translation>Mostra informació sobre Qt</translation> </message> <message> <location line="+2"/> 
<source>&amp;Options...</source> <translation>&amp;Opcions...</translation> </message> <message> <location line="+4"/> <source>&amp;Encrypt Wallet...</source> <translation>&amp;Xifrar moneder</translation> </message> <message> <location line="+3"/> <source>&amp;Backup Wallet...</source> <translation>&amp;Realitzar copia de seguretat del moneder...</translation> </message> <message> <location line="+2"/> <source>&amp;Change Passphrase...</source> <translation>&amp;Canviar contrasenya...</translation> </message> <message numerus="yes"> <location line="+250"/> <source>~%n block(s) remaining</source> <translation><numerusform>~%n bloc restant</numerusform><numerusform>~%n blocs restants</numerusform></translation> </message> <message> <location line="+6"/> <source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source> <translation>Descarregats %1 de %2 blocs d&apos;historial de transaccions (%3% completat).</translation> </message> <message> <location line="-247"/> <source>&amp;Export...</source> <translation>&amp;Exportar...</translation> </message> <message> <location line="-62"/> <source>Send coins to a BitSeeds address</source> <translation>Enviar monedes a una adreça BitSeeds</translation> </message> <message> <location line="+45"/> <source>Modify configuration options for BitSeeds</source> <translation>Modificar les opcions de configuració per a BitSeeds</translation> </message> <message> <location line="+18"/> <source>Export the data in the current tab to a file</source> <translation>Exportar les dades de la pestanya actual a un arxiu</translation> </message> <message> <location line="-14"/> <source>Encrypt or decrypt wallet</source> <translation>Xifrar o desxifrar moneder</translation> </message> <message> <location line="+3"/> <source>Backup wallet to another location</source> <translation>Realitzar còpia de seguretat del moneder a un altre directori</translation> </message> <message> <location line="+2"/> <source>Change the passphrase used for 
wallet encryption</source> <translation>Canviar la constrasenya d&apos;encriptació del moneder</translation> </message> <message> <location line="+10"/> <source>&amp;Debug window</source> <translation>&amp;Finestra de depuració</translation> </message> <message> <location line="+1"/> <source>Open debugging and diagnostic console</source> <translation>Obrir la consola de diagnòstic i depuració</translation> </message> <message> <location line="-5"/> <source>&amp;Verify message...</source> <translation>&amp;Verifica el missatge..</translation> </message> <message> <location line="-200"/> <source>BitSeeds</source> <translation>BitSeeds</translation> </message> <message> <location line="+0"/> <source>Wallet</source> <translation>Moneder</translation> </message> <message> <location line="+178"/> <source>&amp;About BitSeeds</source> <translation>&amp;Sobre BitSeeds</translation> </message> <message> <location line="+9"/> <source>&amp;Show / Hide</source> <translation>&amp;Mostrar / Amagar</translation> </message> <message> <location line="+9"/> <source>Unlock wallet</source> <translation>Desbloquejar el moneder</translation> </message> <message> <location line="+1"/> <source>&amp;Lock Wallet</source> <translation>&amp;Bloquejar moneder</translation> </message> <message> <location line="+1"/> <source>Lock wallet</source> <translation>Bloquejar moneder</translation> </message> <message> <location line="+34"/> <source>&amp;File</source> <translation>&amp;Arxiu</translation> </message> <message> <location line="+8"/> <source>&amp;Settings</source> <translation>&amp;Configuració</translation> </message> <message> <location line="+8"/> <source>&amp;Help</source> <translation>&amp;Ajuda</translation> </message> <message> <location line="+9"/> <source>Tabs toolbar</source> <translation>Barra d&apos;eines de seccions</translation> </message> <message> <location line="+8"/> <source>Actions toolbar</source> <translation>Barra d&apos;eines d&apos;accions</translation> </message> 
<message> <location line="+13"/> <location line="+9"/> <source>[testnet]</source> <translation>[testnet]</translation> </message> <message> <location line="+0"/> <location line="+60"/> <source>BitSeeds client</source> <translation>Client BitSeeds</translation> </message> <message numerus="yes"> <location line="+70"/> <source>%n active connection(s) to BitSeeds network</source> <translation><numerusform>%n connexió activa a la xarxa BitSeeds</numerusform><numerusform>%n connexions actives a la xarxa BitSeeds</numerusform></translation> </message> <message> <location line="+40"/> <source>Downloaded %1 blocks of transaction history.</source> <translation>Descarregats %1 blocs d&apos;historial de transaccions</translation> </message> <message> <location line="+413"/> <source>Staking.&lt;br&gt;Your weight is %1&lt;br&gt;Network weight is %2&lt;br&gt;Expected time to earn reward is %3</source> <translation>Fent &quot;stake&quot;.&lt;br&gt;El teu pes és %1&lt;br&gt;El pes de la xarxa és %2&lt;br&gt;El temps estimat per a guanyar una recompensa és %3</translation> </message> <message> <location line="+6"/> <source>Not staking because wallet is locked</source> <translation>No s&apos;està fent &quot;stake&quot; perquè el moneder està bloquejat</translation> </message> <message> <location line="+2"/> <source>Not staking because wallet is offline</source> <translation>No s&apos;està fent &quot;stake&quot; perquè el moneder està fora de línia</translation> </message> <message> <location line="+2"/> <source>Not staking because wallet is syncing</source> <translation>No s&apos;està fent &quot;stake&quot; perquè el moneder està sincronitzant</translation> </message> <message> <location line="+2"/> <source>Not staking because you don&apos;t have mature coins</source> <translation>No s&apos;està fent &quot;stake&quot; perquè no tens monedes madures</translation> </message> <message numerus="yes"> <location line="-403"/> <source>%n second(s) ago</source> <translation><numerusform>fa %n 
segon</numerusform><numerusform>fa %n segons</numerusform></translation> </message> <message> <location line="-284"/> <source>&amp;Unlock Wallet...</source> <translation>&amp;Desbloquejar moneder</translation> </message> <message numerus="yes"> <location line="+288"/> <source>%n minute(s) ago</source> <translation><numerusform>fa %n minut</numerusform><numerusform>fa %n minuts</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s) ago</source> <translation><numerusform>fa %n hora</numerusform><numerusform>fa %n hores</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s) ago</source> <translation><numerusform>fa %n dia</numerusform><numerusform>fa %n dies</numerusform></translation> </message> <message> <location line="+6"/> <source>Up to date</source> <translation>Al dia</translation> </message> <message> <location line="+7"/> <source>Catching up...</source> <translation>Posar-se al dia ...</translation> </message> <message> <location line="+10"/> <source>Last received block was generated %1.</source> <translation>El darrer bloc rebut s&apos;ha generat %1.</translation> </message> <message> <location line="+59"/> <source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source> <translation>Aquesta transacció es troba sobre el límit de mida. Encara pot enviar-la amb una comissió de %1, aquesta va als nodes que processen la seva transacció i ajuda a mantenir la xarxa. 
Vol pagar la quota?</translation> </message> <message> <location line="+5"/> <source>Confirm transaction fee</source> <translation>Confirmeu comisió</translation> </message> <message> <location line="+27"/> <source>Sent transaction</source> <translation>Transacció enviada</translation> </message> <message> <location line="+1"/> <source>Incoming transaction</source> <translation>Transacció entrant</translation> </message> <message> <location line="+1"/> <source>Date: %1 Amount: %2 Type: %3 Address: %4 </source> <translation>Data: %1\nQuantitat %2\n Tipus: %3\n Adreça: %4\n</translation> </message> <message> <location line="+100"/> <location line="+15"/> <source>URI handling</source> <translation>Manejant URI</translation> </message> <message> <location line="-15"/> <location line="+15"/> <source>URI can not be parsed! This can be caused by an invalid BitSeeds address or malformed URI parameters.</source> <translation>l&apos;URI no es pot analitzar! Això pot ser causat per una adreça BitSeeds no vàlida o paràmetres URI malformats.</translation> </message> <message> <location line="+18"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;unlocked&lt;/b&gt;</source> <translation>El moneder està &lt;b&gt;encriptat&lt;/b&gt; i actualment &lt;b&gt;desbloquejat&lt;/b&gt;</translation> </message> <message> <location line="+10"/> <source>Wallet is &lt;b&gt;encrypted&lt;/b&gt; and currently &lt;b&gt;locked&lt;/b&gt;</source> <translation>El moneder està &lt;b&gt;encriptat&lt;/b&gt; i actualment &lt;b&gt;bloquejat&lt;/b&gt;</translation> </message> <message> <location line="+25"/> <source>Backup Wallet</source> <translation>Realitzar còpia de seguretat del moneder</translation> </message> <message> <location line="+0"/> <source>Wallet Data (*.dat)</source> <translation>Dades del moneder (*.dat)</translation> </message> <message> <location line="+3"/> <source>Backup Failed</source> <translation>Còpia de seguretat fallida</translation> </message> <message> 
<location line="+0"/> <source>There was an error trying to save the wallet data to the new location.</source> <translation>Hi ha un error al tractar de salvar les dades del seu moneder a la nova ubicació.</translation> </message> <message numerus="yes"> <location line="+76"/> <source>%n second(s)</source> <translation><numerusform>%n segon</numerusform><numerusform>%n segons</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n minute(s)</source> <translation><numerusform>%n minut</numerusform><numerusform>%n minuts</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n hour(s)</source> <translation><numerusform>%n hora</numerusform><numerusform>%n hores</numerusform></translation> </message> <message numerus="yes"> <location line="+4"/> <source>%n day(s)</source> <translation><numerusform>%n dia</numerusform><numerusform>%n dies</numerusform></translation> </message> <message> <location line="+18"/> <source>Not staking</source> <translation>No s&apos;està fent &quot;stake&quot; </translation> </message> <message> <location filename="../bitcoin.cpp" line="+109"/> <source>A fatal error occurred. BitSeeds can no longer continue safely and will quit.</source> <translation>S&apos;ha produït un error fatal. 
BitSeeds ja no pot continuar de forma segura i es tancarà.</translation> </message> </context> <context> <name>ClientModel</name> <message> <location filename="../clientmodel.cpp" line="+90"/> <source>Network Alert</source> <translation>Alerta de xarxa</translation> </message> </context> <context> <name>CoinControlDialog</name> <message> <location filename="../forms/coincontroldialog.ui" line="+14"/> <source>Coin Control</source> <translation>Opcions del control de monedes</translation> </message> <message> <location line="+31"/> <source>Quantity:</source> <translation>Quantitat:</translation> </message> <message> <location line="+32"/> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <location line="+48"/> <source>Amount:</source> <translation>Quantitat:</translation> </message> <message> <location line="+32"/> <source>Priority:</source> <translation>Prioritat:</translation> </message> <message> <location line="+48"/> <source>Fee:</source> <translation>Quota:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation>Sortida baixa:</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="+551"/> <source>no</source> <translation>no</translation> </message> <message> <location filename="../forms/coincontroldialog.ui" line="+51"/> <source>After Fee:</source> <translation>Quota posterior:</translation> </message> <message> <location line="+35"/> <source>Change:</source> <translation>Canvi:</translation> </message> <message> <location line="+69"/> <source>(un)select all</source> <translation>(de)seleccionar tot</translation> </message> <message> <location line="+13"/> <source>Tree mode</source> <translation>Mode arbre</translation> </message> <message> <location line="+16"/> <source>List mode</source> <translation>Mode llista</translation> </message> <message> <location line="+45"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> 
<location line="+5"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+5"/> <source>Address</source> <translation>Adreça</translation> </message> <message> <location line="+5"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+5"/> <source>Confirmations</source> <translation>Confirmacions</translation> </message> <message> <location line="+3"/> <source>Confirmed</source> <translation>Confirmat</translation> </message> <message> <location line="+5"/> <source>Priority</source> <translation>Prioritat</translation> </message> <message> <location filename="../coincontroldialog.cpp" line="-515"/> <source>Copy address</source> <translation>Copiar adreça </translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <location line="+26"/> <source>Copy amount</source> <translation>Copiar quantitat</translation> </message> <message> <location line="-25"/> <source>Copy transaction ID</source> <translation>Copiar ID de transacció</translation> </message> <message> <location line="+24"/> <source>Copy quantity</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+2"/> <source>Copy fee</source> <translation>Copiar comisió</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Copiar després de comisió</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Copiar bytes</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Copiar prioritat</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>Copiar sortida baixa</translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>Copiar canvi</translation> </message> <message> <location 
line="+317"/> <source>highest</source> <translation>El més alt</translation> </message> <message> <location line="+1"/> <source>high</source> <translation>Alt</translation> </message> <message> <location line="+1"/> <source>medium-high</source> <translation>mig-alt</translation> </message> <message> <location line="+1"/> <source>medium</source> <translation>mig</translation> </message> <message> <location line="+4"/> <source>low-medium</source> <translation>baix-mig</translation> </message> <message> <location line="+1"/> <source>low</source> <translation>baix</translation> </message> <message> <location line="+1"/> <source>lowest</source> <translation>el més baix</translation> </message> <message> <location line="+155"/> <source>DUST</source> <translation>POLS</translation> </message> <message> <location line="+0"/> <source>yes</source> <translation>sí</translation> </message> <message> <location line="+10"/> <source>This label turns red, if the transaction size is bigger than 10000 bytes. This means a fee of at least %1 per kb is required. Can vary +/- 1 Byte per input.</source> <translation>Aquesta etiqueta es tornarà vermella, si la mida de la transacció és més gran que 10000 bytes. En aquest cas es requereix una comissió d&apos;almenys %1 per kb. Pot variar + / - 1 Byte per entrada.</translation> </message> <message> <location line="+1"/> <source>Transactions with higher priority get more likely into a block. This label turns red, if the priority is smaller than &quot;medium&quot;. This means a fee of at least %1 per kb is required.</source> <translation>Les operacions amb més prioritat entren més fàcilment a un bloc. Aquesta etiqueta es torna vermella, si la prioritat és menor que &quot;mitja&quot;. En aquest cas es requereix una comissió d&apos;almenys %1 per kb.</translation> </message> <message> <location line="+1"/> <source>This label turns red, if any recipient receives an amount smaller than %1. This means a fee of at least %2 is required. 
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source> <translation>Aquesta etiqueta es torna vermella, si qualsevol beneficiari rep una quantitat inferior a %1. En aquest cas es requereix una comissió d&apos;almenys %2. Les quantitats inferiors a 0.546 vegades la quota mínima del relé es mostren com a POLS.</translation> </message> <message> <location line="+1"/> <source>This label turns red, if the change is smaller than %1. This means a fee of at least %2 is required.</source> <translation>Aquesta etiqueta es torna vermella, si el canvi és menor que %1. En aquest cas es requereix una comissió d&apos;almenys %2.</translation> </message> <message> <location line="+37"/> <location line="+66"/> <source>(no label)</source> <translation>(sense etiqueta)</translation> </message> <message> <location line="-9"/> <source>change from %1 (%2)</source> <translation>canvi des de %1 (%2)</translation> </message> <message> <location line="+1"/> <source>(change)</source> <translation>(canviar)</translation> </message> </context> <context> <name>EditAddressDialog</name> <message> <location filename="../forms/editaddressdialog.ui" line="+14"/> <source>Edit Address</source> <translation>Editar Adreça</translation> </message> <message> <location line="+11"/> <source>&amp;Label</source> <translation>&amp;Etiqueta</translation> </message> <message> <location line="+10"/> <source>The label associated with this address book entry</source> <translation>L&apos;etiqueta associada amb aquesta entrada de la llibreta d&apos;adreces</translation> </message> <message> <location line="+7"/> <source>&amp;Address</source> <translation>&amp;Direcció</translation> </message> <message> <location line="+10"/> <source>The address associated with this address book entry. This can only be modified for sending addresses.</source> <translation>La direcció associada amb aquesta entrada de la llibreta d&apos;adreces. 
Només pot ser modificada per a l&apos;enviament d&apos;adreces.</translation> </message> <message> <location filename="../editaddressdialog.cpp" line="+20"/> <source>New receiving address</source> <translation>Nova adreça de recepció.</translation> </message> <message> <location line="+4"/> <source>New sending address</source> <translation>Nova adreça d&apos;enviament</translation> </message> <message> <location line="+3"/> <source>Edit receiving address</source> <translation>Editar adreces de recepció</translation> </message> <message> <location line="+4"/> <source>Edit sending address</source> <translation>Editar adreces d&apos;enviament</translation> </message> <message> <location line="+76"/> <source>The entered address &quot;%1&quot; is already in the address book.</source> <translation>L&apos;adreça introduïda &quot;%1&quot; ja és present a la llibreta d&apos;adreces.</translation> </message> <message> <location line="-5"/> <source>The entered address &quot;%1&quot; is not a valid BitSeeds address.</source> <translation>La direcció introduïda &quot;%1&quot; no és una adreça BitSeeds vàlida.</translation> </message> <message> <location line="+10"/> <source>Could not unlock wallet.</source> <translation>No s&apos;ha pogut desbloquejar el moneder.</translation> </message> <message> <location line="+5"/> <source>New key generation failed.</source> <translation>Ha fallat la generació d&apos;una nova clau.</translation> </message> </context> <context> <name>GUIUtil::HelpMessageBox</name> <message> <location filename="../guiutil.cpp" line="+420"/> <location line="+12"/> <source>BitSeeds-Qt</source> <translation>BitSeeds-Qt</translation> </message> <message> <location line="-12"/> <source>version</source> <translation>versió</translation> </message> <message> <location line="+2"/> <source>Usage:</source> <translation>Ús:</translation> </message> <message> <location line="+1"/> <source>command-line options</source> <translation>Opcions de la línia 
d&apos;ordres</translation> </message> <message> <location line="+4"/> <source>UI options</source> <translation>Opcions de IU</translation> </message> <message> <location line="+1"/> <source>Set language, for example &quot;de_DE&quot; (default: system locale)</source> <translation>Definir llenguatge, per exemple &quot;de_DE&quot; (per defecte: Preferències locals de sistema)</translation> </message> <message> <location line="+1"/> <source>Start minimized</source> <translation>Iniciar minimitzat</translation> </message> <message> <location line="+1"/> <source>Show splash screen on startup (default: 1)</source> <translation>Mostrar finestra de benvinguda a l&apos;inici (per defecte: 1)</translation> </message> </context> <context> <name>OptionsDialog</name> <message> <location filename="../forms/optionsdialog.ui" line="+14"/> <source>Options</source> <translation>Opcions</translation> </message> <message> <location line="+16"/> <source>&amp;Main</source> <translation>&amp;Principal</translation> </message> <message> <location line="+6"/> <source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source> <translation>Comisió opcional per kB que ajuda a assegurar-se que les seves transaccions es processen ràpidament. La majoria de les transaccions són 1 kB. 
Comisió d&apos;0.01 recomenada.</translation> </message> <message> <location line="+15"/> <source>Pay transaction &amp;fee</source> <translation>Pagar &amp;comisió de transacció</translation> </message> <message> <location line="+31"/> <source>Reserved amount does not participate in staking and is therefore spendable at any time.</source> <translation>La quantitat reservada no participa en fer &quot;stake&quot; i per tant esta disponible en qualsevol moment.</translation> </message> <message> <location line="+15"/> <source>Reserve</source> <translation>Reserva</translation> </message> <message> <location line="+31"/> <source>Automatically start BitSeeds after logging in to the system.</source> <translation>Inicia automàticament BitSeeds després d&apos;entrar en el sistema.</translation> </message> <message> <location line="+3"/> <source>&amp;Start BitSeeds on system login</source> <translation>&amp;Iniciar BitSeeds amb l&apos;inici de sessió</translation> </message> <message> <location line="+7"/> <source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source> <translation>Separeu el bloc i les bases de dades d&apos;adreces en apagar l&apos;equip. En aquest cas es pot moure a un altre directori de dades, però alenteix l&apos;apagada. El moneder està sempre separat.</translation> </message> <message> <location line="+3"/> <source>&amp;Detach databases at shutdown</source> <translation>&amp;Separar bases de dades a l&apos;apagar l&apos;equip</translation> </message> <message> <location line="+21"/> <source>&amp;Network</source> <translation>&amp;Xarxa</translation> </message> <message> <location line="+6"/> <source>Automatically open the BitSeeds client port on the router. This only works when your router supports UPnP and it is enabled.</source> <translation>Obrir automàticament el port de client BitSeeds en el router. 
Això només funciona quan el router és compatible amb UPnP i està habilitat.</translation> </message> <message> <location line="+3"/> <source>Map port using &amp;UPnP</source> <translation>Obrir ports amb &amp;UPnP</translation> </message> <message> <location line="+7"/> <source>Connect to the BitSeeds network through a SOCKS proxy (e.g. when connecting through Tor).</source> <translation>Connecteu-vos a la xarxa BitSeeds través d&apos;un proxy SOCKS (per exemple, quan es connecta a través de Tor).</translation> </message> <message> <location line="+3"/> <source>&amp;Connect through SOCKS proxy:</source> <translation>&amp;Conectar a través d&apos;un proxy SOCKS:</translation> </message> <message> <location line="+9"/> <source>Proxy &amp;IP:</source> <translation>&amp;IP del proxy:</translation> </message> <message> <location line="+19"/> <source>IP address of the proxy (e.g. 127.0.0.1)</source> <translation>Adreça IP del servidor proxy (per exemple, 127.0.0.1)</translation> </message> <message> <location line="+7"/> <source>&amp;Port:</source> <translation>&amp;Port:</translation> </message> <message> <location line="+19"/> <source>Port of the proxy (e.g. 9050)</source> <translation>Port del proxy (per exemple 9050)</translation> </message> <message> <location line="+7"/> <source>SOCKS &amp;Version:</source> <translation>&amp;Versió de SOCKS:</translation> </message> <message> <location line="+13"/> <source>SOCKS version of the proxy (e.g. 
5)</source> <translation>Versió SOCKS del proxy (per exemple 5)</translation> </message> <message> <location line="+36"/> <source>&amp;Window</source> <translation>&amp;Finestra</translation> </message> <message> <location line="+6"/> <source>Show only a tray icon after minimizing the window.</source> <translation>Mostrar només l&apos;icona de la barra al minimitzar l&apos;aplicació.</translation> </message> <message> <location line="+3"/> <source>&amp;Minimize to the tray instead of the taskbar</source> <translation>&amp;Minimitzar a la barra d&apos;aplicacions</translation> </message> <message> <location line="+7"/> <source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source> <translation>Minimitza en comptes de sortir de la aplicació al tancar la finestra. Quan aquesta opció està activa, la aplicació només es tancarà al seleccionar Sortir al menú.</translation> </message> <message> <location line="+3"/> <source>M&amp;inimize on close</source> <translation>M&amp;inimitzar al tancar</translation> </message> <message> <location line="+21"/> <source>&amp;Display</source> <translation>&amp;Pantalla</translation> </message> <message> <location line="+8"/> <source>User Interface &amp;language:</source> <translation>Llenguatge de la Interfície d&apos;Usuari:</translation> </message> <message> <location line="+13"/> <source>The user interface language can be set here. This setting will take effect after restarting BitSeeds.</source> <translation>L&apos;idioma de la interfície d&apos;usuari es pot configurar aquí. 
Aquesta configuració s&apos;aplicarà després de reiniciar BitSeeds.</translation> </message> <message> <location line="+11"/> <source>&amp;Unit to show amounts in:</source> <translation>&amp;Unitats per mostrar les quantitats en:</translation> </message> <message> <location line="+13"/> <source>Choose the default subdivision unit to show in the interface and when sending coins.</source> <translation>Sel·lecciona la unitat de subdivisió per defecte per mostrar en la interficie quan s&apos;envien monedes.</translation> </message> <message> <location line="+9"/> <source>Whether to show BitSeeds addresses in the transaction list or not.</source> <translation>Per mostrar BitSeeds adreces a la llista de transaccions o no.</translation> </message> <message> <location line="+3"/> <source>&amp;Display addresses in transaction list</source> <translation>&amp;Mostrar adreces al llistat de transaccions</translation> </message> <message> <location line="+7"/> <source>Whether to show coin control features or not.</source> <translation>Per mostrar les característiques de control de la moneda o no.</translation> </message> <message> <location line="+3"/> <source>Display coin &amp;control features (experts only!)</source> <translation>Mostrar controls i característiques de la moneda (només per a experts!)</translation> </message> <message> <location line="+71"/> <source>&amp;OK</source> <translation>&amp;OK</translation> </message> <message> <location line="+7"/> <source>&amp;Cancel</source> <translation>&amp;Cancel·la</translation> </message> <message> <location line="+10"/> <source>&amp;Apply</source> <translation>&amp;Aplicar</translation> </message> <message> <location filename="../optionsdialog.cpp" line="+55"/> <source>default</source> <translation>Per defecte</translation> </message> <message> <location line="+149"/> <location line="+9"/> <source>Warning</source> <translation>Avís</translation> </message> <message> <location line="-9"/> <location line="+9"/> <source>This 
setting will take effect after restarting BitSeeds.</source> <translation>Aquesta configuració s&apos;aplicarà després de reiniciar BitSeeds.</translation> </message> <message> <location line="+29"/> <source>The supplied proxy address is invalid.</source> <translation>L&apos;adreça proxy introduïda és invalida.</translation> </message> </context> <context> <name>OverviewPage</name> <message> <location filename="../forms/overviewpage.ui" line="+14"/> <source>Form</source> <translation>Formulari</translation> </message> <message> <location line="+33"/> <location line="+231"/> <source>The displayed information may be out of date. Your wallet automatically synchronizes with the BitSeeds network after a connection is established, but this process has not completed yet.</source> <translation>La informació mostrada pot estar fora de data. El seu moneder es sincronitza automàticament amb la xarxa BitSeeds després d&apos;establir una connexió, però aquest procés no s&apos;ha completat encara.</translation> </message> <message> <location line="-160"/> <source>Stake:</source> <translation>En &quot;stake&quot;:</translation> </message> <message> <location line="+29"/> <source>Unconfirmed:</source> <translation>Sense confirmar:</translation> </message> <message> <location line="-107"/> <source>Wallet</source> <translation>Moneder</translation> </message> <message> <location line="+49"/> <source>Spendable:</source> <translation>Disponible:</translation> </message> <message> <location line="+16"/> <source>Your current spendable balance</source> <translation>El balanç de saldo actual disponible</translation> </message> <message> <location line="+71"/> <source>Immature:</source> <translation>Immatur:</translation> </message> <message> <location line="+13"/> <source>Mined balance that has not yet matured</source> <translation>Balanç minat que encara no ha madurat</translation> </message> <message> <location line="+20"/> <source>Total:</source> <translation>Total:</translation> 
</message> <message> <location line="+16"/> <source>Your current total balance</source> <translation>El seu balanç total</translation> </message> <message> <location line="+46"/> <source>&lt;b&gt;Recent transactions&lt;/b&gt;</source> <translation>&lt;b&gt;Transaccions recents&lt;/b&gt;</translation> </message> <message> <location line="-108"/> <source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source> <translation>Total de transaccions que encara no s&apos;han confirmat, i encara no compten per al balanç actual</translation> </message> <message> <location line="-29"/> <source>Total of coins that was staked, and do not yet count toward the current balance</source> <translation>Total de les monedes que s&apos;han posat a fer &quot;stake&quot; (en joc, aposta), i encara no compten per al balanç actual</translation> </message> <message> <location filename="../overviewpage.cpp" line="+113"/> <location line="+1"/> <source>out of sync</source> <translation>Fora de sincronia</translation> </message> </context> <context> <name>QRCodeDialog</name> <message> <location filename="../forms/qrcodedialog.ui" line="+14"/> <source>QR Code Dialog</source> <translation>Diàleg de codi QR</translation> </message> <message> <location line="+59"/> <source>Request Payment</source> <translation>Sol·licitud de pagament</translation> </message> <message> <location line="+56"/> <source>Amount:</source> <translation>Quantitat:</translation> </message> <message> <location line="-44"/> <source>Label:</source> <translation>Etiqueta:</translation> </message> <message> <location line="+19"/> <source>Message:</source> <translation>Missatge:</translation> </message> <message> <location line="+71"/> <source>&amp;Save As...</source> <translation>&amp;Desa com ...</translation> </message> <message> <location filename="../qrcodedialog.cpp" line="+62"/> <source>Error encoding URI into QR Code.</source> <translation>Error codificant la URI en un 
codi QR.</translation> </message> <message> <location line="+40"/> <source>The entered amount is invalid, please check.</source> <translation>La quantitat introduïda no és vàlida, comproveu-ho si us plau.</translation> </message> <message> <location line="+23"/> <source>Resulting URI too long, try to reduce the text for label / message.</source> <translation>URI resultant massa llarga, intenta reduir el text per a la etiqueta / missatge</translation> </message> <message> <location line="+25"/> <source>Save QR Code</source> <translation>Desar codi QR</translation> </message> <message> <location line="+0"/> <source>PNG Images (*.png)</source> <translation>Imatges PNG (*.png)</translation> </message> </context> <context> <name>RPCConsole</name> <message> <location filename="../forms/rpcconsole.ui" line="+46"/> <source>Client name</source> <translation>Nom del client</translation> </message> <message> <location line="+10"/> <location line="+23"/> <location line="+26"/> <location line="+23"/> <location line="+23"/> <location line="+36"/> <location line="+53"/> <location line="+23"/> <location line="+23"/> <location filename="../rpcconsole.cpp" line="+348"/> <source>N/A</source> <translation>N/A</translation> </message> <message> <location line="-217"/> <source>Client version</source> <translation>Versió del client</translation> </message> <message> <location line="-45"/> <source>&amp;Information</source> <translation>&amp;Informació</translation> </message> <message> <location line="+68"/> <source>Using OpenSSL version</source> <translation>Utilitzant OpenSSL versió</translation> </message> <message> <location line="+49"/> <source>Startup time</source> <translation>&amp;Temps d&apos;inici</translation> </message> <message> <location line="+29"/> <source>Network</source> <translation>Xarxa</translation> </message> <message> <location line="+7"/> <source>Number of connections</source> <translation>Nombre de connexions</translation> </message> <message> <location 
line="+23"/> <source>On testnet</source> <translation>A testnet</translation> </message> <message> <location line="+23"/> <source>Block chain</source> <translation>Cadena de blocs</translation> </message> <message> <location line="+7"/> <source>Current number of blocks</source> <translation>Nombre de blocs actuals</translation> </message> <message> <location line="+23"/> <source>Estimated total blocks</source> <translation>Total estimat de blocs</translation> </message> <message> <location line="+23"/> <source>Last block time</source> <translation>Últim temps de bloc</translation> </message> <message> <location line="+52"/> <source>&amp;Open</source> <translation>&amp;Obrir</translation> </message> <message> <location line="+16"/> <source>Command-line options</source> <translation>Opcions de la línia d&apos;ordres</translation> </message> <message> <location line="+7"/> <source>Show the BitSeeds-Qt help message to get a list with possible BitSeeds command-line options.</source> <translation>Mostra el missatge d&apos;ajuda de BitSeeds-Qt per obtenir una llista amb les possibles opcions de línia d&apos;ordres BitSeeds.</translation> </message> <message> <location line="+3"/> <source>&amp;Show</source> <translation>&amp;Mostra</translation> </message> <message> <location line="+24"/> <source>&amp;Console</source> <translation>&amp;Consola</translation> </message> <message> <location line="-260"/> <source>Build date</source> <translation>Data de compilació</translation> </message> <message> <location line="-104"/> <source>BitSeeds - Debug window</source> <translation>BitSeeds - Finestra Depuració</translation> </message> <message> <location line="+25"/> <source>BitSeeds Core</source> <translation>Nucli BitSeeds</translation> </message> <message> <location line="+279"/> <source>Debug log file</source> <translation>Dietàri de depuració</translation> </message> <message> <location line="+7"/> <source>Open the BitSeeds debug log file from the current data directory.
This can take a few seconds for large log files.</source> <translation>Obriu el fitxer de registre de depuració BitSeeds des del directori de dades actual. Això pot trigar uns segons en els arxius de registre de grans dimensions.</translation> </message> <message> <location line="+102"/> <source>Clear console</source> <translation>Netejar consola</translation> </message> <message> <location filename="../rpcconsole.cpp" line="-33"/> <source>Welcome to the BitSeeds RPC console.</source> <translation>Benvingut a la consola RPC de BitSeeds.</translation> </message> <message> <location line="+1"/> <source>Use up and down arrows to navigate history, and &lt;b&gt;Ctrl-L&lt;/b&gt; to clear screen.</source> <translation>Utilitza les fletxes d&apos;amunt i avall per navegar per l&apos;històric, i &lt;b&gt;Ctrl-L&lt;/b&gt; per netejar la pantalla.</translation> </message> <message> <location line="+1"/> <source>Type &lt;b&gt;help&lt;/b&gt; for an overview of available commands.</source> <translation>Escriu &lt;b&gt;help&lt;/b&gt; per a obtenir un llistat de les ordres disponibles.</translation> </message> </context> <context> <name>SendCoinsDialog</name> <message> <location filename="../forms/sendcoinsdialog.ui" line="+14"/> <location filename="../sendcoinsdialog.cpp" line="+182"/> <location line="+5"/> <location line="+5"/> <location line="+5"/> <location line="+6"/> <location line="+5"/> <location line="+5"/> <source>Send Coins</source> <translation>Enviar monedes</translation> </message> <message> <location line="+76"/> <source>Coin Control Features</source> <translation>Característiques de control de la moneda</translation> </message> <message> <location line="+20"/> <source>Inputs...</source> <translation>Entrades...</translation> </message> <message> <location line="+7"/> <source>automatically selected</source> <translation>Seleccionat automàticament</translation> </message> <message> <location line="+19"/> <source>Insufficient funds!</source> <translation>Fons
insuficients!</translation> </message> <message> <location line="+77"/> <source>Quantity:</source> <translation>Quantitat:</translation> </message> <message> <location line="+22"/> <location line="+35"/> <source>0</source> <translation>0</translation> </message> <message> <location line="-19"/> <source>Bytes:</source> <translation>Bytes:</translation> </message> <message> <location line="+51"/> <source>Amount:</source> <translation>Quantitat:</translation> </message> <message> <location line="+22"/> <location line="+86"/> <location line="+86"/> <location line="+32"/> <source>0.00 BITS</source> <translation>0.00 BITS</translation> </message> <message> <location line="-191"/> <source>Priority:</source> <translation>Prioritat:</translation> </message> <message> <location line="+19"/> <source>medium</source> <translation>mig</translation> </message> <message> <location line="+32"/> <source>Fee:</source> <translation>Quota:</translation> </message> <message> <location line="+35"/> <source>Low Output:</source> <translation>Sortida baixa:</translation> </message> <message> <location line="+19"/> <source>no</source> <translation>no</translation> </message> <message> <location line="+32"/> <source>After Fee:</source> <translation>Quota posterior:</translation> </message> <message> <location line="+35"/> <source>Change</source> <translation>Canvi</translation> </message> <message> <location line="+50"/> <source>custom change address</source> <translation>Adreça de canvi pròpia</translation> </message> <message> <location line="+106"/> <source>Send to multiple recipients at once</source> <translation>Enviar a múltiples destinataris al mateix temps</translation> </message> <message> <location line="+3"/> <source>Add &amp;Recipient</source> <translation>Afegir &amp;Destinatari</translation> </message> <message> <location line="+20"/> <source>Remove all transaction fields</source> <translation>Traieu tots els camps de transacció</translation> </message> <message>
<location line="+3"/> <source>Clear &amp;All</source> <translation>Esborrar &amp;Tot</translation> </message> <message> <location line="+28"/> <source>Balance:</source> <translation>Balanç:</translation> </message> <message> <location line="+16"/> <source>123.456 BITS</source> <translation>123.456 BITS</translation> </message> <message> <location line="+31"/> <source>Confirm the send action</source> <translation>Confirmi l&apos;acció d&apos;enviament</translation> </message> <message> <location line="+3"/> <source>S&amp;end</source> <translation>E&amp;nviar</translation> </message> <message> <location filename="../sendcoinsdialog.cpp" line="-173"/> <source>Enter a BitSeeds address (e.g. A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>Introdueix una adreça BitSeeds (p.ex. A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> <message> <location line="+15"/> <source>Copy quantity</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+1"/> <source>Copy fee</source> <translation>Copiar comissió</translation> </message> <message> <location line="+1"/> <source>Copy after fee</source> <translation>Copiar després de comissió</translation> </message> <message> <location line="+1"/> <source>Copy bytes</source> <translation>Copiar bytes</translation> </message> <message> <location line="+1"/> <source>Copy priority</source> <translation>Copiar prioritat</translation> </message> <message> <location line="+1"/> <source>Copy low output</source> <translation>Copiar sortida baixa</translation> </message> <message> <location line="+1"/> <source>Copy change</source> <translation>Copiar canvi</translation> </message> <message> <location line="+86"/> <source>&lt;b&gt;%1&lt;/b&gt; to %2 (%3)</source> <translation>&lt;b&gt;%1&lt;/b&gt; a %2 (%3)</translation> </message> <message> <location line="+5"/> <source>Confirm send
coins</source> <translation>Confirmar l&apos;enviament de monedes</translation> </message> <message> <location line="+1"/> <source>Are you sure you want to send %1?</source> <translation>Esteu segur que voleu enviar %1?</translation> </message> <message> <location line="+0"/> <source> and </source> <translation> i </translation> </message> <message> <location line="+29"/> <source>The recipient address is not valid, please recheck.</source> <translation>L&apos;adreça del destinatari no és vàlida, si us plau comprovi-la.</translation> </message> <message> <location line="+5"/> <source>The amount to pay must be larger than 0.</source> <translation>La quantitat a pagar ha de ser major que 0.</translation> </message> <message> <location line="+5"/> <source>The amount exceeds your balance.</source> <translation>L&apos;import supera el saldo del seu compte.</translation> </message> <message> <location line="+5"/> <source>The total exceeds your balance when the %1 transaction fee is included.</source> <translation>El total excedeix el teu balanç quan s&apos;inclou la comissió de transacció de %1.</translation> </message> <message> <location line="+6"/> <source>Duplicate address found, can only send to each address once per send operation.</source> <translation>S&apos;ha trobat una adreça duplicada, tan sols es pot enviar a cada adreça un cop per ordre d&apos;enviament.</translation> </message> <message> <location line="+5"/> <source>Error: Transaction creation failed.</source> <translation>Error: La creació de transacció ha fallat.</translation> </message> <message> <location line="+5"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacció ha sigut rebutjada.
Això pot passar si algunes de les monedes al moneder ja s&apos;han gastat, per exemple, si vostè utilitza una còpia del wallet.dat i les monedes han estat gastades a la cópia pero no s&apos;han marcat com a gastades aqui.</translation> </message> <message> <location line="+251"/> <source>WARNING: Invalid BitSeeds address</source> <translation>ADVERTÈNCIA: Direcció BitSeeds invàlida</translation> </message> <message> <location line="+13"/> <source>(no label)</source> <translation>(sense etiqueta)</translation> </message> <message> <location line="+4"/> <source>WARNING: unknown change address</source> <translation>ADVERTÈNCIA: direcció de canvi desconeguda</translation> </message> </context> <context> <name>SendCoinsEntry</name> <message> <location filename="../forms/sendcoinsentry.ui" line="+14"/> <source>Form</source> <translation>Formulari</translation> </message> <message> <location line="+15"/> <source>A&amp;mount:</source> <translation>Q&amp;uantitat:</translation> </message> <message> <location line="+13"/> <source>Pay &amp;To:</source> <translation>Pagar &amp;A:</translation> </message> <message> <location line="+24"/> <location filename="../sendcoinsentry.cpp" line="+25"/> <source>Enter a label for this address to add it to your address book</source> <translation>Introdueixi una etiquera per a aquesta adreça per afegir-la a la llibreta d&apos;adreces</translation> </message> <message> <location line="+9"/> <source>&amp;Label:</source> <translation>&amp;Etiqueta:</translation> </message> <message> <location line="+18"/> <source>The address to send the payment to (e.g. 
A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>L&apos;adreça per a enviar el pagament (per exemple: A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> <message> <location line="+10"/> <source>Choose address from address book</source> <translation>Trieu l&apos;adreça de la llibreta d&apos;adreces</translation> </message> <message> <location line="+10"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="+7"/> <source>Paste address from clipboard</source> <translation>Enganxar adreça del porta-retalls</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+7"/> <source>Remove this recipient</source> <translation>Eliminar aquest destinatari</translation> </message> <message> <location filename="../sendcoinsentry.cpp" line="+1"/> <source>Enter a BitSeeds address (e.g. A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>Introdueix una adreça BitSeeds (p.ex. A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> </context> <context> <name>SignVerifyMessageDialog</name> <message> <location filename="../forms/signverifymessagedialog.ui" line="+14"/> <source>Signatures - Sign / Verify a Message</source> <translation>Signatures - Signar / Verificar un Missatge</translation> </message> <message> <location line="+13"/> <location line="+124"/> <source>&amp;Sign Message</source> <translation>&amp;Signar Missatge</translation> </message> <message> <location line="-118"/> <source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source> <translation>Pots signar missatges amb la teva adreça per provar que són teus.
Sigues cautelòs al signar qualsevol cosa, ja que els atacs phishing poden intentar confondre&apos;t per a que els hi signis amb la teva identitat. Tan sols signa als documents completament detallats amb els que hi estàs d&apos;acord.</translation> </message> <message> <location line="+18"/> <source>The address to sign the message with (e.g. A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>L&apos;adreça per a signar el missatge (per exemple A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> <message> <location line="+10"/> <location line="+203"/> <source>Choose an address from the address book</source> <translation>Trieu una adreça de la llibreta d&apos;adreces</translation> </message> <message> <location line="-193"/> <location line="+203"/> <source>Alt+A</source> <translation>Alt+A</translation> </message> <message> <location line="-193"/> <source>Paste address from clipboard</source> <translation>Enganxar adreça del porta-retalls</translation> </message> <message> <location line="+10"/> <source>Alt+P</source> <translation>Alt+P</translation> </message> <message> <location line="+12"/> <source>Enter the message you want to sign here</source> <translation>Introdueix aquí el missatge que vols signar</translation> </message> <message> <location line="+24"/> <source>Copy the current signature to the system clipboard</source> <translation>Copiar la signatura actual al porta-retalls del sistema</translation> </message> <message> <location line="+21"/> <source>Sign the message to prove you own this BitSeeds address</source> <translation>Signar un missatge per demostrar que és propietari d&apos;aquesta adreça BitSeeds</translation> </message> <message> <location line="+17"/> <source>Reset all sign message fields</source> <translation>Neteja tots els camps de signatura del missatge</translation> </message> <message> <location line="+3"/> <location line="+146"/> <source>Clear &amp;All</source> <translation>Esborrar &amp;Tot</translation> </message> <message> <location
line="-87"/> <location line="+70"/> <source>&amp;Verify Message</source> <translation>&amp;Verificar el missatge</translation> </message> <message> <location line="-64"/> <source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source> <translation>Introdueixi l&apos;adreça signant, missatge (assegura&apos;t que copies salts de línia, espais, tabuladors, etc excactament tot el text) i la signatura a sota per verificar el missatge. Per evitar ser enganyat per un atac home-entre-mig, vés amb compte de no llegir més en la signatura del que hi ha al missatge signat mateix.</translation> </message> <message> <location line="+21"/> <source>The address the message was signed with (e.g. A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>La direcció que va ser signada amb un missatge (per exemple A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> <message> <location line="+40"/> <source>Verify the message to ensure it was signed with the specified BitSeeds address</source> <translation>Comproveu el missatge per assegurar-se que es va signar amb l&apos;adreça BitSeeds especificada.</translation> </message> <message> <location line="+17"/> <source>Reset all verify message fields</source> <translation>Neteja tots els camps de verificació de missatge</translation> </message> <message> <location filename="../signverifymessagedialog.cpp" line="+27"/> <location line="+3"/> <source>Enter a BitSeeds address (e.g. A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source> <translation>Introdueix una adreça BitSeeds (p.ex. 
A8gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</translation> </message> <message> <location line="-2"/> <source>Click &quot;Sign Message&quot; to generate signature</source> <translation>Clica &quot;Signar Missatge&quot; per a generar una signatura</translation> </message> <message> <location line="+3"/> <source>Enter BitSeeds signature</source> <translation>Introduïu la signatura BitSeeds</translation> </message> <message> <location line="+82"/> <location line="+81"/> <source>The entered address is invalid.</source> <translation>L&apos;adreça introduïda és invàlida.</translation> </message> <message> <location line="-81"/> <location line="+8"/> <location line="+73"/> <location line="+8"/> <source>Please check the address and try again.</source> <translation>Si us plau, comprovi l&apos;adreça i provi de nou.</translation> </message> <message> <location line="-81"/> <location line="+81"/> <source>The entered address does not refer to a key.</source> <translation>L&apos;adreça introduïda no referencia a cap clau.</translation> </message> <message> <location line="-73"/> <source>Wallet unlock was cancelled.</source> <translation>El desbloqueig del moneder ha estat cancel·lat.</translation> </message> <message> <location line="+8"/> <source>Private key for the entered address is not available.</source> <translation>La clau privada per a l&apos;adreça introduïda no està disponible.</translation> </message> <message> <location line="+12"/> <source>Message signing failed.</source> <translation>El signat del missatge ha fallat.</translation> </message> <message> <location line="+5"/> <source>Message signed.</source> <translation>Missatge signat.</translation> </message> <message> <location line="+59"/> <source>The signature could not be decoded.</source> <translation>La signatura no s&apos;ha pogut decodificar.</translation> </message> <message> <location line="+0"/> <location line="+13"/> <source>Please check the signature and try again.</source> <translation>Si us plau, comprovi la
signatura i provi de nou.</translation> </message> <message> <location line="+0"/> <source>The signature did not match the message digest.</source> <translation>La signatura no coincideix amb el resum del missatge.</translation> </message> <message> <location line="+7"/> <source>Message verification failed.</source> <translation>Ha fallat la verificació del missatge.</translation> </message> <message> <location line="+5"/> <source>Message verified.</source> <translation>Missatge verificat.</translation> </message> </context> <context> <name>TransactionDesc</name> <message> <location filename="../transactiondesc.cpp" line="+19"/> <source>Open until %1</source> <translation>Obert fins %1</translation> </message> <message numerus="yes"> <location line="-2"/> <source>Open for %n block(s)</source> <translation><numerusform>Obert per a %n bloc</numerusform><numerusform>Obert per a %n blocs</numerusform></translation> </message> <message> <location line="+8"/> <source>conflicted</source> <translation>conflicte</translation> </message> <message> <location line="+2"/> <source>%1/offline</source> <translation>%1/offline</translation> </message> <message> <location line="+2"/> <source>%1/unconfirmed</source> <translation>%1/sense confirmar</translation> </message> <message> <location line="+2"/> <source>%1 confirmations</source> <translation>%1 confirmacions</translation> </message> <message> <location line="+18"/> <source>Status</source> <translation>Estat</translation> </message> <message numerus="yes"> <location line="+7"/> <source>, broadcast through %n node(s)</source> <translation><numerusform>, transmès a través de %n node</numerusform><numerusform>, transmès a través de %n nodes</numerusform></translation> </message> <message> <location line="+4"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+7"/> <source>Source</source> <translation>Font</translation> </message> <message> <location line="+0"/> <source>Generated</source>
<translation>Generat</translation> </message> <message> <location line="+5"/> <location line="+17"/> <source>From</source> <translation>Des de</translation> </message> <message> <location line="+1"/> <location line="+22"/> <location line="+58"/> <source>To</source> <translation>A</translation> </message> <message> <location line="-77"/> <location line="+2"/> <source>own address</source> <translation>Adreça pròpia</translation> </message> <message> <location line="-2"/> <source>label</source> <translation>etiqueta</translation> </message> <message> <location line="+37"/> <location line="+12"/> <location line="+45"/> <location line="+17"/> <location line="+30"/> <source>Credit</source> <translation>Crèdit</translation> </message> <message numerus="yes"> <location line="-102"/> <source>matures in %n more block(s)</source> <translation><numerusform>madura en %n bloc més</numerusform><numerusform>madura en %n blocs més</numerusform></translation> </message> <message> <location line="+2"/> <source>not accepted</source> <translation>no acceptat</translation> </message> <message> <location line="+44"/> <location line="+8"/> <location line="+15"/> <location line="+30"/> <source>Debit</source> <translation>Dèbit</translation> </message> <message> <location line="-39"/> <source>Transaction fee</source> <translation>Comissió de transacció</translation> </message> <message> <location line="+16"/> <source>Net amount</source> <translation>Quantitat neta</translation> </message> <message> <location line="+6"/> <source>Message</source> <translation>Missatge</translation> </message> <message> <location line="+2"/> <source>Comment</source> <translation>Comentar</translation> </message> <message> <location line="+2"/> <source>Transaction ID</source> <translation>ID de transacció</translation> </message> <message> <location line="+3"/> <source>Generated coins must mature 120 blocks before they can be spent. 
When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to &quot;not accepted&quot; and it won&apos;t be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source> <translation>Les monedes generades han de madurar 510 blocs abans per a poder estar disponibles. En generar aquest bloc, que va ser transmès a la xarxa per ser afegit a la cadena de bloc. Si no aconsegueix entrar a la cadena, el seu estat canviarà a &quot;no acceptat&quot; i no es podrà gastar. Això pot succeir de tant en tant si un altre node genera un bloc a pocs segons del seu.</translation> </message> <message> <location line="+7"/> <source>Debug information</source> <translation>Informació de depuració</translation> </message> <message> <location line="+8"/> <source>Transaction</source> <translation>Transacció</translation> </message> <message> <location line="+5"/> <source>Inputs</source> <translation>Entrades</translation> </message> <message> <location line="+23"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+1"/> <source>true</source> <translation>cert</translation> </message> <message> <location line="+0"/> <source>false</source> <translation>fals</translation> </message> <message> <location line="-211"/> <source>, has not been successfully broadcast yet</source> <translation>, encara no ha estat emès correctament</translation> </message> <message> <location line="+35"/> <source>unknown</source> <translation>desconegut</translation> </message> </context> <context> <name>TransactionDescDialog</name> <message> <location filename="../forms/transactiondescdialog.ui" line="+14"/> <source>Transaction details</source> <translation>Detall de la transacció</translation> </message> <message> <location line="+6"/> <source>This pane shows a detailed description of the transaction</source>
<translation>Aquest panell mostra una descripció detallada de la transacció</translation> </message> </context> <context> <name>TransactionTableModel</name> <message> <location filename="../transactiontablemodel.cpp" line="+226"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+0"/> <source>Type</source> <translation>Tipus</translation> </message> <message> <location line="+0"/> <source>Address</source> <translation>Direcció</translation> </message> <message> <location line="+0"/> <source>Amount</source> <translation>Quantitat</translation> </message> <message> <location line="+60"/> <source>Open until %1</source> <translation>Obert fins %1</translation> </message> <message> <location line="+12"/> <source>Confirmed (%1 confirmations)</source> <translation>Confirmat (%1 confirmacions)</translation> </message> <message numerus="yes"> <location line="-15"/> <source>Open for %n more block(s)</source> <translation><numerusform>Obert per a %n bloc més</numerusform><numerusform>Obert per a %n blocs més</numerusform></translation> </message> <message> <location line="+6"/> <source>Offline</source> <translation>Desconnectat</translation> </message> <message> <location line="+3"/> <source>Unconfirmed</source> <translation>Sense confirmar</translation> </message> <message> <location line="+3"/> <source>Confirming (%1 of %2 recommended confirmations)</source> <translation>Confirmant (%1 de %2 confirmacions recomanat)</translation> </message> <message> <location line="+6"/> <source>Conflicted</source> <translation>Conflicte</translation> </message> <message> <location line="+3"/> <source>Immature (%1 confirmations, will be available after %2)</source> <translation>Immadurs (%1 confirmacions, estaran disponibles després de %2)</translation> </message> <message> <location line="+3"/> <source>This block was not received by any other nodes and will probably not be accepted!</source> <translation>Aquest bloc no ha estat rebut per cap altre 
node i probablement no serà acceptat!</translation> </message> <message> <location line="+3"/> <source>Generated but not accepted</source> <translation>Generat però no acceptat</translation> </message> <message> <location line="+42"/> <source>Received with</source> <translation>Rebut desde</translation> </message> <message> <location line="+2"/> <source>Received from</source> <translation>Rebut de</translation> </message> <message> <location line="+3"/> <source>Sent to</source> <translation>Enviat a</translation> </message> <message> <location line="+2"/> <source>Payment to yourself</source> <translation>Pagament a un mateix</translation> </message> <message> <location line="+2"/> <source>Mined</source> <translation>Minat</translation> </message> <message> <location line="+38"/> <source>(n/a)</source> <translation>(n/a)</translation> </message> <message> <location line="+190"/> <source>Transaction status. Hover over this field to show number of confirmations.</source> <translation>Estat de la transacció. 
Desplaça&apos;t per aquí sobre per mostrar el nombre de confirmacions.</translation> </message> <message> <location line="+2"/> <source>Date and time that the transaction was received.</source> <translation>Data i hora en que la transacció va ser rebuda.</translation> </message> <message> <location line="+2"/> <source>Type of transaction.</source> <translation>Tipus de transacció.</translation> </message> <message> <location line="+2"/> <source>Destination address of transaction.</source> <translation>Adreça del destinatari de la transacció.</translation> </message> <message> <location line="+2"/> <source>Amount removed from or added to balance.</source> <translation>Quantitat extreta o afegida del balanç.</translation> </message> </context> <context> <name>TransactionView</name> <message> <location filename="../transactionview.cpp" line="+55"/> <location line="+16"/> <source>All</source> <translation>Tot</translation> </message> <message> <location line="-15"/> <source>Today</source> <translation>Avui</translation> </message> <message> <location line="+1"/> <source>This week</source> <translation>Aquesta setmana</translation> </message> <message> <location line="+1"/> <source>This month</source> <translation>Aquest mes</translation> </message> <message> <location line="+1"/> <source>Last month</source> <translation>El mes passat</translation> </message> <message> <location line="+1"/> <source>This year</source> <translation>Enguany</translation> </message> <message> <location line="+1"/> <source>Range...</source> <translation>Rang...</translation> </message> <message> <location line="+11"/> <source>Received with</source> <translation>Rebut desde</translation> </message> <message> <location line="+2"/> <source>Sent to</source> <translation>Enviat a</translation> </message> <message> <location line="+2"/> <source>To yourself</source> <translation>A tu mateix</translation> </message> <message> <location line="+1"/> <source>Mined</source> 
<translation>Minat</translation> </message> <message> <location line="+1"/> <source>Other</source> <translation>Altres</translation> </message> <message> <location line="+7"/> <source>Enter address or label to search</source> <translation>Introdueix una adreça o una etiqueta per cercar</translation> </message> <message> <location line="+7"/> <source>Min amount</source> <translation>Quantitat mínima</translation> </message> <message> <location line="+34"/> <source>Copy address</source> <translation>Copiar adreça </translation> </message> <message> <location line="+1"/> <source>Copy label</source> <translation>Copiar etiqueta</translation> </message> <message> <location line="+1"/> <source>Copy amount</source> <translation>Copiar quantitat</translation> </message> <message> <location line="+1"/> <source>Copy transaction ID</source> <translation>Copiar ID de transacció</translation> </message> <message> <location line="+1"/> <source>Edit label</source> <translation>Editar etiqueta</translation> </message> <message> <location line="+1"/> <source>Show transaction details</source> <translation>Mostra detalls de la transacció</translation> </message> <message> <location line="+144"/> <source>Export Transaction Data</source> <translation>Exportació de dades de transaccions</translation> </message> <message> <location line="+1"/> <source>Comma separated file (*.csv)</source> <translation>Arxiu de separació per comes (*.csv)</translation> </message> <message> <location line="+8"/> <source>Confirmed</source> <translation>Confirmat</translation> </message> <message> <location line="+1"/> <source>Date</source> <translation>Data</translation> </message> <message> <location line="+1"/> <source>Type</source> <translation>Tipus</translation> </message> <message> <location line="+1"/> <source>Label</source> <translation>Etiqueta</translation> </message> <message> <location line="+1"/> <source>Address</source> <translation>Direcció</translation> </message> <message> <location line="+1"/> <source>Amount</source>
<translation>Quantitat</translation> </message> <message> <location line="+1"/> <source>ID</source> <translation>ID</translation> </message> <message> <location line="+4"/> <source>Error exporting</source> <translation>Error a l&apos;exportar</translation> </message> <message> <location line="+0"/> <source>Could not write to file %1.</source> <translation>No s&apos;ha pogut escriure al fitxer %1.</translation> </message> <message> <location line="+100"/> <source>Range:</source> <translation>Rang:</translation> </message> <message> <location line="+8"/> <source>to</source> <translation>a</translation> </message> </context> <context> <name>WalletModel</name> <message> <location filename="../walletmodel.cpp" line="+206"/> <source>Sending...</source> <translation>Enviant...</translation> </message> </context> <context> <name>bitcoin-core</name> <message> <location filename="../bitcoinstrings.cpp" line="+33"/> <source>BitSeeds version</source> <translation>versió BitSeeds</translation> </message> <message> <location line="+1"/> <source>Usage:</source> <translation>Ús:</translation> </message> <message> <location line="+1"/> <source>Send command to -server or bitseedsd</source> <translation>Enviar comandes a -server o bitseedsd</translation> </message> <message> <location line="+1"/> <source>List commands</source> <translation>Llista d&apos;ordres</translation> </message> <message> <location line="+1"/> <source>Get help for a command</source> <translation>Obtenir ajuda per a un ordre.</translation> </message> <message> <location line="+2"/> <source>Options:</source> <translation>Opcions:</translation> </message> <message> <location line="+2"/> <source>Specify configuration file (default: bitseeds.conf)</source> <translation>Especifiqueu el fitxer de configuració (per defecte: bitseeds.conf)</translation> </message> <message> <location line="+1"/> <source>Specify pid file (default: bitseedsd.pid)</source> <translation>Especificar arxiu pid (per defecte: 
bitseedsd.pid)</translation> </message> <message> <location line="+2"/> <source>Specify wallet file (within data directory)</source> <translation>Especifica un arxiu de moneder (dintre del directori de les dades)</translation> </message> <message> <location line="-1"/> <source>Specify data directory</source> <translation>Especificar directori de dades</translation> </message> <message> <location line="+2"/> <source>Set database cache size in megabytes (default: 25)</source> <translation>Establir tamany de la memoria cau en megabytes (per defecte: 25)</translation> </message> <message> <location line="+1"/> <source>Set database disk log size in megabytes (default: 100)</source> <translation>Configurar la mida del registre en disc de la base de dades en megabytes (per defecte: 100)</translation> </message> <message> <location line="+6"/> <source>Listen for connections on &lt;port&gt; (default: 15714 or testnet: 25714)</source> <translation>Escoltar connexions en &lt;port&gt; (per defecte: 15714 o testnet: 25714)</translation> </message> <message> <location line="+1"/> <source>Maintain at most &lt;n&gt; connections to peers (default: 125)</source> <translation>Mantenir com a molt &lt;n&gt; connexions a peers (per defecte: 125)</translation> </message> <message> <location line="+3"/> <source>Connect to a node to retrieve peer addresses, and disconnect</source> <translation>Connectar al node per obtenir les adreces de les connexions, i desconectar</translation> </message> <message> <location line="+1"/> <source>Specify your own public address</source> <translation>Especificar la teva adreça pública</translation> </message> <message> <location line="+5"/> <source>Bind to given address. Use [host]:port notation for IPv6</source> <translation>Enllaçar a l&apos;adreça donada. 
Utilitzeu la notació [host]:port per a IPv6</translation> </message> <message> <location line="+2"/> <source>Stake your coins to support network and gain reward (default: 1)</source> <translation>Posa les teves monedes a fer &quot;stake&quot; per donar suport a la xarxa i obtenir una recompensa (per defecte: 1)</translation> </message> <message> <location line="+5"/> <source>Threshold for disconnecting misbehaving peers (default: 100)</source> <translation>Límit per a desconectar connexions errònies (per defecte: 100)</translation> </message> <message> <location line="+1"/> <source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source> <translation>Nombre de segons abans de reconectar amb connexions errònies (per defecte: 86400)</translation> </message> <message> <location line="-44"/> <source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source> <translation>Ha sorgit un error al configurar el port RPC %u escoltant a IPv4: %s</translation> </message> <message> <location line="+51"/> <source>Detach block and address databases. Increases shutdown time (default: 0)</source> <translation>Separeu el bloc i les bases de dades d&apos;adreces. Augmenta el temps d&apos;apagada (per defecte: 0)</translation> </message> <message> <location line="+109"/> <source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source> <translation>Error: La transacció ha sigut rebutjada. 
Això pot passar si algunes de les monedes al moneder ja s&apos;han gastat, per exemple, si vostè utilitza una còpia del wallet.dat i les monedes han estat gastades a la cópia pero no s&apos;han marcat com a gastades aqui.</translation> </message> <message> <location line="-5"/> <source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source> <translation>Error: Aquesta transacció requereix una comisió d&apos;almenys %s degut a la seva quantitat, complexitat, o l&apos;ús dels fons rebuts recentment</translation> </message> <message> <location line="-87"/> <source>Listen for JSON-RPC connections on &lt;port&gt; (default: 15715 or testnet: 25715)</source> <translation>Escoltar connexions JSON-RPC al port &lt;port&gt; (per defecte: 15715 o testnet: 25715)</translation> </message> <message> <location line="-11"/> <source>Accept command line and JSON-RPC commands</source> <translation>Acceptar línia d&apos;ordres i ordres JSON-RPC </translation> </message> <message> <location line="+101"/> <source>Error: Transaction creation failed </source> <translation>Error: La creació de transacció ha fallat.</translation> </message> <message> <location line="-5"/> <source>Error: Wallet locked, unable to create transaction </source> <translation>Error: Moneder bloquejat, no es pot de crear la transacció</translation> </message> <message> <location line="-8"/> <source>Importing blockchain data file.</source> <translation>Important fitxer de dades de la cadena de blocs</translation> </message> <message> <location line="+1"/> <source>Importing bootstrap blockchain data file.</source> <translation>Important fitxer de dades d&apos;arrencada de la cadena de blocs</translation> </message> <message> <location line="-88"/> <source>Run in the background as a daemon and accept commands</source> <translation>Executar en segon pla com a programa dimoni i acceptar ordres</translation> </message> <message> 
<location line="+1"/> <source>Use the test network</source> <translation>Usar la xarxa de prova</translation> </message> <message> <location line="-24"/> <source>Accept connections from outside (default: 1 if no -proxy or -connect)</source> <translation>Aceptar connexions d&apos;afora (per defecte: 1 si no -proxy o -connect)</translation> </message> <message> <location line="-38"/> <source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source> <translation>Ha sorgit un error al configurar el port RPC %u escoltant a IPv6, retrocedint a IPv4: %s</translation> </message> <message> <location line="+117"/> <source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source> <translation>Error en inicialitzar l&apos;entorn de base de dades %s! Per recuperar, FACI UNA COPIA DE SEGURETAT D&apos;AQUEST DIRECTORI, a continuació, retiri tot d&apos;ella excepte l&apos;arxiu wallet.dat.</translation> </message> <message> <location line="-20"/> <source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source> <translation>Establir la grandària màxima de les transaccions alta-prioritat/baixa-comisió en bytes (per defecte: 27000)</translation> </message> <message> <location line="+11"/> <source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source> <translation>Advertència: el -paytxfee és molt elevat! Aquesta és la comissió de transacció que pagaràs quan enviis una transacció.</translation> </message> <message> <location line="+61"/> <source>Warning: Please check that your computer&apos;s date and time are correct! If your clock is wrong BitSeeds will not work properly.</source> <translation>Avís: Comproveu que la data i hora de l&apos;equip siguin correctes! 
Si el seu rellotge és erroni BitSeeds no funcionarà correctament.</translation> </message> <message> <location line="-31"/> <source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source> <translation>Advertència: Error llegint l&apos;arxiu wallet.dat!! Totes les claus es llegeixen correctament, però hi ha dades de transaccions o entrades del llibre d&apos;adreces absents o bé son incorrectes.</translation> </message> <message> <location line="-18"/> <source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source> <translation>Advertència: L&apos;arxiu wallet.dat és corrupte, dades rescatades! L&apos;arxiu wallet.dat original ha estat desat com wallet.{estampa_temporal}.bak al directori %s; si el teu balanç o transaccions son incorrectes hauries de restaurar-lo de un backup.</translation> </message> <message> <location line="-30"/> <source>Attempt to recover private keys from a corrupt wallet.dat</source> <translation>Intentar recuperar les claus privades d&apos;un arxiu wallet.dat corrupte</translation> </message> <message> <location line="+4"/> <source>Block creation options:</source> <translation>Opcions de la creació de blocs:</translation> </message> <message> <location line="-62"/> <source>Connect only to the specified node(s)</source> <translation>Connectar només al(s) node(s) especificats</translation> </message> <message> <location line="+4"/> <source>Discover own IP address (default: 1 when listening and no -externalip)</source> <translation>Descobrir la pròpia adreça IP (per defecte: 1 quan escoltant i no -externalip)</translation> </message> <message> <location line="+94"/> <source>Failed to listen on any port. Use -listen=0 if you want this.</source> <translation>Error al escoltar a qualsevol port. 
Utilitza -listen=0 si vols això.</translation> </message> <message> <location line="-90"/> <source>Find peers using DNS lookup (default: 1)</source> <translation>Trobar companys utilitzant la recerca de DNS (per defecte: 1)</translation> </message> <message> <location line="+5"/> <source>Sync checkpoints policy (default: strict)</source> <translation>Política dels punts de control de sincronització (per defecte: estricta)</translation> </message> <message> <location line="+83"/> <source>Invalid -tor address: &apos;%s&apos;</source> <translation>Adreça -tor invalida: &apos;%s&apos;</translation> </message> <message> <location line="+4"/> <source>Invalid amount for -reservebalance=&lt;amount&gt;</source> <translation>Quantitat invalida per a -reservebalance=&lt;amount&gt;</translation> </message> <message> <location line="-82"/> <source>Maximum per-connection receive buffer, &lt;n&gt;*1000 bytes (default: 5000)</source> <translation>Mida màxima del buffer de recepció per a cada connexió, &lt;n&gt;*1000 bytes (default: 5000)</translation> </message> <message> <location line="+1"/> <source>Maximum per-connection send buffer, &lt;n&gt;*1000 bytes (default: 1000)</source> <translation>Mida màxima del buffer d&apos;enviament per a cada connexió, &lt;n&gt;*1000 bytes (default: 5000)</translation> </message> <message> <location line="-16"/> <source>Only connect to nodes in network &lt;net&gt; (IPv4, IPv6 or Tor)</source> <translation>Només connectar als nodes de la xarxa &lt;net&gt; (IPv4, IPv6 o Tor)</translation> </message> <message> <location line="+28"/> <source>Output extra debugging information. Implies all other -debug* options</source> <translation>Sortida d&apos;informació de depuració extra. 
Implica totes les opcions de depuracó -debug*</translation> </message> <message> <location line="+1"/> <source>Output extra network debugging information</source> <translation>Sortida d&apos;informació de depuració de xarxa addicional</translation> </message> <message> <location line="+1"/> <source>Prepend debug output with timestamp</source> <translation>Anteposar marca de temps a la sortida de depuració</translation> </message> <message> <location line="+35"/> <source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source> <translation>Opcions SSL: (veure la Wiki de Bitcoin per a instruccions de configuració SSL)</translation> </message> <message> <location line="-74"/> <source>Select the version of socks proxy to use (4-5, default: 5)</source> <translation>Seleccioneu la versió de proxy socks per utilitzar (4-5, per defecte: 5)</translation> </message> <message> <location line="+41"/> <source>Send trace/debug info to console instead of debug.log file</source> <translation>Enviar informació de traça/depuració a la consola en comptes del arxiu debug.log</translation> </message> <message> <location line="+1"/> <source>Send trace/debug info to debugger</source> <translation>Enviar informació de traça/depuració al depurador</translation> </message> <message> <location line="+28"/> <source>Set maximum block size in bytes (default: 250000)</source> <translation>Establir una mida máxima de bloc en bytes (per defecte: 250000)</translation> </message> <message> <location line="-1"/> <source>Set minimum block size in bytes (default: 0)</source> <translation>Establir una mida mínima de bloc en bytes (per defecte: 0)</translation> </message> <message> <location line="-29"/> <source>Shrink debug.log file on client startup (default: 1 when no -debug)</source> <translation>Reduir l&apos;arxiu debug.log al iniciar el client (per defecte 1 quan no -debug)</translation> </message> <message> <location line="-42"/> <source>Specify connection timeout in milliseconds 
(default: 5000)</source> <translation>Especificar el temps limit per a un intent de connexió en milisegons (per defecte: 5000)</translation> </message> <message> <location line="+109"/> <source>Unable to sign checkpoint, wrong checkpointkey? </source> <translation>No es pot signar el punt de control, la clau del punt de control esta malament? </translation> </message> <message> <location line="-80"/> <source>Use UPnP to map the listening port (default: 0)</source> <translation>Utilitza UPnP per a mapejar els ports d&apos;escolta (per defecte: 0)</translation> </message> <message> <location line="-1"/> <source>Use UPnP to map the listening port (default: 1 when listening)</source> <translation>Utilitza UPnP per a mapejar els ports d&apos;escolta (per defecte: 1 quan s&apos;escolta)</translation> </message> <message> <location line="-25"/> <source>Use proxy to reach tor hidden services (default: same as -proxy)</source> <translation>Utilitza proxy per arribar als serveis ocults de Tor (per defecte: la mateixa que -proxy)</translation> </message> <message> <location line="+42"/> <source>Username for JSON-RPC connections</source> <translation>Nom d&apos;usuari per a connexions JSON-RPC</translation> </message> <message> <location line="+47"/> <source>Verifying database integrity...</source> <translation>Comprovant la integritat de la base de dades ...</translation> </message> <message> <location line="+57"/> <source>WARNING: syncronized checkpoint violation detected, but skipped!</source> <translation>ADVERTÈNCIA: violació de punt de control sincronitzat detectada, es saltarà!</translation> </message> <message> <location line="+1"/> <source>Warning: Disk space is low!</source> <translation>Avís: L&apos;espai en disc és baix!</translation> </message> <message> <location line="-2"/> <source>Warning: This version is obsolete, upgrade required!</source> <translation>Advertència: Aquetsa versió està obsoleta, és necessari actualitzar!</translation> </message> <message> 
<location line="-48"/> <source>wallet.dat corrupt, salvage failed</source> <translation>L&apos;arxiu wallet.data és corrupte, el rescat de les dades ha fallat</translation> </message> <message> <location line="-54"/> <source>Password for JSON-RPC connections</source> <translation>Contrasenya per a connexions JSON-RPC</translation> </message> <message> <location line="-84"/> <source>%s, you must set a rpcpassword in the configuration file: %s It is recommended you use the following random password: rpcuser=bitseedsrpc rpcpassword=%s (you do not need to remember this password) The username and password MUST NOT be the same. If the file does not exist, create it with owner-readable-only file permissions. It is also recommended to set alertnotify so you are notified of problems; for example: alertnotify=echo %%s | mail -s &quot;BitSeeds Alert&quot; [email protected] </source> <translation>%s, ha d&apos;establir un rpcpassword al fitxer de configuració: %s Es recomana utilitzar la següent contrasenya aleatòria: rpcuser=bitseedsrpc rpcpassword=%s (No cal recordar aquesta contrasenya) El nom d&apos;usuari i contrasenya NO HA DE SER el mateix. Si no hi ha l&apos;arxiu, s&apos;ha de crear amb els permisos de només lectura per al propietari. També es recomana establir alertnotify per a que se li notifiquin els problemes; per exemple: alertnotify=echo %%s | mail -s &quot;BitSeeds Alert&quot; [email protected] </translation> </message> <message> <location line="+51"/> <source>Find peers using internet relay chat (default: 0)</source> <translation>Trobar companys utilitzant l&apos;IRC (per defecte: 1) {0)?}</translation> </message> <message> <location line="+5"/> <source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source> <translation>Sincronitzar el temps amb altres nodes. 
Desactivar si el temps al seu sistema és precís, per exemple, si fa ús de sincronització amb NTP (per defecte: 1)</translation> </message> <message> <location line="+15"/> <source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source> <translation>En crear transaccions, ignorar les entrades amb valor inferior a aquesta (per defecte: 0.01)</translation> </message> <message> <location line="+16"/> <source>Allow JSON-RPC connections from specified IP address</source> <translation>Permetre connexions JSON-RPC d&apos;adreces IP específiques</translation> </message> <message> <location line="+1"/> <source>Send commands to node running on &lt;ip&gt; (default: 127.0.0.1)</source> <translation>Enviar ordre al node en execució a &lt;ip&gt; (per defecte: 127.0.0.1)</translation> </message> <message> <location line="+1"/> <source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source> <translation>Executar orde quan el millor bloc canviï (%s al cmd es reemplaça per un bloc de hash)</translation> </message> <message> <location line="+3"/> <source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source> <translation>Executar una ordre quan una transacció del moneder canviï (%s in cmd es canvia per TxID)</translation> </message> <message> <location line="+3"/> <source>Require a confirmations for change (default: 0)</source> <translation>Requerir les confirmacions de canvi (per defecte: 0)</translation> </message> <message> <location line="+1"/> <source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source> <translation>Fer complir als scripts de transaccions d&apos;utilitzar operadors PUSH canòniques (per defecte: 1)</translation> </message> <message> <location line="+2"/> <source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source> <translation> Executar una ordre quan es rep un avís rellevant (%s en cmd es 
substitueix per missatge)</translation> </message> <message> <location line="+3"/> <source>Upgrade wallet to latest format</source> <translation>Actualitzar moneder a l&apos;últim format</translation> </message> <message> <location line="+1"/> <source>Set key pool size to &lt;n&gt; (default: 100)</source> <translation>Establir límit de nombre de claus a &lt;n&gt; (per defecte: 100)</translation> </message> <message> <location line="+1"/> <source>Rescan the block chain for missing wallet transactions</source> <translation>Re-escanejar cadena de blocs en cerca de transaccions de moneder perdudes</translation> </message> <message> <location line="+2"/> <source>How many blocks to check at startup (default: 2500, 0 = all)</source> <translation>Quants blocs s&apos;han de confirmar a l&apos;inici (per defecte: 2500, 0 = tots)</translation> </message> <message> <location line="+1"/> <source>How thorough the block verification is (0-6, default: 1)</source> <translation>Com és de minuciosa la verificació del bloc (0-6, per defecte: 1)</translation> </message> <message> <location line="+1"/> <source>Imports blocks from external blk000?.dat file</source> <translation>Importar blocs desde l&apos;arxiu extern blk000?.dat</translation> </message> <message> <location line="+8"/> <source>Use OpenSSL (https) for JSON-RPC connections</source> <translation>Utilitzar OpenSSL (https) per a connexions JSON-RPC</translation> </message> <message> <location line="+1"/> <source>Server certificate file (default: server.cert)</source> <translation>Arxiu del certificat de servidor (per defecte: server.cert)</translation> </message> <message> <location line="+1"/> <source>Server private key (default: server.pem)</source> <translation>Clau privada del servidor (per defecte: server.pem)</translation> </message> <message> <location line="+1"/> <source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source> <translation>Xifres acceptables (per defecte: 
TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation> </message> <message> <location line="+53"/> <source>Error: Wallet unlocked for staking only, unable to create transaction.</source> <translation>Error: Cartera bloquejada nomès per a fer &quot;stake&quot;, no es pot de crear la transacció</translation> </message> <message> <location line="+18"/> <source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source> <translation>ADVERTÈNCIA: Punt de control invàlid! Les transaccions mostrades podríen no ser correctes! Podria ser necessari actualitzar o notificar-ho als desenvolupadors.</translation> </message> <message> <location line="-158"/> <source>This help message</source> <translation>Aquest misatge d&apos;ajuda</translation> </message> <message> <location line="+95"/> <source>Wallet %s resides outside data directory %s.</source> <translation>El moneder %s resideix fora del directori de dades %s.</translation> </message> <message> <location line="+1"/> <source>Cannot obtain a lock on data directory %s. BitSeeds is probably already running.</source> <translation>No es pot obtenir un bloqueig en el directori de dades %s. 
BitSeeds probablement ja estigui en funcionament.</translation> </message> <message> <location line="-98"/> <source>BitSeeds</source> <translation>BitSeeds</translation> </message> <message> <location line="+140"/> <source>Unable to bind to %s on this computer (bind returned error %d, %s)</source> <translation>Impossible d&apos;unir %s a aquest ordinador (s&apos;ha retornat l&apos;error %d, %s)</translation> </message> <message> <location line="-130"/> <source>Connect through socks proxy</source> <translation>Conectar a través d&apos;un proxy SOCKS</translation> </message> <message> <location line="+3"/> <source>Allow DNS lookups for -addnode, -seednode and -connect</source> <translation>Permetre consultes DNS per a -addnode, -seednode i -connect</translation> </message> <message> <location line="+122"/> <source>Loading addresses...</source> <translation>Carregant adreces...</translation> </message> <message> <location line="-15"/> <source>Error loading blkindex.dat</source> <translation>Error carregant blkindex.dat</translation> </message> <message> <location line="+2"/> <source>Error loading wallet.dat: Wallet corrupted</source> <translation>Error carregant wallet.dat: Moneder corrupte</translation> </message> <message> <location line="+4"/> <source>Error loading wallet.dat: Wallet requires newer version of BitSeeds</source> <translation>Error en carregar wallet.dat: El moneder requereix la versió més recent de BitSeeds</translation> </message> <message> <location line="+1"/> <source>Wallet needed to be rewritten: restart BitSeeds to complete</source> <translation>El moneder necessita ser reescrita: reiniciar BitSeeds per completar</translation> </message> <message> <location line="+1"/> <source>Error loading wallet.dat</source> <translation>Error carregant wallet.dat</translation> </message> <message> <location line="-16"/> <source>Invalid -proxy address: &apos;%s&apos;</source> <translation>Adreça -proxy invalida: &apos;%s&apos;</translation> </message> 
<message> <location line="-1"/> <source>Unknown network specified in -onlynet: &apos;%s&apos;</source> <translation>Xarxa desconeguda especificada a -onlynet: &apos;%s&apos;</translation> </message> <message> <location line="-1"/> <source>Unknown -socks proxy version requested: %i</source> <translation>S&apos;ha demanat una versió desconeguda de -socks proxy: %i</translation> </message> <message> <location line="+4"/> <source>Cannot resolve -bind address: &apos;%s&apos;</source> <translation>No es pot resoldre l&apos;adreça -bind: &apos;%s&apos;</translation> </message> <message> <location line="+2"/> <source>Cannot resolve -externalip address: &apos;%s&apos;</source> <translation>No es pot resoldre l&apos;adreça -externalip: &apos;%s&apos;</translation> </message> <message> <location line="-24"/> <source>Invalid amount for -paytxfee=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Quantitat invalida per a -paytxfee=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+44"/> <source>Error: could not start node</source> <translation>Error: no s&apos;ha pogut iniciar el node</translation> </message> <message> <location line="+11"/> <source>Sending...</source> <translation>Enviant...</translation> </message> <message> <location line="+5"/> <source>Invalid amount</source> <translation>Quanitat invalida</translation> </message> <message> <location line="+1"/> <source>Insufficient funds</source> <translation>Balanç insuficient</translation> </message> <message> <location line="-34"/> <source>Loading block index...</source> <translation>Carregant índex de blocs...</translation> </message> <message> <location line="-103"/> <source>Add a node to connect to and attempt to keep the connection open</source> <translation>Afegir un node per a connectar&apos;s-hi i intentar mantenir la connexió oberta</translation> </message> <message> <location line="+122"/> <source>Unable to bind to %s on this computer. 
BitSeeds is probably already running.</source> <translation>No es pot enllaçar a %s en aquest equip. BitSeeds probablement ja estigui en funcionament.</translation> </message> <message> <location line="-97"/> <source>Fee per KB to add to transactions you send</source> <translation>Comisió per KB per a afegir a les transaccions que enviï</translation> </message> <message> <location line="+55"/> <source>Invalid amount for -mininput=&lt;amount&gt;: &apos;%s&apos;</source> <translation>Quantitat invalida per a -mininput=&lt;amount&gt;: &apos;%s&apos;</translation> </message> <message> <location line="+25"/> <source>Loading wallet...</source> <translation>Carregant moneder...</translation> </message> <message> <location line="+8"/> <source>Cannot downgrade wallet</source> <translation>No es pot reduir la versió del moneder</translation> </message> <message> <location line="+1"/> <source>Cannot initialize keypool</source> <translation>No es pot inicialitzar el keypool</translation> </message> <message> <location line="+1"/> <source>Cannot write default address</source> <translation>No es pot escriure l&apos;adreça per defecte</translation> </message> <message> <location line="+1"/> <source>Rescanning...</source> <translation>Re-escanejant...</translation> </message> <message> <location line="+5"/> <source>Done loading</source> <translation>Càrrega acabada</translation> </message> <message> <location line="-167"/> <source>To use the %s option</source> <translation>Utilitza la opció %s</translation> </message> <message> <location line="+14"/> <source>Error</source> <translation>Error</translation> </message> <message> <location line="+6"/> <source>You must set rpcpassword=&lt;password&gt; in the configuration file: %s If the file does not exist, create it with owner-readable-only file permissions.</source> <translation>Has de configurar el rpcpassword=&lt;password&gt; a l&apos;arxiu de configuració:\n %s\n Si l&apos;arxiu no existeix, crea&apos;l amb els permís 
owner-readable-only.</translation> </message> </context> </TS><|fim▁end|>
<location line="+1"/> <source>Show transaction details</source>
<|file_name|>main.js<|end_file_name|><|fim▁begin|>var rowIdSequence = 100; var rowClassRules = { "red-row": 'data.color == "Red"', "green-row": 'data.color == "Green"', "blue-row": 'data.color == "Blue"', }; var gridOptions = { defaultColDef: { width: 80, sortable: true, filter: true, resizable: true }, rowClassRules: rowClassRules, rowData: createRowData(), rowDragManaged: true, columnDefs: [ {cellRenderer: 'dragSourceCellRenderer'}, {field: "id"}, {field: "color"}, {field: "value1"},<|fim▁hole|> }, animateRows: true }; function createRowData() { var data = []; ['Red', 'Green', 'Blue', 'Red', 'Green', 'Blue', 'Red', 'Green', 'Blue'].forEach(function (color) { var newDataItem = { id: rowIdSequence++, color: color, value1: Math.floor(Math.random() * 100), value2: Math.floor(Math.random() * 100) }; data.push(newDataItem); }); return data; } function onDragOver(event) { var types = event.dataTransfer.types; var dragSupported = types.length; if (dragSupported) { event.dataTransfer.dropEffect = "move"; } event.preventDefault(); } function onDrop(event) { event.preventDefault(); var userAgent = window.navigator.userAgent; var isIE = userAgent.indexOf("Trident/") >= 0; var textData = event.dataTransfer.getData(isIE ? 'text' : 'text/plain'); var eJsonRow = document.createElement('div'); eJsonRow.classList.add('json-row'); eJsonRow.innerText = textData; var eJsonDisplay = document.querySelector('#eJsonDisplay'); eJsonDisplay.appendChild(eJsonRow); } // setup the grid after the page has finished loading document.addEventListener('DOMContentLoaded', function () { var gridDiv = document.querySelector('#myGrid'); new agGrid.Grid(gridDiv, gridOptions); });<|fim▁end|>
{field: "value2"} ], components: { dragSourceCellRenderer: DragSourceRenderer,
<|file_name|>network_node.cc<|end_file_name|><|fim▁begin|>/* * Copyright 2018 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ #include "test/scenario/network_node.h" #include <algorithm> #include <vector> #include <memory> #include "rtc_base/net_helper.h" #include "rtc_base/numerics/safe_minmax.h" namespace webrtc { namespace test { namespace { constexpr char kDummyTransportName[] = "dummy"; SimulatedNetwork::Config CreateSimulationConfig( NetworkSimulationConfig config) { SimulatedNetwork::Config sim_config; sim_config.link_capacity_kbps = config.bandwidth.kbps_or(0); sim_config.loss_percent = config.loss_rate * 100; sim_config.queue_delay_ms = config.delay.ms(); sim_config.delay_standard_deviation_ms = config.delay_std_dev.ms(); sim_config.packet_overhead = config.packet_overhead.bytes<int>(); sim_config.codel_active_queue_management = config.codel_active_queue_management; sim_config.queue_length_packets = config.packet_queue_length_limit.value_or(0); return sim_config; } } // namespace SimulationNode::SimulationNode(NetworkSimulationConfig config, SimulatedNetwork* behavior, EmulatedNetworkNode* network_node) : config_(config), simulation_(behavior), network_node_(network_node) {} std::unique_ptr<SimulatedNetwork> SimulationNode::CreateBehavior( NetworkSimulationConfig config) { SimulatedNetwork::Config sim_config = CreateSimulationConfig(config); return std::make_unique<SimulatedNetwork>(sim_config); } void SimulationNode::UpdateConfig( std::function<void(NetworkSimulationConfig*)> modifier) { modifier(&config_); SimulatedNetwork::Config sim_config = CreateSimulationConfig(config_); simulation_->SetConfig(sim_config); } void 
SimulationNode::PauseTransmissionUntil(Timestamp until) { simulation_->PauseTransmissionUntil(until.us()); } ColumnPrinter SimulationNode::ConfigPrinter() const { return ColumnPrinter::Lambda( "propagation_delay capacity loss_rate", [this](rtc::SimpleStringBuilder& sb) { sb.AppendFormat("%.3lf %.0lf %.2lf", config_.delay.seconds<double>(), config_.bandwidth.bps() / 8.0, config_.loss_rate); }); } NetworkNodeTransport::NetworkNodeTransport(Clock* sender_clock, Call* sender_call) : sender_clock_(sender_clock), sender_call_(sender_call) {} NetworkNodeTransport::~NetworkNodeTransport() = default; bool NetworkNodeTransport::SendRtp(const uint8_t* packet, size_t length, const PacketOptions& options) { int64_t send_time_ms = sender_clock_->TimeInMilliseconds(); rtc::SentPacket sent_packet; sent_packet.packet_id = options.packet_id; sent_packet.info.included_in_feedback = options.included_in_feedback; sent_packet.info.included_in_allocation = options.included_in_allocation; sent_packet.send_time_ms = send_time_ms; sent_packet.info.packet_size_bytes = length; sent_packet.info.packet_type = rtc::PacketType::kData; sender_call_->OnSentPacket(sent_packet); <|fim▁hole|> endpoint_->SendPacket(local_address_, remote_address_, buffer, packet_overhead_.bytes()); return true; } bool NetworkNodeTransport::SendRtcp(const uint8_t* packet, size_t length) { rtc::CopyOnWriteBuffer buffer(packet, length); rtc::CritScope crit(&crit_sect_); if (!endpoint_) return false; endpoint_->SendPacket(local_address_, remote_address_, buffer, packet_overhead_.bytes()); return true; } void NetworkNodeTransport::Connect(EmulatedEndpoint* endpoint, const rtc::SocketAddress& receiver_address, DataSize packet_overhead) { rtc::NetworkRoute route; route.connected = true; // We assume that the address will be unique in the lower bytes. 
route.local = rtc::RouteEndpoint::CreateWithNetworkId(static_cast<uint16_t>( receiver_address.ipaddr().v4AddressAsHostOrderInteger())); route.remote = rtc::RouteEndpoint::CreateWithNetworkId(static_cast<uint16_t>( receiver_address.ipaddr().v4AddressAsHostOrderInteger())); route.packet_overhead = packet_overhead.bytes() + receiver_address.ipaddr().overhead() + cricket::kUdpHeaderSize; { // Only IPv4 address is supported. RTC_CHECK_EQ(receiver_address.family(), AF_INET); rtc::CritScope crit(&crit_sect_); endpoint_ = endpoint; local_address_ = rtc::SocketAddress(endpoint_->GetPeerLocalAddress(), 0); remote_address_ = receiver_address; packet_overhead_ = packet_overhead; current_network_route_ = route; } sender_call_->GetTransportControllerSend()->OnNetworkRouteChanged( kDummyTransportName, route); } void NetworkNodeTransport::Disconnect() { rtc::CritScope crit(&crit_sect_); current_network_route_.connected = false; sender_call_->GetTransportControllerSend()->OnNetworkRouteChanged( kDummyTransportName, current_network_route_); current_network_route_ = {}; endpoint_ = nullptr; } } // namespace test } // namespace webrtc<|fim▁end|>
rtc::CritScope crit(&crit_sect_); if (!endpoint_) return false; rtc::CopyOnWriteBuffer buffer(packet, length);
<|file_name|>output7off.py<|end_file_name|><|fim▁begin|># THIS IS THE PYTHON CODE FOR PiFACE OUTPUT OFF # # Copyright (C) 2014 Tim Massey # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. #<|fim▁hole|># # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # Also add information on how to contact you by electronic and paper mail. #!/usr/bin/python import pifacedigitalio pifacedigital = pifacedigitalio.PiFaceDigital() pifacedigital.output_pins[7].turn_off()<|fim▁end|>
# This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details.
<|file_name|>SWIM.py<|end_file_name|><|fim▁begin|>import unittest import random from pygraph.classes.graph import graph class SWIM(object): def __init__(self, graph): self.graph = graph def edge_alive(self, nodeA, nodeB, alive): ''' edge_alive(A, B, True|False) ''' edge = (nodeA, nodeB) if alive: self.graph.add_edge(edge) else: self.graph.del_edge(edge) def node_alive(self, node, alive): ''' node_alive(A, True|False) ''' if alive: self.graph.node_attributes(node).clear() else: self.graph.node_attributes(node).append("dead") def ping(self, nodeStart, nodeEnd, k): ''' NodeStart to ping NodeEnd directly or indirectly through K random neighbors. Return True if nodeEnd receives ping, or False otherwise ''' g = self.graph # Check if direct ping works if g.has_edge((nodeStart, nodeEnd)) and \ "dead" not in g.node_attributes(nodeEnd): return True # Pick k random neighbors and let them ping end node for neighbor in self._random_neighbors(nodeStart, k): if self.ping(neighbor, nodeEnd, 0): return True # All pings have failed return False def _random_neighbors(self, node, b): neighbors = self.graph.neighbors(node) if len(neighbors) <= b: return neighbors else: return random.sample(neighbors, b) class SWIMTest(unittest.TestCase): def setUp(self): g = graph() g.add_nodes(xrange(10)) g.complete() self.graph = g self.swim = SWIM(g) def test_good_ping(self): swim = self.swim self.assertTrue(swim.ping(0, 1, 0)) self.assertTrue(swim.ping(1, 3, 0)) def test_dead_edge_ping(self): swim = self.swim swim.edge_alive(0, 1, False) self.assertFalse(swim.ping(0, 1, 0)) self.assertTrue(swim.ping(0, 1, 1)) def test_dead_node_ping(self): swim = self.swim swim.node_alive(2, False) self.assertFalse(swim.ping(0, 2, 0)) self.assertFalse(swim.ping(0, 2, 3))<|fim▁hole|> unittest.main()<|fim▁end|>
if __name__ == '__main__':
<|file_name|>simpleworkflow-rules.py<|end_file_name|><|fim▁begin|>import sys import os<|fim▁hole|>from baserules import BaseIndexData class IndexData(BaseIndexData): def __activate__(self, context): BaseIndexData.__activate__(self,context)<|fim▁end|>
from com.googlecode.fascinator.common import FascinatorHome sys.path.append(os.path.join(FascinatorHome.getPath(),"harvest", "workflows"))
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # ---------------------------------------------------------------------------- # cocos "jscompile" plugin # # Copyright 2013 (C) Intel # # License: MIT # ---------------------------------------------------------------------------- ''' "jscompile" plugin for cocos command line tool ''' __docformat__ = 'restructuredtext' import sys import subprocess import os import json import inspect import platform import cocos from MultiLanguage import MultiLanguage class CCPluginJSCompile(cocos.CCPlugin): """ compiles (encodes) and minifies JS files """ @staticmethod def plugin_name(): return "jscompile" @staticmethod def brief_description(): # returns a short description of this module return MultiLanguage.get_string('JSCOMPILE_BRIEF') # This is not the constructor, just an initializator def init(self, options, workingdir): """ Arguments: - `options`: """ self._current_src_dir = None self._src_dir_arr = self.normalize_path_in_list(options.src_dir_arr) self._dst_dir = options.dst_dir self._use_closure_compiler = options.use_closure_compiler self._verbose = options.verbose self._config = None self._workingdir = workingdir self._closure_params = '' if options.compiler_config != None: f = open(options.compiler_config) self._config = json.load(f) f.close() self._pre_order = self._config["pre_order"] self.normalize_path_in_list(self._pre_order) self._post_order = self._config["post_order"] self.normalize_path_in_list(self._post_order) self._skip = self._config["skip"] self.normalize_path_in_list(self._skip) self._closure_params = self._config["closure_params"] if options.closure_params is not None: self._closure_params = options.closure_params self._js_files = {} self._compressed_js_path = os.path.join(self._dst_dir, options.compressed_filename) self._compressed_jsc_path = os.path.join(self._dst_dir, options.compressed_filename+"c") def normalize_path_in_list(self, list): for i in list: tmp = os.path.normpath(i) 
list[list.index(i)] = tmp return list def get_relative_path(self, jsfile): try: # print "current src dir: "+self._current_src_dir) pos = jsfile.index(self._current_src_dir) if pos != 0: raise cocos.CCPluginError(MultiLanguage.get_string('LUACOMPILE_ERROR_SRCDIR_NAME_NOT_FOUND'), cocos.CCPluginError.ERROR_WRONG_ARGS) # print "origin js path: "+ jsfile # print "relative path: "+jsfile[len(self._current_src_dir)+1:] return jsfile[len(self._current_src_dir)+1:] except ValueError: raise cocos.CCPluginError(MultiLanguage.get_string('LUACOMPILE_ERROR_SRCDIR_NAME_NOT_FOUND'), cocos.CCPluginError.ERROR_WRONG_ARGS) def get_output_file_path(self, jsfile): """ Gets output file path by source js file """ # create folder for generated file jsc_filepath = "" relative_path = self.get_relative_path(jsfile)+"c" jsc_filepath = os.path.join(self._dst_dir, relative_path) dst_rootpath = os.path.split(jsc_filepath)[0] try: # print "creating dir (%s)" % (dst_rootpath) os.makedirs(dst_rootpath) except OSError: if os.path.exists(dst_rootpath) == False: # There was an error on creation, so make sure we know about it raise cocos.CCPluginError(MultiLanguage.get_string('LUACOMPILE_ERROR_MKDIR_FAILED_FMT', dst_rootpath), cocos.CCPluginError.ERROR_PATH_NOT_FOUND) # print "return jsc path: "+jsc_filepath return jsc_filepath def compile_js(self, jsfile, output_file): """ Compiles js file """ cocos.Logging.debug(MultiLanguage.get_string('JSCOMPILE_DEBUG_COMPILE_FILE_FMT', jsfile)) jsbcc_exe_path = "" if(cocos.os_is_linux()): if(platform.architecture()[0] == "32bit"): jsbcc_exe_path = os.path.join(self._workingdir, "bin", "linux", "jsbcc_x86") else: jsbcc_exe_path = os.path.join(self._workingdir, "bin", "linux", "jsbcc_x64") else: jsbcc_exe_path = os.path.join(self._workingdir, "bin", "jsbcc") cmd_str = "\"%s\" \"%s\" \"%s\"" % (jsbcc_exe_path, jsfile, output_file) self._run_cmd(cmd_str) def compress_js(self): """ Compress all js files into one big file. 
""" jsfiles = "" for src_dir in self._src_dir_arr: # print "\n----------src:"+src_dir jsfiles = jsfiles + " --js ".join(self._js_files[src_dir]) + " " compiler_jar_path = os.path.join(self._workingdir, "bin", "compiler.jar") command = "java -jar \"%s\" %s --js %s --js_output_file \"%s\"" % (compiler_jar_path, self._closure_params, jsfiles, self._compressed_js_path) self._run_cmd(command) def deep_iterate_dir(self, rootDir): for lists in os.listdir(rootDir): path = os.path.join(rootDir, lists) if os.path.isdir(path): self.deep_iterate_dir(path) elif os.path.isfile(path): if os.path.splitext(path)[1] == ".js": self._js_files[self._current_src_dir].append(path) def index_in_list(self, jsfile, l): """ Arguments: - `self`: - `jsfile`: - `l`: """ index = -1 for el in l: if jsfile.rfind(el) != -1: # print "index:"+str(index+1)+", el:"+el return index+1 index = index + 1 return -1 def js_filename_pre_order_compare(self, a, b): return self._js_filename_compare(a, b, self._pre_order, 1) def js_filename_post_order_compare(self, a, b): return self._js_filename_compare(a, b, self._post_order, -1) def _js_filename_compare(self, a, b, files, delta): index_a = self.index_in_list(a, files) index_b = self.index_in_list(b, files) is_a_in_list = index_a != -1 is_b_in_list = index_b != -1 if is_a_in_list and not is_b_in_list: return -1 * delta elif not is_a_in_list and is_b_in_list: return 1 * delta elif is_a_in_list and is_b_in_list: if index_a > index_b: return 1 elif index_a < index_b: return -1 else: return 0 else: return 0 def reorder_js_files(self): if self._config == None: return # print "before:"+str(self._js_files) for src_dir in self._js_files: # Remove file in exclude list need_remove_arr = [] for jsfile in self._js_files[src_dir]: for exclude_file in self._skip: if jsfile.rfind(exclude_file) != -1: # print "remove:" + jsfile need_remove_arr.append(jsfile) for need_remove in need_remove_arr: self._js_files[src_dir].remove(need_remove) 
self._js_files[src_dir].sort(cmp=self.js_filename_pre_order_compare) self._js_files[src_dir].sort(cmp=self.js_filename_post_order_compare) # print '-------------------' # print "after:" + str(self._js_files) def handle_all_js_files(self): """ Arguments: - `self`: """ if self._use_closure_compiler == True: cocos.Logging.info(MultiLanguage.get_string('JSCOMPILE_INFO_COMPRESS_TIP')) self.compress_js() self.compile_js(self._compressed_js_path, self._compressed_jsc_path) # remove tmp compressed file os.remove(self._compressed_js_path) else: cocos.Logging.info(MultiLanguage.get_string('JSCOMPILE_INFO_COMPILE_TO_BYTECODE')) for src_dir in self._src_dir_arr: for jsfile in self._js_files[src_dir]: self._current_src_dir = src_dir self.compile_js(jsfile, self.get_output_file_path(jsfile)) # will be called from the cocos.py script def run(self, argv, dependencies): """ """ self.parse_args(argv) # create output directory try: os.makedirs(self._dst_dir) except OSError: if os.path.exists(self._dst_dir) == False: raise cocos.CCPluginError(MultiLanguage.get_string('LUACOMPILE_ERROR_MKDIR_FAILED_FMT', self._dst_dir), cocos.CCPluginError.ERROR_PATH_NOT_FOUND) # download the bin folder jsbcc_exe_path = os.path.join(self._workingdir, "bin", "jsbcc") if not os.path.exists(jsbcc_exe_path): download_cmd_path = os.path.join(self._workingdir, os.pardir, os.pardir) subprocess.call("python %s -f -r no" % (os.path.join(download_cmd_path, "download-bin.py")), shell=True, cwd=download_cmd_path) # deep iterate the src directory for src_dir in self._src_dir_arr: self._current_src_dir = src_dir self._js_files[self._current_src_dir] = [] self.deep_iterate_dir(src_dir) self.reorder_js_files() self.handle_all_js_files() cocos.Logging.info(MultiLanguage.get_string('LUACOMPILE_INFO_FINISHED')) def parse_args(self, argv): """ """ from argparse import ArgumentParser parser = ArgumentParser(prog="cocos %s" % self.__class__.plugin_name(), description=self.__class__.brief_description()) 
parser.add_argument("-v", "--verbose", action="store_true", dest="verbose", help=MultiLanguage.get_string('LUACOMPILE_ARG_VERBOSE')) parser.add_argument("-s", "--src", action="append", dest="src_dir_arr", help=MultiLanguage.get_string('JSCOMPILE_ARG_SRC')) parser.add_argument("-d", "--dst", action="store", dest="dst_dir", help=MultiLanguage.get_string('JSCOMPILE_ARG_DST'))<|fim▁hole|> action="store_true", dest="use_closure_compiler", default=False, help=MultiLanguage.get_string('JSCOMPILE_ARG_CLOSURE')) parser.add_argument("-o", "--output_compressed_filename", action="store", dest="compressed_filename", default="game.min.js", help=MultiLanguage.get_string('JSCOMPILE_ARG_OUT_FILE_NAME')) parser.add_argument("-j", "--compiler_config", action="store", dest="compiler_config", help=MultiLanguage.get_string('JSCOMPILE_ARG_JSON_FILE')) parser.add_argument("-m", "--closure_params", action="store", dest="closure_params", help=MultiLanguage.get_string('JSCOMPILE_ARG_EXTRA_PARAM')) options = parser.parse_args(argv) if options.src_dir_arr == None: raise cocos.CCPluginError(MultiLanguage.get_string('JSCOMPILE_ERROR_SRC_NOT_SPECIFIED'), cocos.CCPluginError.ERROR_WRONG_ARGS) elif options.dst_dir == None: raise cocos.CCPluginError(MultiLanguage.get_string('LUACOMPILE_ERROR_DST_NOT_SPECIFIED'), cocos.CCPluginError.ERROR_WRONG_ARGS) else: for src_dir in options.src_dir_arr: if os.path.exists(src_dir) == False: raise cocos.CCPluginError(MultiLanguage.get_string('LUACOMPILE_ERROR_DIR_NOT_EXISTED_FMT', (src_dir)), cocos.CCPluginError.ERROR_PATH_NOT_FOUND) # script directory if getattr(sys, 'frozen', None): workingdir = os.path.realpath(os.path.dirname(sys.executable)) else: workingdir = os.path.realpath(os.path.dirname(__file__)) self.init(options, workingdir)<|fim▁end|>
parser.add_argument("-c", "--use_closure_compiler",
<|file_name|>DataBaseManagementService.java<|end_file_name|><|fim▁begin|>/** * */ package com.airnoise.services; import java.sql.SQLException; import com.airnoise.core.exception.PersistenceException; import com.airnoise.dao.DataBaseManager; /** * @author tomio * */ public class DataBaseManagementService { private DataBaseManager manager; public DataBaseManagementService(DataBaseManager manager) { this.manager = manager; } <|fim▁hole|> } catch (SQLException e) { throw new PersistenceException("Error creating database", e); } } public void reinitializeDB() throws PersistenceException { try { this.manager.getDataBaseManagementDAO().reinitializeDB(); } catch (SQLException e) { throw new PersistenceException("Error reinitializing the database", e); } } }<|fim▁end|>
public void createDB() throws PersistenceException { try { this.manager.getDataBaseManagementDAO().createDB();
<|file_name|>ServerModpackRemoteInstallTask.java<|end_file_name|><|fim▁begin|>/* * Hello Minecraft! Launcher * Copyright (C) 2020 huangyuhui <[email protected]> and contributors *<|fim▁hole|> * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program. If not, see <https://www.gnu.org/licenses/>. */ package org.jackhuang.hmcl.mod.server; import com.google.gson.JsonParseException; import com.google.gson.reflect.TypeToken; import org.jackhuang.hmcl.download.DefaultDependencyManager; import org.jackhuang.hmcl.download.GameBuilder; import org.jackhuang.hmcl.game.DefaultGameRepository; import org.jackhuang.hmcl.mod.ModpackConfiguration; import org.jackhuang.hmcl.task.Task; import org.jackhuang.hmcl.util.gson.JsonUtils; import org.jackhuang.hmcl.util.io.FileUtils; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; public class ServerModpackRemoteInstallTask extends Task<Void> { private final String name; private final DefaultDependencyManager dependency; private final DefaultGameRepository repository; private final List<Task<?>> dependencies = new ArrayList<>(1); private final List<Task<?>> dependents = new ArrayList<>(1); private final ServerModpackManifest manifest; public ServerModpackRemoteInstallTask(DefaultDependencyManager dependencyManager, ServerModpackManifest manifest, String name) { this.name = name; this.dependency = dependencyManager; this.repository = dependencyManager.getGameRepository(); this.manifest = manifest; File json = repository.getModpackConfiguration(name); if (repository.hasVersion(name) && !json.exists()) throw new IllegalArgumentException("Version " + 
name + " already exists."); GameBuilder builder = dependencyManager.gameBuilder().name(name); for (ServerModpackManifest.Addon addon : manifest.getAddons()) { builder.version(addon.getId(), addon.getVersion()); } dependents.add(builder.buildAsync()); onDone().register(event -> { if (event.isFailed()) repository.removeVersionFromDisk(name); }); ModpackConfiguration<ServerModpackManifest> config = null; try { if (json.exists()) { config = JsonUtils.GSON.fromJson(FileUtils.readText(json), new TypeToken<ModpackConfiguration<ServerModpackManifest>>() { }.getType()); if (!MODPACK_TYPE.equals(config.getType())) throw new IllegalArgumentException("Version " + name + " is not a Server modpack. Cannot update this version."); } } catch (JsonParseException | IOException ignore) { } } @Override public List<Task<?>> getDependents() { return dependents; } @Override public List<Task<?>> getDependencies() { return dependencies; } @Override public void execute() throws Exception { dependencies.add(new ServerModpackCompletionTask(dependency, name, new ModpackConfiguration<>(manifest, MODPACK_TYPE, manifest.getName(), manifest.getVersion(), Collections.emptyList()))); } public static final String MODPACK_TYPE = "Server"; }<|fim▁end|>
* This program is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or
<|file_name|>zsyscall_darwin_arm.go<|end_file_name|><|fim▁begin|>// mksyscall.pl -l32 syscall_bsd.go syscall_darwin.go syscall_darwin_arm.go // MACHINE GENERATED BY THE COMMAND ABOVE; DO NOT EDIT package syscall import "unsafe"<|fim▁hole|> func getgroups(ngid int, gid *_Gid_t) (n int, err error) { r0, _, e1 := RawSyscall(SYS_GETGROUPS, uintptr(ngid), uintptr(unsafe.Pointer(gid)), 0) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func setgroups(ngid int, gid *_Gid_t) (err error) { _, _, e1 := RawSyscall(SYS_SETGROUPS, uintptr(ngid), uintptr(unsafe.Pointer(gid)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func wait4(pid int, wstatus *_C_int, options int, rusage *Rusage) (wpid int, err error) { r0, _, e1 := Syscall6(SYS_WAIT4, uintptr(pid), uintptr(unsafe.Pointer(wstatus)), uintptr(options), uintptr(unsafe.Pointer(rusage)), 0, 0) wpid = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func accept(s int, rsa *RawSockaddrAny, addrlen *_Socklen) (fd int, err error) { r0, _, e1 := Syscall(SYS_ACCEPT, uintptr(s), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) fd = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func bind(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) { _, _, e1 := Syscall(SYS_BIND, uintptr(s), uintptr(addr), uintptr(addrlen)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func connect(s int, addr unsafe.Pointer, addrlen _Socklen) (err error) { _, _, e1 := Syscall(SYS_CONNECT, uintptr(s), uintptr(addr), uintptr(addrlen)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func socket(domain int, typ int, proto int) (fd int, err error) { r0, _, 
e1 := RawSyscall(SYS_SOCKET, uintptr(domain), uintptr(typ), uintptr(proto)) fd = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getsockopt(s int, level int, name int, val unsafe.Pointer, vallen *_Socklen) (err error) { _, _, e1 := Syscall6(SYS_GETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(unsafe.Pointer(vallen)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func setsockopt(s int, level int, name int, val unsafe.Pointer, vallen uintptr) (err error) { _, _, e1 := Syscall6(SYS_SETSOCKOPT, uintptr(s), uintptr(level), uintptr(name), uintptr(val), uintptr(vallen), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getpeername(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) { _, _, e1 := RawSyscall(SYS_GETPEERNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func getsockname(fd int, rsa *RawSockaddrAny, addrlen *_Socklen) (err error) { _, _, e1 := RawSyscall(SYS_GETSOCKNAME, uintptr(fd), uintptr(unsafe.Pointer(rsa)), uintptr(unsafe.Pointer(addrlen))) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Shutdown(s int, how int) (err error) { _, _, e1 := Syscall(SYS_SHUTDOWN, uintptr(s), uintptr(how), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func socketpair(domain int, typ int, proto int, fd *[2]int32) (err error) { _, _, e1 := RawSyscall6(SYS_SOCKETPAIR, uintptr(domain), uintptr(typ), uintptr(proto), uintptr(unsafe.Pointer(fd)), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func recvfrom(fd int, p 
[]byte, flags int, from *RawSockaddrAny, fromlen *_Socklen) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { _p0 = unsafe.Pointer(&p[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall6(SYS_RECVFROM, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(flags), uintptr(unsafe.Pointer(from)), uintptr(unsafe.Pointer(fromlen))) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func sendto(s int, buf []byte, flags int, to unsafe.Pointer, addrlen _Socklen) (err error) { var _p0 unsafe.Pointer if len(buf) > 0 { _p0 = unsafe.Pointer(&buf[0]) } else { _p0 = unsafe.Pointer(&_zero) } _, _, e1 := Syscall6(SYS_SENDTO, uintptr(s), uintptr(_p0), uintptr(len(buf)), uintptr(flags), uintptr(to), uintptr(addrlen)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func recvmsg(s int, msg *Msghdr, flags int) (n int, err error) { r0, _, e1 := Syscall(SYS_RECVMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags)) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func sendmsg(s int, msg *Msghdr, flags int) (n int, err error) { r0, _, e1 := Syscall(SYS_SENDMSG, uintptr(s), uintptr(unsafe.Pointer(msg)), uintptr(flags)) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func kevent(kq int, change unsafe.Pointer, nchange int, event unsafe.Pointer, nevent int, timeout *Timespec) (n int, err error) { r0, _, e1 := Syscall6(SYS_KEVENT, uintptr(kq), uintptr(change), uintptr(nchange), uintptr(event), uintptr(nevent), uintptr(unsafe.Pointer(timeout))) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func sysctl(mib []_C_int, old *byte, oldlen *uintptr, new *byte, newlen uintptr) (err error) { var _p0 unsafe.Pointer if len(mib) > 0 { _p0 
= unsafe.Pointer(&mib[0]) } else { _p0 = unsafe.Pointer(&_zero) } _, _, e1 := Syscall6(SYS___SYSCTL, uintptr(_p0), uintptr(len(mib)), uintptr(unsafe.Pointer(old)), uintptr(unsafe.Pointer(oldlen)), uintptr(unsafe.Pointer(new)), uintptr(newlen)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func utimes(path string, timeval *[2]Timeval) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_UTIMES, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(timeval)), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func futimes(fd int, timeval *[2]Timeval) (err error) { _, _, e1 := Syscall(SYS_FUTIMES, uintptr(fd), uintptr(unsafe.Pointer(timeval)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func fcntl(fd int, cmd int, arg int) (val int, err error) { r0, _, e1 := Syscall(SYS_FCNTL, uintptr(fd), uintptr(cmd), uintptr(arg)) val = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func ptrace(request int, pid int, addr uintptr, data uintptr) (err error) { _, _, e1 := Syscall6(SYS_PTRACE, uintptr(request), uintptr(pid), uintptr(addr), uintptr(data), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func pipe() (r int, w int, err error) { r0, r1, e1 := RawSyscall(SYS_PIPE, 0, 0, 0) r = int(r0) w = int(r1) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func kill(pid int, signum int, posix int) (err error) { _, _, e1 := Syscall(SYS_KILL, uintptr(pid), uintptr(signum), uintptr(posix)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Access(path string, mode uint32) 
(err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_ACCESS, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Adjtime(delta *Timeval, olddelta *Timeval) (err error) { _, _, e1 := Syscall(SYS_ADJTIME, uintptr(unsafe.Pointer(delta)), uintptr(unsafe.Pointer(olddelta)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Chdir(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_CHDIR, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Chflags(path string, flags int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_CHFLAGS, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Chmod(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_CHMOD, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Chown(path string, uid int, gid int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_CHOWN, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid)) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Chroot(path string) (err error) { var _p0 *byte _p0, err = 
BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_CHROOT, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Close(fd int) (err error) { _, _, e1 := Syscall(SYS_CLOSE, uintptr(fd), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Dup(fd int) (nfd int, err error) { r0, _, e1 := Syscall(SYS_DUP, uintptr(fd), 0, 0) nfd = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Dup2(from int, to int) (err error) { _, _, e1 := Syscall(SYS_DUP2, uintptr(from), uintptr(to), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Exchangedata(path1 string, path2 string, options int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path1) if err != nil { return } var _p1 *byte _p1, err = BytePtrFromString(path2) if err != nil { return } _, _, e1 := Syscall(SYS_EXCHANGEDATA, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), uintptr(options)) use(unsafe.Pointer(_p0)) use(unsafe.Pointer(_p1)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Exit(code int) { Syscall(SYS_EXIT, uintptr(code), 0, 0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fchdir(fd int) (err error) { _, _, e1 := Syscall(SYS_FCHDIR, uintptr(fd), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fchflags(fd int, flags int) (err error) { _, _, e1 := Syscall(SYS_FCHFLAGS, uintptr(fd), uintptr(flags), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fchmod(fd int, mode uint32) (err error) { _, _, e1 := Syscall(SYS_FCHMOD, 
uintptr(fd), uintptr(mode), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fchown(fd int, uid int, gid int) (err error) { _, _, e1 := Syscall(SYS_FCHOWN, uintptr(fd), uintptr(uid), uintptr(gid)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Flock(fd int, how int) (err error) { _, _, e1 := Syscall(SYS_FLOCK, uintptr(fd), uintptr(how), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fpathconf(fd int, name int) (val int, err error) { r0, _, e1 := Syscall(SYS_FPATHCONF, uintptr(fd), uintptr(name), 0) val = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fstat(fd int, stat *Stat_t) (err error) { _, _, e1 := Syscall(SYS_FSTAT64, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fstatfs(fd int, stat *Statfs_t) (err error) { _, _, e1 := Syscall(SYS_FSTATFS64, uintptr(fd), uintptr(unsafe.Pointer(stat)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Fsync(fd int) (err error) { _, _, e1 := Syscall(SYS_FSYNC, uintptr(fd), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Ftruncate(fd int, length int64) (err error) { _, _, e1 := Syscall(SYS_FTRUNCATE, uintptr(fd), uintptr(length), uintptr(length>>32)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getdirentries(fd int, buf []byte, basep *uintptr) (n int, err error) { var _p0 unsafe.Pointer if len(buf) > 0 { _p0 = unsafe.Pointer(&buf[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall6(SYS_GETDIRENTRIES64, uintptr(fd), uintptr(_p0), 
uintptr(len(buf)), uintptr(unsafe.Pointer(basep)), 0, 0) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getdtablesize() (size int) { r0, _, _ := Syscall(SYS_GETDTABLESIZE, 0, 0, 0) size = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getegid() (egid int) { r0, _, _ := RawSyscall(SYS_GETEGID, 0, 0, 0) egid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Geteuid() (uid int) { r0, _, _ := RawSyscall(SYS_GETEUID, 0, 0, 0) uid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getgid() (gid int) { r0, _, _ := RawSyscall(SYS_GETGID, 0, 0, 0) gid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpgid(pid int) (pgid int, err error) { r0, _, e1 := RawSyscall(SYS_GETPGID, uintptr(pid), 0, 0) pgid = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpgrp() (pgrp int) { r0, _, _ := RawSyscall(SYS_GETPGRP, 0, 0, 0) pgrp = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpid() (pid int) { r0, _, _ := RawSyscall(SYS_GETPID, 0, 0, 0) pid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getppid() (ppid int) { r0, _, _ := RawSyscall(SYS_GETPPID, 0, 0, 0) ppid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getpriority(which int, who int) (prio int, err error) { r0, _, e1 := Syscall(SYS_GETPRIORITY, uintptr(which), uintptr(who), 0) prio = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getrlimit(which int, lim *Rlimit) (err error) { _, _, e1 := RawSyscall(SYS_GETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED 
BY THE COMMAND AT THE TOP; DO NOT EDIT func Getrusage(who int, rusage *Rusage) (err error) { _, _, e1 := RawSyscall(SYS_GETRUSAGE, uintptr(who), uintptr(unsafe.Pointer(rusage)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getsid(pid int) (sid int, err error) { r0, _, e1 := RawSyscall(SYS_GETSID, uintptr(pid), 0, 0) sid = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Getuid() (uid int) { r0, _, _ := RawSyscall(SYS_GETUID, 0, 0, 0) uid = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Issetugid() (tainted bool) { r0, _, _ := RawSyscall(SYS_ISSETUGID, 0, 0, 0) tainted = bool(r0 != 0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Kqueue() (fd int, err error) { r0, _, e1 := Syscall(SYS_KQUEUE, 0, 0, 0) fd = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Lchown(path string, uid int, gid int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_LCHOWN, uintptr(unsafe.Pointer(_p0)), uintptr(uid), uintptr(gid)) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Link(path string, link string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } var _p1 *byte _p1, err = BytePtrFromString(link) if err != nil { return } _, _, e1 := Syscall(SYS_LINK, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0) use(unsafe.Pointer(_p0)) use(unsafe.Pointer(_p1)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Listen(s int, backlog int) (err error) { _, _, e1 := Syscall(SYS_LISTEN, uintptr(s), uintptr(backlog), 0) if e1 != 0 { err = errnoErr(e1) } 
return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Lstat(path string, stat *Stat_t) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_LSTAT64, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mkdir(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_MKDIR, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mkfifo(path string, mode uint32) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_MKFIFO, uintptr(unsafe.Pointer(_p0)), uintptr(mode), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mknod(path string, mode uint32, dev int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_MKNOD, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(dev)) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mlock(b []byte) (err error) { var _p0 unsafe.Pointer if len(b) > 0 { _p0 = unsafe.Pointer(&b[0]) } else { _p0 = unsafe.Pointer(&_zero) } _, _, e1 := Syscall(SYS_MLOCK, uintptr(_p0), uintptr(len(b)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mlockall(flags int) (err error) { _, _, e1 := Syscall(SYS_MLOCKALL, uintptr(flags), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Mprotect(b 
[]byte, prot int) (err error) { var _p0 unsafe.Pointer if len(b) > 0 { _p0 = unsafe.Pointer(&b[0]) } else { _p0 = unsafe.Pointer(&_zero) } _, _, e1 := Syscall(SYS_MPROTECT, uintptr(_p0), uintptr(len(b)), uintptr(prot)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Munlock(b []byte) (err error) { var _p0 unsafe.Pointer if len(b) > 0 { _p0 = unsafe.Pointer(&b[0]) } else { _p0 = unsafe.Pointer(&_zero) } _, _, e1 := Syscall(SYS_MUNLOCK, uintptr(_p0), uintptr(len(b)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Munlockall() (err error) { _, _, e1 := Syscall(SYS_MUNLOCKALL, 0, 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Open(path string, mode int, perm uint32) (fd int, err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } r0, _, e1 := Syscall(SYS_OPEN, uintptr(unsafe.Pointer(_p0)), uintptr(mode), uintptr(perm)) use(unsafe.Pointer(_p0)) fd = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Pathconf(path string, name int) (val int, err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } r0, _, e1 := Syscall(SYS_PATHCONF, uintptr(unsafe.Pointer(_p0)), uintptr(name), 0) use(unsafe.Pointer(_p0)) val = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Pread(fd int, p []byte, offset int64) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { _p0 = unsafe.Pointer(&p[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall6(SYS_PREAD, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Pwrite(fd 
int, p []byte, offset int64) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { _p0 = unsafe.Pointer(&p[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall6(SYS_PWRITE, uintptr(fd), uintptr(_p0), uintptr(len(p)), uintptr(offset), uintptr(offset>>32), 0) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func read(fd int, p []byte) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { _p0 = unsafe.Pointer(&p[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(_p0), uintptr(len(p))) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Readlink(path string, buf []byte) (n int, err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } var _p1 unsafe.Pointer if len(buf) > 0 { _p1 = unsafe.Pointer(&buf[0]) } else { _p1 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall(SYS_READLINK, uintptr(unsafe.Pointer(_p0)), uintptr(_p1), uintptr(len(buf))) use(unsafe.Pointer(_p0)) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Rename(from string, to string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(from) if err != nil { return } var _p1 *byte _p1, err = BytePtrFromString(to) if err != nil { return } _, _, e1 := Syscall(SYS_RENAME, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0) use(unsafe.Pointer(_p0)) use(unsafe.Pointer(_p1)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Revoke(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_REVOKE, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT 
func Rmdir(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_RMDIR, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Seek(fd int, offset int64, whence int) (newoffset int64, err error) { r0, r1, e1 := Syscall6(SYS_LSEEK, uintptr(fd), uintptr(offset), uintptr(offset>>32), uintptr(whence), 0, 0) newoffset = int64(int64(r1)<<32 | int64(r0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Select(n int, r *FdSet, w *FdSet, e *FdSet, timeout *Timeval) (err error) { _, _, e1 := Syscall6(SYS_SELECT, uintptr(n), uintptr(unsafe.Pointer(r)), uintptr(unsafe.Pointer(w)), uintptr(unsafe.Pointer(e)), uintptr(unsafe.Pointer(timeout)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setegid(egid int) (err error) { _, _, e1 := Syscall(SYS_SETEGID, uintptr(egid), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Seteuid(euid int) (err error) { _, _, e1 := RawSyscall(SYS_SETEUID, uintptr(euid), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setgid(gid int) (err error) { _, _, e1 := RawSyscall(SYS_SETGID, uintptr(gid), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setlogin(name string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(name) if err != nil { return } _, _, e1 := Syscall(SYS_SETLOGIN, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setpgid(pid int, pgid int) (err error) { _, _, e1 := 
RawSyscall(SYS_SETPGID, uintptr(pid), uintptr(pgid), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setpriority(which int, who int, prio int) (err error) { _, _, e1 := Syscall(SYS_SETPRIORITY, uintptr(which), uintptr(who), uintptr(prio)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setprivexec(flag int) (err error) { _, _, e1 := Syscall(SYS_SETPRIVEXEC, uintptr(flag), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setregid(rgid int, egid int) (err error) { _, _, e1 := RawSyscall(SYS_SETREGID, uintptr(rgid), uintptr(egid), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setreuid(ruid int, euid int) (err error) { _, _, e1 := RawSyscall(SYS_SETREUID, uintptr(ruid), uintptr(euid), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setrlimit(which int, lim *Rlimit) (err error) { _, _, e1 := RawSyscall(SYS_SETRLIMIT, uintptr(which), uintptr(unsafe.Pointer(lim)), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setsid() (pid int, err error) { r0, _, e1 := RawSyscall(SYS_SETSID, 0, 0, 0) pid = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Settimeofday(tp *Timeval) (err error) { _, _, e1 := RawSyscall(SYS_SETTIMEOFDAY, uintptr(unsafe.Pointer(tp)), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Setuid(uid int) (err error) { _, _, e1 := RawSyscall(SYS_SETUID, uintptr(uid), 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Stat(path string, stat *Stat_t) (err 
error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_STAT64, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Statfs(path string, stat *Statfs_t) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_STATFS64, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(stat)), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Symlink(path string, link string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } var _p1 *byte _p1, err = BytePtrFromString(link) if err != nil { return } _, _, e1 := Syscall(SYS_SYMLINK, uintptr(unsafe.Pointer(_p0)), uintptr(unsafe.Pointer(_p1)), 0) use(unsafe.Pointer(_p0)) use(unsafe.Pointer(_p1)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Sync() (err error) { _, _, e1 := Syscall(SYS_SYNC, 0, 0, 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Truncate(path string, length int64) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_TRUNCATE, uintptr(unsafe.Pointer(_p0)), uintptr(length), uintptr(length>>32)) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Umask(newmask int) (oldmask int) { r0, _, _ := Syscall(SYS_UMASK, uintptr(newmask), 0, 0) oldmask = int(r0) return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Undelete(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_UNDELETE, 
uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Unlink(path string) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_UNLINK, uintptr(unsafe.Pointer(_p0)), 0, 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func Unmount(path string, flags int) (err error) { var _p0 *byte _p0, err = BytePtrFromString(path) if err != nil { return } _, _, e1 := Syscall(SYS_UNMOUNT, uintptr(unsafe.Pointer(_p0)), uintptr(flags), 0) use(unsafe.Pointer(_p0)) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func write(fd int, p []byte) (n int, err error) { var _p0 unsafe.Pointer if len(p) > 0 { _p0 = unsafe.Pointer(&p[0]) } else { _p0 = unsafe.Pointer(&_zero) } r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(_p0), uintptr(len(p))) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func mmap(addr uintptr, length uintptr, prot int, flag int, fd int, pos int64) (ret uintptr, err error) { r0, _, e1 := Syscall9(SYS_MMAP, uintptr(addr), uintptr(length), uintptr(prot), uintptr(flag), uintptr(fd), uintptr(pos), uintptr(pos>>32), 0, 0) ret = uintptr(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func munmap(addr uintptr, length uintptr) (err error) { _, _, e1 := Syscall(SYS_MUNMAP, uintptr(addr), uintptr(length), 0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func readlen(fd int, buf *byte, nbuf int) (n int, err error) { r0, _, e1 := Syscall(SYS_READ, uintptr(fd), uintptr(unsafe.Pointer(buf)), uintptr(nbuf)) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS 
FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func writelen(fd int, buf *byte, nbuf int) (n int, err error) { r0, _, e1 := Syscall(SYS_WRITE, uintptr(fd), uintptr(unsafe.Pointer(buf)), uintptr(nbuf)) n = int(r0) if e1 != 0 { err = errnoErr(e1) } return } // THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT func gettimeofday(tp *Timeval) (sec int32, usec int32, err error) { r0, r1, e1 := RawSyscall(SYS_GETTIMEOFDAY, uintptr(unsafe.Pointer(tp)), 0, 0) sec = int32(r0) usec = int32(r1) if e1 != 0 { err = errnoErr(e1) } return }<|fim▁end|>
// THIS FILE IS GENERATED BY THE COMMAND AT THE TOP; DO NOT EDIT
<|file_name|>attr.rs<|end_file_name|><|fim▁begin|>use syn::{ parse::{Parse, ParseStream}, LitStr, Token, }; use super::case::RenameRule; mod kw { syn::custom_keyword!(rename); syn::custom_keyword!(rename_all); } pub struct RenameAttr(LitStr); impl RenameAttr { pub fn into_inner(self) -> LitStr { self.0 } } impl Parse for RenameAttr { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let _: kw::rename = input.parse()?; let _: Token![=] = input.parse()?; Ok(Self(input.parse()?)) } } pub struct RenameAllAttr(RenameRule); impl RenameAllAttr {<|fim▁hole|> self.0 } } impl Parse for RenameAllAttr { fn parse(input: ParseStream<'_>) -> syn::Result<Self> { let _: kw::rename_all = input.parse()?; let _: Token![=] = input.parse()?; let s: LitStr = input.parse()?; Ok(Self( s.value() .parse() .map_err(|_| syn::Error::new_spanned(s, "invalid value for rename_all"))?, )) } }<|fim▁end|>
pub fn into_inner(self) -> RenameRule {
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>use crossbeam_channel::{bounded, select}; use crossbeam_utils::thread; fn main() { let people = vec!["Anna", "Bob", "Cody", "Dave", "Eva"]; let (s, r) = bounded(1); // Make room for one unmatched send. // Either send my name into the channel or receive someone else's, whatever happens first. let seek = |name, s, r| { select! { recv(r) -> peer => println!("{} received a message from {}.", name, peer.unwrap()), send(s, name) -> _ => {}, // Wait for someone to receive my message. } }; thread::scope(|scope| { for name in people { let (s, r) = (s.clone(), r.clone()); scope.spawn(move |_| seek(name, s, r)); } })<|fim▁hole|> // Check if there is a pending send operation. if let Ok(name) = r.try_recv() { println!("No one received {}’s message.", name); } }<|fim▁end|>
.unwrap();
<|file_name|>Tephigrams_From_Radiosonde_Climatology_Onset.py<|end_file_name|><|fim▁begin|># Now make a simple example using the custom projection. import pdb import sys import os import pkg_resources pkg_resources.require('matplotlib==1.4.0') import datetime from dateutil.relativedelta import relativedelta import re import math from matplotlib.ticker import ScalarFormatter, MultipleLocator from matplotlib.collections import LineCollection import matplotlib.pyplot as plt from StringIO import StringIO import numpy as np from numpy import load # Exception handling, with line number and stuff import linecache import sys def PrintException(): exc_type, exc_obj, tb = sys.exc_info() f = tb.tb_frame lineno = tb.tb_lineno filename = f.f_code.co_filename linecache.checkcache(filename) line = linecache.getline(filename, lineno, f.f_globals) print 'EXCEPTION IN ({}, LINE {} "{}"): {}'.format(filename, lineno, line.strip(), exc_obj) import imp imp.load_source('SoundingRoutines', '/nfs/see-fs-01_users/eepdw/python_scripts/Tephigram/Sounding_Routines.py') imp.load_source('TephigramPlot', '/nfs/see-fs-01_users/eepdw/python_scripts/Tephigram/Tephigram_Functions.py') from TephigramPlot import * from SoundingRoutines import * imp.load_source('GeogFuncs', '/nfs/see-fs-01_users/eepdw/python_scripts/modules/GeogFunctions.py') from GeogFuncs import * pmin=200. 
station_list_cs=[42182, 43003, 43014, 42867, 43371, 43353, 43285, 43192, 43150, 42339, 40990, 40948] #station_list_cs=[43003] date_min=datetime.datetime(1960,5,1,0,0,0) date_max=datetime.datetime(2014,10,1,0,0,0) delta = relativedelta(weeks=+1) variable_list={'pressures': 0, 'temps':1, 'dewpoints':2, 'winddirs':3, 'windspeeds':4, 'pot_temp':5, 'sat_vap_pres':6, 'vap_press':7, 'rel_hum':8, 'wvmr':9, 'sp_hum':10, 'sat_temp':11, 'theta_e':12, 'theta_e_sat':13, 'theta_e_minus_theta_e_sat':14} variable_list_line={'lcl_temp': 0, 'lcl_vpt':1, 'pbl_pressure':2, 'surface_pressure':3, 'T_eq_0':4} def variable_name_index_match(variable, variable_list): for key, value in variable_list.iteritems(): # iter on both keys and values if key.startswith('%s' % variable) and key.endswith('%s' % variable): arr_index_var=value return arr_index_var # Parse the data for stat in station_list_cs: station_name,la,lo, st_height=StationInfoSearch(stat) load_file = load('/nfs/a90/eepdw/Data/Observations/Radiosonde_Numpy/Radiosonde_Cross_Section_' 'IND_SOUNDING_INTERP_MEAN_Climat_%s_%s_%s_%s.npz' % (date_min.strftime('%Y%m%d'), date_max.strftime('%Y%m%d'), delta, stat)) data=load_file['date_bin_mean_all_dates_one_station'] dates=load_file['dates_for_plotting'] for bin in range(data.shape[0]): try: p=data[bin,0,:]/100 T=data[bin,1,:]-273.15 Td=T-data[bin,2,:] h=data[bin,15,:] da=dates[bin] #print T #print p #print Td #pdb.set_trace() #u_wind,v_wind = u_v_winds(data[bin,3,:], data[bin,4,:]) u_wind,v_wind = data[bin,-2,:], data[bin,-1,:] # Create a new figure. 
The dimensions here give a good aspect ratio fig = plt.figure(figsize=(10, 8), frameon=False) #fig.patch.set_visible(False) tephigram_plot_height=0.85 tephigram_plot_bottom=.085 ax = fig.add_axes([.085,tephigram_plot_bottom,.65,tephigram_plot_height], projection='skewx', frameon=False, axisbg='w') ax.set_yscale('log') plt.grid(True) #pdb.set_trace() tmax=math.ceil(nanmax(T)/10)*10 tmin=math.floor(nanmin(Td[p>400])/10)*10 pmax=math.ceil(nanmax(p)/50)*50 P=linspace(pmax,pmin,37) w = array([0.0001,0.0004,0.001, 0.002, 0.004, 0.007, 0.01, 0.016, 0.024, 0.032, 0.064, 0.128]) ax.add_mixratio_isopleths(w,linspace(pmax, 700., 37),color='m',ls='-',alpha=.5,lw=0.5) ax.add_dry_adiabats(linspace(-40,40,9),P,color='k',ls='-',alpha=.5,lw=0.8) ax.add_moist_adiabats(linspace(-40,40,18),P,color='k',ls='--',alpha=.5,lw=0.8, do_labels=False) ax.other_housekeeping(pmax, pmin, 40,-40) wbax = fig.add_axes([0.75,tephigram_plot_bottom,0.12,tephigram_plot_height],frameon=False, sharey=ax, label='barbs') ax_text_box = fig.add_axes([0.85,0.085,.12,tephigram_plot_height], frameon=False, axisbg='w') # Plot the data using normal plotting functions, in this case using semilogy ax.semilogy(T, p, 'r', linewidth=2) ax.semilogy(Td, p, 'r',linewidth=2) # row_labels=( # 'SLAT', # 'SLON', # 'SELV', # 'SHOW', # 'LIFT', # 'LFTV', # 'SWET', # 'KINX', # 'CTOT', # 'VTOT', # 'TOTL', # 'CAPE', # 'CINS', # 'CAPV', # 'CINV', # 'LFCT', # 'LFCV', # 'BRCH', # 'BRCV', # 'LCLT', # 'LCLP', # 'MLTH', # 'MLMR', # 'THCK', # 'PWAT') # variable='pbl_pressure' # var_index = variable_name_index_match(variable, variable_list_line) # print load_file['date_bin_mean_all_dates_one_station_single'].shape # pbl_pressure = load_file['date_bin_mean_all_dates_one_station_single'][bin,0,var_index] # print pbl_pressure # EQLV, pp, lclp,lfcp, lclt, delta_z, CAPE, CIN=CapeCinPBLInput(p, T, Td, h, st_height, pbl_pressure/100) # print lclp # table_vals=( # #'%s' % station_name, # #'Climatology - Week beg. 
%s' % da, # '%s' % la, # '%s' % lo, # '%s' % st_height, # '%.1f' % ShowalterIndex(T, Td, p), # ['Showalter index', # '%.1f' % LiftedIndex(T, Td, p, h, st_height), # 'Lifted index', # '--', # 'LIFT computed using virtual temperature', # '--', # 'SWEAT index', # '%.1f' % KIndex(T, Td, p), # 'K index', # '%.1f' % CrossTotalsIndex(T, Td, p), # 'Cross totals index', # '%.1f' % VerticalTotalsIndex(T, p), # 'Vertical totals index', # '%.1f' % TotalTotalsIndex(T, Td, p), # 'Total totals index', # '%.1f' % CAPE, # 'CAPE', # '%.1f' % CIN, # 'CIN', # '--', # 'CAPE using virtual temperature', # '--', # 'CINS using virtual temperature', # '%.1f' % lfcp, # 'Level of free convection', # '--', # 'LFCT using virtual temperature', # '--' , # 'Bulk Richardson number', # '--', # 'Bulk richardson using CAPV', # '%.1f' % lclt, # 'Temp [K] of the Lifted Condensation Level', # '%.1f' % lclp , # 'Pres [hPa] of the Lifted Condensation Level', # '--', # 'Mean mixed layer potential temperature', <|fim▁hole|> # Wind barbs barbs_idx=np.logspace(np.log10(10),np.log10(max(len(u_wind))),num=32).astype(int) wbax.set_yscale('log') wbax.xaxis.set_ticks([],[]) wbax.yaxis.grid(True,ls='-',color='y',lw=0.5) wbax.set_xlim(-1.5,1.5) wbax.get_yaxis().set_visible(False) wbax.set_ylim(pmax+100,pmin) wbax.barbs((zeros(p.shape))[barbs_idx-1],p[barbs_idx-1], u_wind[barbs_idx-1], v_wind[barbs_idx-1]) # Disables the log-formatting that comes with semilogy ax.yaxis.set_major_formatter(ScalarFormatter()) ax.set_yticks(linspace(100,1000,10)) ax.set_ylim(pmax,pmin) ax.set_xlim(-40.,40.) ax.xaxis.set_ticks([],[]) ax_text_box.xaxis.set_visible(False) ax_text_box.yaxis.set_visible(False) for tick in wbax.yaxis.get_major_ticks(): # tick.label1On = False pass #wbax.get_yaxis().set_tick_params(size=0,color='y') # y_loc=1. 
# max_string_length = max([len(line) for line in row_labels]) # for t,r in zip(row_labels,table_vals): # label_rightjust=('{:>%i}' % max_string_length).format(t) # ax_text_box.text(0.5, y_loc, ' %s:' % (label_rightjust), size=8, horizontalalignment='right') # ax_text_box.text(0.5, y_loc, ' %s' % (r), size=8, horizontalalignment='left') # y_loc-=0.04 fig.text(.02,0.965, '%s %s' %(stat, station_name), size=12, horizontalalignment='left') fig.text(.02,0.035, 'Climatology - Week beg. %s ' %(da.strftime('%m-%d')), size=12, horizontalalignment='left') #plt.show() plt.savefig('/nfs/a90/eepdw/Figures/Radiosonde/Tephigrams/Weekly_Climatology/Weekly_Climatology_%s_%s_%s_Skew_T.png' % (station_name.replace('/','_').replace(' ', '_'), stat, da.strftime('%Y%m%d'))) plt.close() except Exception: print PrintException()<|fim▁end|>
# '--', # 'Mean mixed layer mixing ratio', # '--', # '1000 hPa to 500 hPa thickness', # '--') # 'Precipitable water [mm] for entire sounding']
<|file_name|>construct.rs<|end_file_name|><|fim▁begin|>/// This is now a no-op. It does not need to be called anymore.<|fim▁hole|><|fim▁end|>
pub fn construct() {}
<|file_name|>sas.py<|end_file_name|><|fim▁begin|>""" pygments.styles.sas ~~~~~~~~~~~~~~~~~~~ Style inspired by SAS' enhanced program editor. Note This is not meant to be a complete style. It's merely meant to mimic SAS' program editor syntax highlighting. :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ from pygments.style import Style from pygments.token import Keyword, Name, Comment, String, Error, \ Number, Other, Whitespace, Generic class SasStyle(Style): """ Style inspired by SAS' enhanced program editor. Note This is not meant to be a complete style. It's merely meant to mimic SAS' program editor syntax highlighting. """ default_style = '' styles = { Whitespace: '#bbbbbb', Comment: 'italic #008800', String: '#800080', Number: 'bold #2e8b57', Other: 'bg:#ffffe0', Keyword: '#2c2cff', Keyword.Reserved: 'bold #353580', Keyword.Constant: 'bold', Name.Builtin: '#2c2cff', Name.Function: 'bold italic', Name.Variable: 'bold #2c2cff', Generic: '#2c2cff', Generic.Emph: '#008800',<|fim▁hole|><|fim▁end|>
Generic.Error: '#d30202', Error: 'bg:#e3d2d2 #a61717' }
<|file_name|>fail.ts<|end_file_name|><|fim▁begin|>// Error code 1 due to explicit type error /*var a: string = '', b: number = a + 2; console.log(b);*/ // Error code 4 due to accessing private member export class TestClass1 { public publicMember: PrivateInterface; constructor() { this.publicMember = {a: 1, b: 2}; console.log(this.publicMember.b); } } export class TestClass2 { constructor(t: TestClass1) { var m: PrivateInterface = t.publicMember; // Accessing private interface console.log(m.b);<|fim▁hole|> interface PrivateInterface { a: number; }<|fim▁end|>
} }
<|file_name|>hostDevServer.js<|end_file_name|><|fim▁begin|>const { createServer, plugins: { queryParser, serveStatic } } = require('restify'); const { join } = require('path'); const fetch = require('node-fetch'); const proxy = require('http-proxy-middleware'); const { PORT = 5000 } = process.env; const server = createServer(); server.use(queryParser()); server.get('/', async (req, res, next) => { if (!req.query.b) { const tokenRes = await fetch('https://webchat-mockbot.azurewebsites.net/directline/token', { headers: { origin: 'http://localhost:5000' }, method: 'POST' }); if (!tokenRes.ok) {<|fim▁hole|> } const { token } = await tokenRes.json(); return res.send(302, null, { location: `/?b=webchat-mockbot&t=${encodeURIComponent(token)}` }); } return serveStatic({ directory: join(__dirname, 'dist'), file: 'index.html' })(req, res, next); }); server.get('/embed/*/config', proxy({ changeOrigin: true, target: 'https://webchat.botframework.com/' })); server.listen(PORT, () => console.log(`Embed dev server is listening to port ${PORT}`));<|fim▁end|>
return res.send(500);
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google LLC All Rights Reserved.<|fim▁hole|> */ import { BuilderContext, BuilderOutput } from '@angular-devkit/architect'; import { Observable } from 'rxjs'; import { Schema as NgPackagrBuilderOptions } from './schema'; /** * @experimental Direct usage of this function is considered experimental. */ export declare function execute(options: NgPackagrBuilderOptions, context: BuilderContext): Observable<BuilderOutput>; export { NgPackagrBuilderOptions }; declare const _default: import("../../../../architect/src/internal").Builder<Record<string, string> & NgPackagrBuilderOptions & import("../../../../core/src").JsonObject>; export default _default;<|fim▁end|>
* * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license
<|file_name|>display.py<|end_file_name|><|fim▁begin|># Copyright 2018 Flight Lab authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Utility library for displaying arbitrary content on a machine.""" import jinja2 import tempfile from common import pattern from utils import app class Display(pattern.Closable): """Class for displaying arbitrary content on a machine. The implementation assumes Chrome browser is available on given machine and use it to display generated html content in kiosk mode so it appears as an app and works on any platform. """ def __init__(self, chrome_path, *args, **kwargs): """Creates Display instance. Args: chrome_path: path to chrome executable. """ super(Display, self).__init__(*args, **kwargs) self._chrome_path = chrome_path self._temp_path = tempfile.gettempdir() self._index_file = tempfile.mktemp(suffix='.html') self._chrome_app = app.Application( name='Browser', bin_path=chrome_path, arguments=[ '--kiosk', self._index_file, '--new-window', '--incognito', '--noerrordialogs', '--user-data-dir={0}'.format(self._temp_path) ], restart_on_crash=True) def close(self): """Closes Chrome browser.""" self._chrome_app.stop() def show_message(self, message, template_path='./data/display_message.html'): """Shows a text message in full screen. Args: message: text to show. template_path: a html template to use. It should contain "{{ message }}". 
""" self._generate_page( template_path=template_path, kwargs={ 'message': message }) self._relaunch() def show_image(self, image_path, template_path='./data/display_image_default.html'): """Shows an image in full screen. Current implementation only displays the image at (0,0) and at its original size. If image is smaller than screen size, the rest area will be white. If image is larger than screen size, it will be clipped and scrollbar will appear. Args: image_path: a locally accessible path to image file. template_path: a html template to use. It should contain "{{ image_path }}". """<|fim▁hole|> }) self._relaunch() def _generate_page(self, template_path, kwargs={}): with open(template_path, 'r') as f: template = jinja2.Template(f.read()) with open(self._index_file, 'w') as f: f.write(template.render(**kwargs)) def _relaunch(self): self._chrome_app.stop() self._chrome_app.start()<|fim▁end|>
self._generate_page( template_path=template_path, kwargs={ 'image_path': image_path
<|file_name|>settings-overview-routing.module.ts<|end_file_name|><|fim▁begin|>import { NgModule } from '@angular/core'; import { RouterModule, Routes } from '@angular/router'; import { SettingsOverviewComponent } from './settings-overview.component'; const routes: Routes = [ { path: '', component: SettingsOverviewComponent } ]; @NgModule({ imports: [ RouterModule.forChild(routes) ],<|fim▁hole|> exports: [ RouterModule ] }) export class SettingsOverviewRoutingModule {}<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>fn main() { let number = 3; <|fim▁hole|> if number < 5 { println!("condition was true"); } else { println!("condition was false"); } let condition = true; let number = if condition { 5 } else { 6 //; or 'h' will not type-checked. need return same type }; println!("The value of number is {}", number); }<|fim▁end|>
<|file_name|>svg_to_svg_ordered_dithering.py<|end_file_name|><|fim▁begin|>"""Copyright (c) 2017 abhishek-sehgal954 This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """ import sys import os import re import subprocess import math import numpy as np import inkex import simpletransform from PIL import Image, ImageStat, ImageDraw import simplestyle inkex.localize() class ordered_dithering(inkex.Effect): def __init__(self): """Init the effect library and get options from gui.""" inkex.Effect.__init__(self) self.OptionParser.add_option("-t", "--width", action="store", type="int", dest="width", default=200, help="this variable will be used to resize the original selected image to a width of whatever \<|fim▁hole|> self.OptionParser.add_option("--temp_path", action="store", type="string", dest="temp_path", default="", help="") def effect(self): outfile = self.options.temp_path curfile = self.args[-1] self.exportPage(curfile,outfile) def draw_rectangle(self,(x, y), (l,b), color, parent, id_): style = {'stroke': 'none', 'stroke-width': '1', 'fill': color,"mix-blend-mode" : "multiply"} attribs = {'style': simplestyle.formatStyle(style), 'x': str(x), 'y': str(y), 'width': str(l), 'height':str(b)} if id_ is not None: attribs.update({'id': id_}) obj = inkex.etree.SubElement(parent, inkex.addNS('rect', 'svg'), attribs) return obj def draw_circle(self,(x, y), r, 
color, parent, id_): style = {'stroke': 'none', 'stroke-width': '1', 'fill': color,"mix-blend-mode" : "multiply"} attribs = {'style': simplestyle.formatStyle(style), 'cx': str(x), 'cy': str(y), 'r': str(r)} if id_ is not None: attribs.update({'id': id_}) obj = inkex.etree.SubElement(parent, inkex.addNS('circle', 'svg'), attribs) return obj def draw_ellipse(self,(x, y), (r1,r2), color, parent, id_,transform): style = {'stroke': 'none', 'stroke-width': '1', 'fill': color,"mix-blend-mode" : "multiply"} if(transform == 1.5): attribs = {'style': simplestyle.formatStyle(style), 'cx': str(x), 'cy': str(y), 'rx': str(r1), 'ry': str(r2)} elif(transform == 3): attribs = {'style': simplestyle.formatStyle(style), 'cx': str(x), 'cy': str(y), 'rx': str(r1), 'ry': str(r2)} else: attribs = {'style': simplestyle.formatStyle(style), 'cx': str(x), 'cy': str(y), 'rx': str(r1), 'ry': str(r2)} if id_ is not None: attribs.update({'id': id_}) obj = inkex.etree.SubElement(parent, inkex.addNS('ellipse', 'svg'), attribs) return obj def draw_svg(self,output,parent): startu = 0 endu = 0 for i in range(len(output)): for j in range(len(output[i])): if (output[i][j]==0): self.draw_circle((int((startu+startu+1)/2),int((endu+endu+1)/2)),1,'black',parent,'id') #dwg.add(dwg.circle((int((startu+startu+1)/2),int((endu+endu+1)/2)),1,fill='black')) startu = startu+2 endu = endu+2 startu = 0 #dwg.save() def intensity(self,arr): # calcluates intensity of a pixel from 0 to 9 mini = 999 maxi = 0 for i in range(len(arr)): for j in range(len(arr[0])): maxi = max(arr[i][j],maxi) mini = min(arr[i][j],mini) level = float(float(maxi-mini)/float(10)); brr = [[0]*len(arr[0]) for i in range(len(arr))] for i in range(10): l1 = mini+level*i l2 = l1+level for j in range(len(arr)): for k in range(len(arr[0])): if(arr[j][k] >= l1 and arr[j][k] <= l2): brr[j][k]=i return brr def order_dither(self,image): arr = np.asarray(image) brr = self.intensity(arr) crr = [[8, 3, 4], [6, 1, 2], [7, 5, 9]] drr = 
np.zeros((len(arr),len(arr[0]))) for i in range(len(arr)): for j in range(len(arr[0])): if(brr[i][j] > crr[i%3][j%3]): drr[i][j] = 255 else: drr[i][j] = 0 return drr def dithering(self,node,image): if image: basewidth = self.options.width wpercent = (basewidth/float(image.size[0])) hsize = int((float(image.size[1])*float(wpercent))) image = image.resize((basewidth,hsize), Image.ANTIALIAS) (width, height) = image.size nodeParent = node.getparent() nodeIndex = nodeParent.index(node) pixel2svg_group = inkex.etree.Element(inkex.addNS('g', 'svg')) pixel2svg_group.set('id', "%s_pixel2svg" % node.get('id')) nodeParent.insert(nodeIndex+1, pixel2svg_group) nodeParent.remove(node) image = image.convert("RGBA") pixel_data = image.load() if image.mode == "RGBA": for y in xrange(image.size[1]): for x in xrange(image.size[0]): if pixel_data[x, y][3] < 255: pixel_data[x, y] = (255, 255, 255, 255) image.thumbnail([image.size[0], image.size[1]], Image.ANTIALIAS) image = image.convert('L') self.draw_rectangle((0,0),(width,height),'white',pixel2svg_group,'id') output = self.order_dither(image) self.draw_svg(output,pixel2svg_group) else: inkex.errormsg(_("Bailing out: No supported image file or data found")) sys.exit(1) def exportPage(self, curfile, outfile): command = "%s %s --export-png %s" %(self.options.inkscape_path,curfile,outfile) p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) return_code = p.wait() f = p.stdout err = p.stderr img = Image.open(outfile) if (self.options.ids): for node in self.selected.itervalues(): found_image = True self.dithering(node,img) def main(): e = ordered_dithering() e.affect() exit() if __name__=="__main__": main()<|fim▁end|>
you enter and height proportional to the new width, thus maintaining the aspect ratio") self.OptionParser.add_option("--inkscape_path", action="store", type="string", dest="inkscape_path", default="", help="")
<|file_name|>errors.rs<|end_file_name|><|fim▁begin|>use std::error; use std::fmt; use std::io; use parsed_class::FieldRef; #[derive(Debug)] pub enum ClassLoadingError { NoClassDefFound(Result<String, io::Error>), ClassFormatError(String), UnsupportedClassVersion, NoSuchFieldError(FieldRef), #[allow(dead_code)] IncompatibleClassChange, #[allow(dead_code)] ClassCircularity, } impl fmt::Display for ClassLoadingError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {<|fim▁hole|> "NoClassDefFound: {}", err.as_ref().map(|o| o.to_owned()).map_err(|e| format!("{}", e)).unwrap_or_else(|e| e)) } ClassLoadingError::ClassFormatError(ref err) => write!(f, "ClassFormatError: {}", err), ClassLoadingError::NoSuchFieldError(ref field) => write!(f, "NoSuchField: {:?}", field), ClassLoadingError::UnsupportedClassVersion => write!(f, "class version not supported"), ClassLoadingError::IncompatibleClassChange => write!(f, "IncompatibleClassChange"), ClassLoadingError::ClassCircularity => write!(f, "ClassCircularity"), } } } impl error::Error for ClassLoadingError { fn description(&self) -> &str { match *self { ClassLoadingError::NoClassDefFound(..) => "NoClassDefFound", ClassLoadingError::ClassFormatError(..) => "ClassFormatError", ClassLoadingError::NoSuchFieldError(..) => "NoSuchFieldError", ClassLoadingError::UnsupportedClassVersion => "UnsupportedClassVersion", ClassLoadingError::IncompatibleClassChange => "IncompatibleClassChange", ClassLoadingError::ClassCircularity => "ClassCircularity", } } fn cause(&self) -> Option<&error::Error> { match *self { ClassLoadingError::NoClassDefFound(ref err) => err.as_ref().err().map(|e| e as &error::Error), _ => None, } } }<|fim▁end|>
match *self { ClassLoadingError::NoClassDefFound(ref err) => { write!(f,
<|file_name|>test_website.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals import frappe, unittest from werkzeug.wrappers import Request from werkzeug.test import EnvironBuilder from frappe.website import render def set_request(**kwargs): builder = EnvironBuilder(**kwargs) frappe.local.request = Request(builder.get_environ()) class TestWebsite(unittest.TestCase): def test_page_load(self): set_request(method='POST', path='login') response = render.render() <|fim▁hole|> html = response.get_data() self.assertTrue('/* login-css */' in html) self.assertTrue('// login.js' in html) self.assertTrue('<!-- login.html -->' in html)<|fim▁end|>
self.assertTrue(response.status_code, 200)
<|file_name|>image_synthese_facette_image.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ @file @brief image et synthèse """ from .image_synthese_facette import Rectangle from .image_synthese_base import Rayon, Couleur from .image_synthese_sphere import Sphere class RectangleImage(Rectangle): """définit un rectangle contenant un portrait""" def __init__(self, a, b, c, d, nom_image, pygame, invertx=False): """initialisation, si d == None, d est calculé comme étant le symétrique de b par rapport au milieu du segment [ac], la texture est une image, si invertx == True, inverse l'image selon l'axe des x""" Rectangle.__init__(self, a, b, c, d, Couleur(0, 0, 0)) self.image = pygame.image.load(nom_image) self.nom_image = nom_image self.invertx = invertx def __str__(self): """affichage""" s = "rectangle image --- a : " + str(self.a) s += " b : " + str(self.b) s += " c : " + str(self.c) s += " d : " + str(self.d) s += " image : " + self.nom_image return s def couleur_point(self, p): """retourne la couleur au point de coordonnée p""" ap = p - self.a ab = self.b - self.a ad = self.d - self.a abn = ab.norme2() adn = ad.norme2() x = ab.scalaire(ap) / abn y = ad.scalaire(ap) / adn sx, sy = self.image.get_size() k, li = int(x * sx), int(y * sy) k = min(k, sx - 1) li = min(li, sy - 1) li = sy - li - 1 if not self.invertx: c = self.image.get_at((k, li)) else: c = self.image.get_at((sx - k - 1, li)) cl = Couleur(float(c[0]) / 255, float(c[1]) / 255, float(c[2]) / 255) return cl class SphereReflet (Sphere): """implémente une sphère avec un reflet""" def __init__(self, centre, rayon, couleur, reflet): """initialisation, reflet est un coefficient de réflexion""" Sphere.__init__(self, centre, rayon, couleur) self.reflet = reflet def __str__(self): """affichage""" s = "sphere reflet --- centre : " + str(self.centre) s += " rayon : " + str(self.rayon) s += " couleur : " + str(self.couleur) return s def rayon_reflechi(self, rayon, p):<|fim▁hole|> n = self.normale(p, rayon) n = 
n.renorme() y = n.scalaire(rayon.direction) d = rayon.direction - n * y * 2 r = Rayon(p, d, rayon.pixel, rayon.couleur * self.reflet) return r<|fim▁end|>
"""retourne le rayon réfléchi au point p de la surface, si aucune, retourne None""" if p == rayon.origine: return None
<|file_name|>vfpsingle.cpp<|end_file_name|><|fim▁begin|>/* vfp/vfpsingle.c - ARM VFPv3 emulation unit - SoftFloat single instruction Copyright (C) 2003 Skyeye Develop Group for help please send mail to <[email protected]> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA */ /* * This code is derived in part from : * - Android kernel * - John R. Housers softfloat library, which * carries the following notice: * * =========================================================================== * This C source file is part of the SoftFloat IEC/IEEE Floating-point * Arithmetic Package, Release 2. * * Written by John R. Hauser. This work was made possible in part by the * International Computer Science Institute, located at Suite 600, 1947 Center * Street, Berkeley, California 94704. Funding was partially provided by the * National Science Foundation under grant MIP-9311980. The original version * of this code was written as part of a project to build a fixed-point vector * processor in collaboration with the University of California at Berkeley, * overseen by Profs. Nelson Morgan and John Wawrzynek. More information * is available through the web page `http://HTTP.CS.Berkeley.EDU/~jhauser/ * arithmetic/softfloat.html'. * * THIS SOFTWARE IS DISTRIBUTED AS IS, FOR FREE. 
Although reasonable effort * has been made to avoid it, THIS SOFTWARE MAY CONTAIN FAULTS THAT WILL AT * TIMES RESULT IN INCORRECT BEHAVIOR. USE OF THIS SOFTWARE IS RESTRICTED TO * PERSONS AND ORGANIZATIONS WHO CAN AND WILL TAKE FULL RESPONSIBILITY FOR ANY * AND ALL LOSSES, COSTS, OR OTHER PROBLEMS ARISING FROM ITS USE. * * Derivative works are acceptable, even for commercial purposes, so long as * (1) they include prominent notice that the work is derivative, and (2) they * include prominent notice akin to these three paragraphs for those parts of * this code that are retained. * =========================================================================== */ #include <algorithm> #include <cinttypes> #include "common/common_funcs.h" #include "common/common_types.h" #include "common/logging/log.h" #include "core/arm/skyeye_common/vfp/asm_vfp.h" #include "core/arm/skyeye_common/vfp/vfp.h" #include "core/arm/skyeye_common/vfp/vfp_helper.h" static struct vfp_single vfp_single_default_qnan = { 255, 0, VFP_SINGLE_SIGNIFICAND_QNAN, }; static void vfp_single_dump(const char* str, struct vfp_single* s) { LOG_TRACE(Core_ARM11, "%s: sign=%d exponent=%d significand=%08x", str, s->sign != 0, s->exponent, s->significand); } static void vfp_single_normalise_denormal(struct vfp_single* vs) { int bits = 31 - fls(vs->significand); vfp_single_dump("normalise_denormal: in", vs); if (bits) { vs->exponent -= bits - 1; vs->significand <<= bits; } vfp_single_dump("normalise_denormal: out", vs); } u32 vfp_single_normaliseround(ARMul_State* state, int sd, struct vfp_single* vs, u32 fpscr, const char* func) { u32 significand, incr, rmode; int exponent, shift, underflow; u32 exceptions = 0; vfp_single_dump("pack: in", vs); /* * Infinities and NaNs are a special case. */ if (vs->exponent == 255 && (vs->significand == 0 || exceptions)) goto pack; /* * Special-case zero. 
*/ if (vs->significand == 0) { vs->exponent = 0; goto pack; } exponent = vs->exponent; significand = vs->significand; /* * Normalise first. Note that we shift the significand up to * bit 31, so we have VFP_SINGLE_LOW_BITS + 1 below the least * significant bit. */ shift = 32 - fls(significand); if (shift < 32 && shift) { exponent -= shift; significand <<= shift; } #if 1 vs->exponent = exponent; vs->significand = significand; vfp_single_dump("pack: normalised", vs); #endif /* * Tiny number? */ underflow = exponent < 0; if (underflow) { significand = vfp_shiftright32jamming(significand, -exponent); exponent = 0; #if 1 vs->exponent = exponent; vs->significand = significand; vfp_single_dump("pack: tiny number", vs); #endif if (!(significand & ((1 << (VFP_SINGLE_LOW_BITS + 1)) - 1))) underflow = 0; } /* * Select rounding increment. */ incr = 0; rmode = fpscr & FPSCR_RMODE_MASK; if (rmode == FPSCR_ROUND_NEAREST) { incr = 1 << VFP_SINGLE_LOW_BITS; if ((significand & (1 << (VFP_SINGLE_LOW_BITS + 1))) == 0) incr -= 1; } else if (rmode == FPSCR_ROUND_TOZERO) { incr = 0; } else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vs->sign != 0)) incr = (1 << (VFP_SINGLE_LOW_BITS + 1)) - 1; LOG_TRACE(Core_ARM11, "rounding increment = 0x%08x", incr); /* * Is our rounding going to overflow? */ if ((significand + incr) < significand) { exponent += 1; significand = (significand >> 1) | (significand & 1); incr >>= 1; #if 1 vs->exponent = exponent; vs->significand = significand; vfp_single_dump("pack: overflow", vs); #endif } /* * If any of the low bits (which will be shifted out of the * number) are non-zero, the result is inexact. */ if (significand & ((1 << (VFP_SINGLE_LOW_BITS + 1)) - 1)) exceptions |= FPSCR_IXC; /* * Do our rounding. */ significand += incr; /* * Infinity? 
*/ if (exponent >= 254) { exceptions |= FPSCR_OFC | FPSCR_IXC; if (incr == 0) { vs->exponent = 253; vs->significand = 0x7fffffff; } else { vs->exponent = 255; /* infinity */ vs->significand = 0; } } else { if (significand >> (VFP_SINGLE_LOW_BITS + 1) == 0) exponent = 0; if (exponent || significand > 0x80000000) underflow = 0; if (underflow) exceptions |= FPSCR_UFC; vs->exponent = exponent; vs->significand = significand >> 1; } pack: vfp_single_dump("pack: final", vs); { s32 d = vfp_single_pack(vs); LOG_TRACE(Core_ARM11, "%s: d(s%d)=%08x exceptions=%08x", func, sd, d, exceptions); vfp_put_float(state, d, sd); } return exceptions; } /* * Propagate the NaN, setting exceptions if it is signalling. * 'n' is always a NaN. 'm' may be a number, NaN or infinity. */ static u32 vfp_propagate_nan(struct vfp_single* vsd, struct vfp_single* vsn, struct vfp_single* vsm, u32 fpscr) { struct vfp_single* nan; int tn, tm = 0; tn = vfp_single_type(vsn); if (vsm) tm = vfp_single_type(vsm); if (fpscr & FPSCR_DEFAULT_NAN) /* * Default NaN mode - always returns a quiet NaN */ nan = &vfp_single_default_qnan; else { /* * Contemporary mode - select the first signalling * NAN, or if neither are signalling, the first * quiet NAN. */ if (tn == VFP_SNAN || (tm != VFP_SNAN && tn == VFP_QNAN)) nan = vsn; else nan = vsm; /* * Make the NaN quiet. */ nan->significand |= VFP_SINGLE_SIGNIFICAND_QNAN; } *vsd = *nan; /* * If one was a signalling NAN, raise invalid operation. */ return tn == VFP_SNAN || tm == VFP_SNAN ? 
FPSCR_IOC : VFP_NAN_FLAG; } /* * Extended operations */ static u32 vfp_single_fabs(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { vfp_put_float(state, vfp_single_packed_abs(m), sd); return 0; } static u32 vfp_single_fcpy(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { vfp_put_float(state, m, sd); return 0; } static u32 vfp_single_fneg(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { vfp_put_float(state, vfp_single_packed_negate(m), sd); return 0; } static const u16 sqrt_oddadjust[] = { 0x0004, 0x0022, 0x005d, 0x00b1, 0x011d, 0x019f, 0x0236, 0x02e0, 0x039c, 0x0468, 0x0545, 0x0631, 0x072b, 0x0832, 0x0946, 0x0a67, }; static const u16 sqrt_evenadjust[] = { 0x0a2d, 0x08af, 0x075a, 0x0629, 0x051a, 0x0429, 0x0356, 0x029e, 0x0200, 0x0179, 0x0109, 0x00af, 0x0068, 0x0034, 0x0012, 0x0002, }; u32 vfp_estimate_sqrt_significand(u32 exponent, u32 significand) { int index; u32 z, a; if ((significand & 0xc0000000) != 0x40000000) { LOG_TRACE(Core_ARM11, "invalid significand"); } a = significand << 1; index = (a >> 27) & 15; if (exponent & 1) { z = 0x4000 + (a >> 17) - sqrt_oddadjust[index]; z = ((a / z) << 14) + (z << 15); a >>= 1; } else { z = 0x8000 + (a >> 17) - sqrt_evenadjust[index]; z = a / z + z; z = (z >= 0x20000) ? 
0xffff8000 : (z << 15); if (z <= a) return (s32)a >> 1; } { u64 v = (u64)a << 31; do_div(v, z); return (u32)(v + (z >> 1)); } } static u32 vfp_single_fsqrt(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { struct vfp_single vsm, vsd, *vsp; int ret, tm; u32 exceptions = 0; exceptions |= vfp_single_unpack(&vsm, m, fpscr); tm = vfp_single_type(&vsm); if (tm & (VFP_NAN | VFP_INFINITY)) { vsp = &vsd; if (tm & VFP_NAN) ret = vfp_propagate_nan(vsp, &vsm, nullptr, fpscr); else if (vsm.sign == 0) { sqrt_copy: vsp = &vsm; ret = 0; } else { sqrt_invalid: vsp = &vfp_single_default_qnan; ret = FPSCR_IOC; } vfp_put_float(state, vfp_single_pack(vsp), sd); return ret; } /* * sqrt(+/- 0) == +/- 0 */ if (tm & VFP_ZERO) goto sqrt_copy; /* * Normalise a denormalised number */ if (tm & VFP_DENORMAL) vfp_single_normalise_denormal(&vsm); /* * sqrt(<0) = invalid */ if (vsm.sign) goto sqrt_invalid; vfp_single_dump("sqrt", &vsm); /* * Estimate the square root. */ vsd.sign = 0; vsd.exponent = ((vsm.exponent - 127) >> 1) + 127; vsd.significand = vfp_estimate_sqrt_significand(vsm.exponent, vsm.significand) + 2; vfp_single_dump("sqrt estimate", &vsd); /* * And now adjust. 
*/ if ((vsd.significand & VFP_SINGLE_LOW_BITS_MASK) <= 5) { if (vsd.significand < 2) { vsd.significand = 0xffffffff; } else { u64 term; s64 rem; vsm.significand <<= static_cast<u32>((vsm.exponent & 1) == 0); term = (u64)vsd.significand * vsd.significand; rem = ((u64)vsm.significand << 32) - term; LOG_TRACE(Core_ARM11, "term=%016" PRIx64 "rem=%016" PRIx64, term, rem); while (rem < 0) { vsd.significand -= 1; rem += ((u64)vsd.significand << 1) | 1; } vsd.significand |= rem != 0; } } vsd.significand = vfp_shiftright32jamming(vsd.significand, 1); exceptions |= vfp_single_normaliseround(state, sd, &vsd, fpscr, "fsqrt"); return exceptions; } /* * Equal := ZC * Less than := N * Greater than := C * Unordered := CV */ static u32 vfp_compare(ARMul_State* state, int sd, int signal_on_qnan, s32 m, u32 fpscr) { s32 d; u32 ret = 0; d = vfp_get_float(state, sd); if (vfp_single_packed_exponent(m) == 255 && vfp_single_packed_mantissa(m)) { ret |= FPSCR_CFLAG | FPSCR_VFLAG; if (signal_on_qnan || !(vfp_single_packed_mantissa(m) & (1 << (VFP_SINGLE_MANTISSA_BITS - 1)))) /* * Signalling NaN, or signalling on quiet NaN */ ret |= FPSCR_IOC; } if (vfp_single_packed_exponent(d) == 255 && vfp_single_packed_mantissa(d)) { ret |= FPSCR_CFLAG | FPSCR_VFLAG; if (signal_on_qnan || !(vfp_single_packed_mantissa(d) & (1 << (VFP_SINGLE_MANTISSA_BITS - 1)))) /* * Signalling NaN, or signalling on quiet NaN */ ret |= FPSCR_IOC; } if (ret == 0) { if (d == m || vfp_single_packed_abs(d | m) == 0) { /* * equal */ ret |= FPSCR_ZFLAG | FPSCR_CFLAG; } else if (vfp_single_packed_sign(d ^ m)) { /* * different signs */ if (vfp_single_packed_sign(d)) /* * d is negative, so d < m */ ret |= FPSCR_NFLAG; else /* * d is positive, so d > m */ ret |= FPSCR_CFLAG; } else if ((vfp_single_packed_sign(d) != 0) ^ (d < m)) { /* * d < m */ ret |= FPSCR_NFLAG; } else if ((vfp_single_packed_sign(d) != 0) ^ (d > m)) { /* * d > m */ ret |= FPSCR_CFLAG; } } return ret; } static u32 vfp_single_fcmp(ARMul_State* state, int sd, int 
unused, s32 m, u32 fpscr) { return vfp_compare(state, sd, 0, m, fpscr); } static u32 vfp_single_fcmpe(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { return vfp_compare(state, sd, 1, m, fpscr); } static u32 vfp_single_fcmpz(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { return vfp_compare(state, sd, 0, 0, fpscr); } static u32 vfp_single_fcmpez(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { return vfp_compare(state, sd, 1, 0, fpscr); } static u32 vfp_single_fcvtd(ARMul_State* state, int dd, int unused, s32 m, u32 fpscr) { struct vfp_single vsm; struct vfp_double vdd; int tm; u32 exceptions = 0; exceptions |= vfp_single_unpack(&vsm, m, fpscr); tm = vfp_single_type(&vsm); /* * If we have a signalling NaN, signal invalid operation. */ if (tm == VFP_SNAN) exceptions |= FPSCR_IOC; if (tm & VFP_DENORMAL) vfp_single_normalise_denormal(&vsm);<|fim▁hole|> vdd.sign = vsm.sign; vdd.significand = (u64)vsm.significand << 32; /* * If we have an infinity or NaN, the exponent must be 2047. */ if (tm & (VFP_INFINITY | VFP_NAN)) { vdd.exponent = 2047; if (tm == VFP_QNAN) vdd.significand |= VFP_DOUBLE_SIGNIFICAND_QNAN; goto pack_nan; } else if (tm & VFP_ZERO) vdd.exponent = 0; else vdd.exponent = vsm.exponent + (1023 - 127); exceptions |= vfp_double_normaliseround(state, dd, &vdd, fpscr, "fcvtd"); return exceptions; pack_nan: vfp_put_double(state, vfp_double_pack(&vdd), dd); return exceptions; } static u32 vfp_single_fuito(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { struct vfp_single vs; u32 exceptions = 0; vs.sign = 0; vs.exponent = 127 + 31 - 1; vs.significand = (u32)m; exceptions |= vfp_single_normaliseround(state, sd, &vs, fpscr, "fuito"); return exceptions; } static u32 vfp_single_fsito(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { struct vfp_single vs; u32 exceptions = 0; vs.sign = (m & 0x80000000) >> 16; vs.exponent = 127 + 31 - 1; vs.significand = vs.sign ? 
-m : m; exceptions |= vfp_single_normaliseround(state, sd, &vs, fpscr, "fsito"); return exceptions; } static u32 vfp_single_ftoui(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { struct vfp_single vsm; u32 d, exceptions = 0; int rmode = fpscr & FPSCR_RMODE_MASK; int tm; exceptions |= vfp_single_unpack(&vsm, m, fpscr); vfp_single_dump("VSM", &vsm); /* * Do we have a denormalised number? */ tm = vfp_single_type(&vsm); if (tm & VFP_DENORMAL) exceptions |= FPSCR_IDC; if (tm & VFP_NAN) vsm.sign = 1; if (vsm.exponent >= 127 + 32) { d = vsm.sign ? 0 : 0xffffffff; exceptions |= FPSCR_IOC; } else if (vsm.exponent >= 127) { int shift = 127 + 31 - vsm.exponent; u32 rem, incr = 0; /* * 2^0 <= m < 2^32-2^8 */ d = (vsm.significand << 1) >> shift; if (shift > 0) { rem = (vsm.significand << 1) << (32 - shift); } else { rem = 0; } if (rmode == FPSCR_ROUND_NEAREST) { incr = 0x80000000; if ((d & 1) == 0) incr -= 1; } else if (rmode == FPSCR_ROUND_TOZERO) { incr = 0; } else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vsm.sign != 0)) { incr = ~0; } if ((rem + incr) < rem) { if (d < 0xffffffff) d += 1; else exceptions |= FPSCR_IOC; } if (d && vsm.sign) { d = 0; exceptions |= FPSCR_IOC; } else if (rem) exceptions |= FPSCR_IXC; } else { d = 0; if (vsm.exponent | vsm.significand) { if (rmode == FPSCR_ROUND_NEAREST) { if (vsm.exponent >= 126) { d = vsm.sign ? 0 : 1; exceptions |= vsm.sign ? FPSCR_IOC : FPSCR_IXC; } else { exceptions |= FPSCR_IXC; } } else if (rmode == FPSCR_ROUND_PLUSINF && vsm.sign == 0) { d = 1; exceptions |= FPSCR_IXC; } else if (rmode == FPSCR_ROUND_MINUSINF) { exceptions |= vsm.sign ? 
FPSCR_IOC : FPSCR_IXC; } else { exceptions |= FPSCR_IXC; } } } LOG_TRACE(Core_ARM11, "ftoui: d(s%d)=%08x exceptions=%08x", sd, d, exceptions); vfp_put_float(state, d, sd); return exceptions; } static u32 vfp_single_ftouiz(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { return vfp_single_ftoui(state, sd, unused, m, (fpscr & ~FPSCR_RMODE_MASK) | FPSCR_ROUND_TOZERO); } static u32 vfp_single_ftosi(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { struct vfp_single vsm; u32 d, exceptions = 0; int rmode = fpscr & FPSCR_RMODE_MASK; int tm; exceptions |= vfp_single_unpack(&vsm, m, fpscr); vfp_single_dump("VSM", &vsm); /* * Do we have a denormalised number? */ tm = vfp_single_type(&vsm); if (vfp_single_type(&vsm) & VFP_DENORMAL) exceptions |= FPSCR_IDC; if (tm & VFP_NAN) { d = 0; exceptions |= FPSCR_IOC; } else if (vsm.exponent >= 127 + 31) { /* * m >= 2^31-2^7: invalid */ d = 0x7fffffff; if (vsm.sign) d = ~d; exceptions |= FPSCR_IOC; } else if (vsm.exponent >= 127) { int shift = 127 + 31 - vsm.exponent; u32 rem, incr = 0; /* 2^0 <= m <= 2^31-2^7 */ d = (vsm.significand << 1) >> shift; rem = (vsm.significand << 1) << (32 - shift); if (rmode == FPSCR_ROUND_NEAREST) { incr = 0x80000000; if ((d & 1) == 0) incr -= 1; } else if (rmode == FPSCR_ROUND_TOZERO) { incr = 0; } else if ((rmode == FPSCR_ROUND_PLUSINF) ^ (vsm.sign != 0)) { incr = ~0; } if ((rem + incr) < rem && d < 0xffffffff) d += 1; if (d > (0x7fffffffu + (vsm.sign != 0))) { d = (0x7fffffffu + (vsm.sign != 0)); exceptions |= FPSCR_IOC; } else if (rem) exceptions |= FPSCR_IXC; if (vsm.sign) d = (~d + 1); } else { d = 0; if (vsm.exponent | vsm.significand) { exceptions |= FPSCR_IXC; if (rmode == FPSCR_ROUND_NEAREST) { if (vsm.exponent >= 126) d = vsm.sign ? 
0xffffffff : 1; } else if (rmode == FPSCR_ROUND_PLUSINF && vsm.sign == 0) { d = 1; } else if (rmode == FPSCR_ROUND_MINUSINF && vsm.sign) { d = 0xffffffff; } } } LOG_TRACE(Core_ARM11, "ftosi: d(s%d)=%08x exceptions=%08x", sd, d, exceptions); vfp_put_float(state, (s32)d, sd); return exceptions; } static u32 vfp_single_ftosiz(ARMul_State* state, int sd, int unused, s32 m, u32 fpscr) { return vfp_single_ftosi(state, sd, unused, m, (fpscr & ~FPSCR_RMODE_MASK) | FPSCR_ROUND_TOZERO); } static struct op fops_ext[] = { {vfp_single_fcpy, 0}, // 0x00000000 - FEXT_FCPY {vfp_single_fabs, 0}, // 0x00000001 - FEXT_FABS {vfp_single_fneg, 0}, // 0x00000002 - FEXT_FNEG {vfp_single_fsqrt, 0}, // 0x00000003 - FEXT_FSQRT {nullptr, 0}, {nullptr, 0}, {nullptr, 0}, {nullptr, 0}, {vfp_single_fcmp, OP_SCALAR}, // 0x00000008 - FEXT_FCMP {vfp_single_fcmpe, OP_SCALAR}, // 0x00000009 - FEXT_FCMPE {vfp_single_fcmpz, OP_SCALAR}, // 0x0000000A - FEXT_FCMPZ {vfp_single_fcmpez, OP_SCALAR}, // 0x0000000B - FEXT_FCMPEZ {nullptr, 0}, {nullptr, 0}, {nullptr, 0}, {vfp_single_fcvtd, OP_SCALAR | OP_DD}, // 0x0000000F - FEXT_FCVT {vfp_single_fuito, OP_SCALAR}, // 0x00000010 - FEXT_FUITO {vfp_single_fsito, OP_SCALAR}, // 0x00000011 - FEXT_FSITO {nullptr, 0}, {nullptr, 0}, {nullptr, 0}, {nullptr, 0}, {nullptr, 0}, {nullptr, 0}, {vfp_single_ftoui, OP_SCALAR}, // 0x00000018 - FEXT_FTOUI {vfp_single_ftouiz, OP_SCALAR}, // 0x00000019 - FEXT_FTOUIZ {vfp_single_ftosi, OP_SCALAR}, // 0x0000001A - FEXT_FTOSI {vfp_single_ftosiz, OP_SCALAR}, // 0x0000001B - FEXT_FTOSIZ }; static u32 vfp_single_fadd_nonnumber(struct vfp_single* vsd, struct vfp_single* vsn, struct vfp_single* vsm, u32 fpscr) { struct vfp_single* vsp; u32 exceptions = 0; int tn, tm; tn = vfp_single_type(vsn); tm = vfp_single_type(vsm); if (tn & tm & VFP_INFINITY) { /* * Two infinities. Are they different signs? 
*/ if (vsn->sign ^ vsm->sign) { /* * different signs -> invalid */ exceptions |= FPSCR_IOC; vsp = &vfp_single_default_qnan; } else { /* * same signs -> valid */ vsp = vsn; } } else if (tn & VFP_INFINITY && tm & VFP_NUMBER) { /* * One infinity and one number -> infinity */ vsp = vsn; } else { /* * 'n' is a NaN of some type */ return vfp_propagate_nan(vsd, vsn, vsm, fpscr); } *vsd = *vsp; return exceptions; } static u32 vfp_single_add(struct vfp_single* vsd, struct vfp_single* vsn, struct vfp_single* vsm, u32 fpscr) { u32 exp_diff, m_sig; if (vsn->significand & 0x80000000 || vsm->significand & 0x80000000) { LOG_WARNING(Core_ARM11, "bad FP values"); vfp_single_dump("VSN", vsn); vfp_single_dump("VSM", vsm); } /* * Ensure that 'n' is the largest magnitude number. Note that * if 'n' and 'm' have equal exponents, we do not swap them. * This ensures that NaN propagation works correctly. */ if (vsn->exponent < vsm->exponent) { std::swap(vsm, vsn); } /* * Is 'n' an infinity or a NaN? Note that 'm' may be a number, * infinity or a NaN here. */ if (vsn->exponent == 255) return vfp_single_fadd_nonnumber(vsd, vsn, vsm, fpscr); /* * We have two proper numbers, where 'vsn' is the larger magnitude. * * Copy 'n' to 'd' before doing the arithmetic. */ *vsd = *vsn; /* * Align both numbers. */ exp_diff = vsn->exponent - vsm->exponent; m_sig = vfp_shiftright32jamming(vsm->significand, exp_diff); /* * If the signs are different, we are really subtracting. */ if (vsn->sign ^ vsm->sign) { m_sig = vsn->significand - m_sig; if ((s32)m_sig < 0) { vsd->sign = vfp_sign_negate(vsd->sign); m_sig = (~m_sig + 1); } else if (m_sig == 0) { vsd->sign = (fpscr & FPSCR_RMODE_MASK) == FPSCR_ROUND_MINUSINF ? 
0x8000 : 0; } } else { m_sig = vsn->significand + m_sig; } vsd->significand = m_sig; return 0; } static u32 vfp_single_multiply(struct vfp_single* vsd, struct vfp_single* vsn, struct vfp_single* vsm, u32 fpscr) { vfp_single_dump("VSN", vsn); vfp_single_dump("VSM", vsm); /* * Ensure that 'n' is the largest magnitude number. Note that * if 'n' and 'm' have equal exponents, we do not swap them. * This ensures that NaN propagation works correctly. */ if (vsn->exponent < vsm->exponent) { std::swap(vsm, vsn); LOG_TRACE(Core_ARM11, "swapping M <-> N"); } vsd->sign = vsn->sign ^ vsm->sign; /* * If 'n' is an infinity or NaN, handle it. 'm' may be anything. */ if (vsn->exponent == 255) { if (vsn->significand || (vsm->exponent == 255 && vsm->significand)) return vfp_propagate_nan(vsd, vsn, vsm, fpscr); if ((vsm->exponent | vsm->significand) == 0) { *vsd = vfp_single_default_qnan; return FPSCR_IOC; } vsd->exponent = vsn->exponent; vsd->significand = 0; return 0; } /* * If 'm' is zero, the result is always zero. In this case, * 'n' may be zero or a number, but it doesn't matter which. */ if ((vsm->exponent | vsm->significand) == 0) { vsd->exponent = 0; vsd->significand = 0; return 0; } /* * We add 2 to the destination exponent for the same reason as * the addition case - though this time we have +1 from each * input operand. 
*/ vsd->exponent = vsn->exponent + vsm->exponent - 127 + 2; vsd->significand = vfp_hi64to32jamming((u64)vsn->significand * vsm->significand); vfp_single_dump("VSD", vsd); return 0; } #define NEG_MULTIPLY (1 << 0) #define NEG_SUBTRACT (1 << 1) static u32 vfp_single_multiply_accumulate(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr, u32 negate, const char* func) { vfp_single vsd, vsp, vsn, vsm; u32 exceptions = 0; s32 v; v = vfp_get_float(state, sn); LOG_TRACE(Core_ARM11, "s%u = %08x", sn, v); exceptions |= vfp_single_unpack(&vsn, v, fpscr); if (vsn.exponent == 0 && vsn.significand) vfp_single_normalise_denormal(&vsn); exceptions |= vfp_single_unpack(&vsm, m, fpscr); if (vsm.exponent == 0 && vsm.significand) vfp_single_normalise_denormal(&vsm); exceptions |= vfp_single_multiply(&vsp, &vsn, &vsm, fpscr); if (negate & NEG_MULTIPLY) vsp.sign = vfp_sign_negate(vsp.sign); v = vfp_get_float(state, sd); LOG_TRACE(Core_ARM11, "s%u = %08x", sd, v); exceptions |= vfp_single_unpack(&vsn, v, fpscr); if (vsn.exponent == 0 && vsn.significand != 0) vfp_single_normalise_denormal(&vsn); if (negate & NEG_SUBTRACT) vsn.sign = vfp_sign_negate(vsn.sign); exceptions |= vfp_single_add(&vsd, &vsn, &vsp, fpscr); exceptions |= vfp_single_normaliseround(state, sd, &vsd, fpscr, func); return exceptions; } /* * Standard operations */ /* * sd = sd + (sn * sm) */ static u32 vfp_single_fmac(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr) { u32 exceptions = 0; LOG_TRACE(Core_ARM11, "s%u = %08x", sn, sd); exceptions |= vfp_single_multiply_accumulate(state, sd, sn, m, fpscr, 0, "fmac"); return exceptions; } /* * sd = sd - (sn * sm) */ static u32 vfp_single_fnmac(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr) { // TODO: this one has its arguments inverted, investigate. 
LOG_TRACE(Core_ARM11, "s%u = %08x", sd, sn); return vfp_single_multiply_accumulate(state, sd, sn, m, fpscr, NEG_MULTIPLY, "fnmac"); } /* * sd = -sd + (sn * sm) */ static u32 vfp_single_fmsc(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr) { LOG_TRACE(Core_ARM11, "s%u = %08x", sn, sd); return vfp_single_multiply_accumulate(state, sd, sn, m, fpscr, NEG_SUBTRACT, "fmsc"); } /* * sd = -sd - (sn * sm) */ static u32 vfp_single_fnmsc(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr) { LOG_TRACE(Core_ARM11, "s%u = %08x", sn, sd); return vfp_single_multiply_accumulate(state, sd, sn, m, fpscr, NEG_SUBTRACT | NEG_MULTIPLY, "fnmsc"); } /* * sd = sn * sm */ static u32 vfp_single_fmul(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr) { struct vfp_single vsd, vsn, vsm; u32 exceptions = 0; s32 n = vfp_get_float(state, sn); LOG_TRACE(Core_ARM11, "s%u = %08x", sn, n); exceptions |= vfp_single_unpack(&vsn, n, fpscr); if (vsn.exponent == 0 && vsn.significand) vfp_single_normalise_denormal(&vsn); exceptions |= vfp_single_unpack(&vsm, m, fpscr); if (vsm.exponent == 0 && vsm.significand) vfp_single_normalise_denormal(&vsm); exceptions |= vfp_single_multiply(&vsd, &vsn, &vsm, fpscr); exceptions |= vfp_single_normaliseround(state, sd, &vsd, fpscr, "fmul"); return exceptions; } /* * sd = -(sn * sm) */ static u32 vfp_single_fnmul(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr) { struct vfp_single vsd, vsn, vsm; u32 exceptions = 0; s32 n = vfp_get_float(state, sn); LOG_TRACE(Core_ARM11, "s%u = %08x", sn, n); exceptions |= vfp_single_unpack(&vsn, n, fpscr); if (vsn.exponent == 0 && vsn.significand) vfp_single_normalise_denormal(&vsn); exceptions |= vfp_single_unpack(&vsm, m, fpscr); if (vsm.exponent == 0 && vsm.significand) vfp_single_normalise_denormal(&vsm); exceptions |= vfp_single_multiply(&vsd, &vsn, &vsm, fpscr); vsd.sign = vfp_sign_negate(vsd.sign); exceptions |= vfp_single_normaliseround(state, sd, &vsd, fpscr, "fnmul"); return exceptions; } /* * sd = sn + sm */ 
static u32 vfp_single_fadd(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr) { struct vfp_single vsd, vsn, vsm; u32 exceptions = 0; s32 n = vfp_get_float(state, sn); LOG_TRACE(Core_ARM11, "s%u = %08x", sn, n); /* * Unpack and normalise denormals. */ exceptions |= vfp_single_unpack(&vsn, n, fpscr); if (vsn.exponent == 0 && vsn.significand) vfp_single_normalise_denormal(&vsn); exceptions |= vfp_single_unpack(&vsm, m, fpscr); if (vsm.exponent == 0 && vsm.significand) vfp_single_normalise_denormal(&vsm); exceptions |= vfp_single_add(&vsd, &vsn, &vsm, fpscr); exceptions |= vfp_single_normaliseround(state, sd, &vsd, fpscr, "fadd"); return exceptions; } /* * sd = sn - sm */ static u32 vfp_single_fsub(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr) { LOG_TRACE(Core_ARM11, "s%u = %08x", sn, sd); /* * Subtraction is addition with one sign inverted. */ if (m != 0x7FC00000) // Only negate if m isn't NaN. m = vfp_single_packed_negate(m); return vfp_single_fadd(state, sd, sn, m, fpscr); } /* * sd = sn / sm */ static u32 vfp_single_fdiv(ARMul_State* state, int sd, int sn, s32 m, u32 fpscr) { struct vfp_single vsd, vsn, vsm; u32 exceptions = 0; s32 n = vfp_get_float(state, sn); int tm, tn; LOG_TRACE(Core_ARM11, "s%u = %08x", sn, n); exceptions |= vfp_single_unpack(&vsn, n, fpscr); exceptions |= vfp_single_unpack(&vsm, m, fpscr); vsd.sign = vsn.sign ^ vsm.sign; tn = vfp_single_type(&vsn); tm = vfp_single_type(&vsm); /* * Is n a NAN? */ if (tn & VFP_NAN) goto vsn_nan; /* * Is m a NAN? 
*/ if (tm & VFP_NAN) goto vsm_nan; /* * If n and m are infinity, the result is invalid * If n and m are zero, the result is invalid */ if (tm & tn & (VFP_INFINITY | VFP_ZERO)) goto invalid; /* * If n is infinity, the result is infinity */ if (tn & VFP_INFINITY) goto infinity; /* * If m is zero, raise div0 exception */ if (tm & VFP_ZERO) goto divzero; /* * If m is infinity, or n is zero, the result is zero */ if (tm & VFP_INFINITY || tn & VFP_ZERO) goto zero; if (tn & VFP_DENORMAL) vfp_single_normalise_denormal(&vsn); if (tm & VFP_DENORMAL) vfp_single_normalise_denormal(&vsm); /* * Ok, we have two numbers, we can perform division. */ vsd.exponent = vsn.exponent - vsm.exponent + 127 - 1; vsm.significand <<= 1; if (vsm.significand <= (2 * vsn.significand)) { vsn.significand >>= 1; vsd.exponent++; } { u64 significand = (u64)vsn.significand << 32; do_div(significand, vsm.significand); vsd.significand = (u32)significand; } if ((vsd.significand & 0x3f) == 0) vsd.significand |= ((u64)vsm.significand * vsd.significand != (u64)vsn.significand << 32); exceptions |= vfp_single_normaliseround(state, sd, &vsd, fpscr, "fdiv"); return exceptions; vsn_nan: exceptions |= vfp_propagate_nan(&vsd, &vsn, &vsm, fpscr); pack: vfp_put_float(state, vfp_single_pack(&vsd), sd); return exceptions; vsm_nan: exceptions |= vfp_propagate_nan(&vsd, &vsm, &vsn, fpscr); goto pack; zero: vsd.exponent = 0; vsd.significand = 0; goto pack; divzero: exceptions |= FPSCR_DZC; infinity: vsd.exponent = 255; vsd.significand = 0; goto pack; invalid: vfp_put_float(state, vfp_single_pack(&vfp_single_default_qnan), sd); exceptions |= FPSCR_IOC; return exceptions; } static struct op fops[] = { {vfp_single_fmac, 0}, {vfp_single_fmsc, 0}, {vfp_single_fmul, 0}, {vfp_single_fadd, 0}, {vfp_single_fnmac, 0}, {vfp_single_fnmsc, 0}, {vfp_single_fnmul, 0}, {vfp_single_fsub, 0}, {vfp_single_fdiv, 0}, }; #define FREG_BANK(x) ((x)&0x18) #define FREG_IDX(x) ((x)&7) u32 vfp_single_cpdo(ARMul_State* state, u32 inst, u32 fpscr) { 
u32 op = inst & FOP_MASK; u32 exceptions = 0; unsigned int dest; unsigned int sn = vfp_get_sn(inst); unsigned int sm = vfp_get_sm(inst); unsigned int vecitr, veclen, vecstride; struct op* fop; vecstride = 1 + ((fpscr & FPSCR_STRIDE_MASK) == FPSCR_STRIDE_MASK); fop = (op == FOP_EXT) ? &fops_ext[FEXT_TO_IDX(inst)] : &fops[FOP_TO_IDX(op)]; /* * fcvtsd takes a dN register number as destination, not sN. * Technically, if bit 0 of dd is set, this is an invalid * instruction. However, we ignore this for efficiency. * It also only operates on scalars. */ if (fop->flags & OP_DD) dest = vfp_get_dd(inst); else dest = vfp_get_sd(inst); /* * If destination bank is zero, vector length is always '1'. * ARM DDI0100F C5.1.3, C5.3.2. */ if ((fop->flags & OP_SCALAR) || FREG_BANK(dest) == 0) veclen = 0; else veclen = fpscr & FPSCR_LENGTH_MASK; LOG_TRACE(Core_ARM11, "vecstride=%u veclen=%u", vecstride, (veclen >> FPSCR_LENGTH_BIT) + 1); if (!fop->fn) { LOG_CRITICAL(Core_ARM11, "could not find single op %d, inst=0x%x@0x%x", FEXT_TO_IDX(inst), inst, state->Reg[15]); Crash(); goto invalid; } for (vecitr = 0; vecitr <= veclen; vecitr += 1 << FPSCR_LENGTH_BIT) { s32 m = vfp_get_float(state, sm); u32 except; char type; type = (fop->flags & OP_DD) ? 'd' : 's'; if (op == FOP_EXT) LOG_TRACE(Core_ARM11, "itr%d (%c%u) = op[%u] (s%u=%08x)", vecitr >> FPSCR_LENGTH_BIT, type, dest, sn, sm, m); else LOG_TRACE(Core_ARM11, "itr%d (%c%u) = (s%u) op[%u] (s%u=%08x)", vecitr >> FPSCR_LENGTH_BIT, type, dest, sn, FOP_TO_IDX(op), sm, m); except = fop->fn(state, dest, sn, m, fpscr); LOG_TRACE(Core_ARM11, "itr%d: exceptions=%08x", vecitr >> FPSCR_LENGTH_BIT, except); exceptions |= except; /* * CHECK: It appears to be undefined whether we stop when * we encounter an exception. We continue. 
*/ dest = FREG_BANK(dest) + ((FREG_IDX(dest) + vecstride) & 7); sn = FREG_BANK(sn) + ((FREG_IDX(sn) + vecstride) & 7); if (FREG_BANK(sm) != 0) sm = FREG_BANK(sm) + ((FREG_IDX(sm) + vecstride) & 7); } return exceptions; invalid: return (u32)-1; }<|fim▁end|>
<|file_name|>callback.rs<|end_file_name|><|fim▁begin|>use std::mem; use std::ptr; use std::cell::RefCell; use std::sync::mpsc::Sender; use std::sync::{Arc, Mutex}; use std::ffi::OsString; use std::os::windows::ffi::OsStringExt; use CursorState; use Event; use super::event; use user32; use shell32; use winapi; /// There's no parameters passed to the callback function, so it needs to get /// its context (the HWND, the Sender for events, etc.) stashed in /// a thread-local variable. thread_local!(pub static CONTEXT_STASH: RefCell<Option<ThreadLocalData>> = RefCell::new(None)); pub struct ThreadLocalData { pub win: winapi::HWND, pub sender: Sender<Event>, pub cursor_state: Arc<Mutex<CursorState>> } /// Checks that the window is the good one, and if so send the event to it. fn send_event(input_window: winapi::HWND, event: Event) { CONTEXT_STASH.with(|context_stash| { let context_stash = context_stash.borrow(); let stored = match *context_stash { None => return, Some(ref v) => v }; let &ThreadLocalData { ref win, ref sender, .. } = stored; if win != &input_window { return; } sender.send(event).ok(); // ignoring if closed }); } /// This is the callback that is called by `DispatchMessage` in the events loop. /// /// Returning 0 tells the Win32 API that the message has been processed. // FIXME: detect WM_DWMCOMPOSITIONCHANGED and call DwmEnableBlurBehindWindow if necessary pub unsafe extern "system" fn callback(window: winapi::HWND, msg: winapi::UINT, wparam: winapi::WPARAM, lparam: winapi::LPARAM) -> winapi::LRESULT { match msg { winapi::WM_DESTROY => { use events::Event::Closed; CONTEXT_STASH.with(|context_stash| { let context_stash = context_stash.borrow(); let stored = match *context_stash { None => return, Some(ref v) => v }; let &ThreadLocalData { ref win, .. 
} = stored; if win == &window { user32::PostQuitMessage(0); } }); send_event(window, Closed); 0 }, winapi::WM_ERASEBKGND => { 1 }, winapi::WM_SIZE => { use events::Event::Resized; let w = winapi::LOWORD(lparam as winapi::DWORD) as u32; let h = winapi::HIWORD(lparam as winapi::DWORD) as u32; send_event(window, Resized(w, h)); 0 }, winapi::WM_MOVE => { use events::Event::Moved; let x = winapi::LOWORD(lparam as winapi::DWORD) as i32; let y = winapi::HIWORD(lparam as winapi::DWORD) as i32; send_event(window, Moved(x, y)); 0 }, winapi::WM_CHAR => { use std::mem; use events::Event::ReceivedCharacter; let chr: char = mem::transmute(wparam as u32); send_event(window, ReceivedCharacter(chr)); 0 }, // Prevents default windows menu hotkeys playing unwanted // "ding" sounds. Alternatively could check for WM_SYSCOMMAND // with wparam being SC_KEYMENU, but this may prevent some // other unwanted default hotkeys as well. winapi::WM_SYSCHAR => { 0 } winapi::WM_MOUSEMOVE => { use events::Event::MouseMoved; let x = winapi::GET_X_LPARAM(lparam) as i32; let y = winapi::GET_Y_LPARAM(lparam) as i32; send_event(window, MouseMoved((x, y))); 0 }, winapi::WM_MOUSEWHEEL => { use events::Event::MouseWheel; use events::MouseScrollDelta::LineDelta; let value = (wparam >> 16) as i16; let value = value as i32; let value = value as f32 / winapi::WHEEL_DELTA as f32; send_event(window, MouseWheel(LineDelta(0.0, value))); 0 }, winapi::WM_KEYDOWN | winapi::WM_SYSKEYDOWN => { use events::Event::KeyboardInput; use events::ElementState::Pressed; if msg == winapi::WM_SYSKEYDOWN && wparam as i32 == winapi::VK_F4 { user32::DefWindowProcW(window, msg, wparam, lparam) } else { let (scancode, vkey) = event::vkeycode_to_element(wparam, lparam); send_event(window, KeyboardInput(Pressed, scancode, vkey)); 0 } }, winapi::WM_KEYUP | winapi::WM_SYSKEYUP => { use events::Event::KeyboardInput; use events::ElementState::Released; let (scancode, vkey) = event::vkeycode_to_element(wparam, lparam); send_event(window, 
KeyboardInput(Released, scancode, vkey)); 0 }, winapi::WM_LBUTTONDOWN => { use events::Event::MouseInput; use events::MouseButton::Left; use events::ElementState::Pressed; send_event(window, MouseInput(Pressed, Left)); 0 }, winapi::WM_LBUTTONUP => { use events::Event::MouseInput; use events::MouseButton::Left; use events::ElementState::Released; send_event(window, MouseInput(Released, Left)); 0 }, winapi::WM_RBUTTONDOWN => { use events::Event::MouseInput; use events::MouseButton::Right; use events::ElementState::Pressed; send_event(window, MouseInput(Pressed, Right)); 0 }, winapi::WM_RBUTTONUP => { use events::Event::MouseInput; use events::MouseButton::Right; use events::ElementState::Released; send_event(window, MouseInput(Released, Right)); 0 }, winapi::WM_MBUTTONDOWN => { use events::Event::MouseInput; use events::MouseButton::Middle; use events::ElementState::Pressed; send_event(window, MouseInput(Pressed, Middle)); 0 }, winapi::WM_MBUTTONUP => { use events::Event::MouseInput; use events::MouseButton::Middle; use events::ElementState::Released; send_event(window, MouseInput(Released, Middle)); 0 }, winapi::WM_INPUT => { let mut data: winapi::RAWINPUT = mem::uninitialized(); let mut data_size = mem::size_of::<winapi::RAWINPUT>() as winapi::UINT; user32::GetRawInputData(mem::transmute(lparam), winapi::RID_INPUT, mem::transmute(&mut data), &mut data_size, mem::size_of::<winapi::RAWINPUTHEADER>() as winapi::UINT); if data.header.dwType == winapi::RIM_TYPEMOUSE { let _x = data.mouse.lLastX; // FIXME: this is not always the relative movement let _y = data.mouse.lLastY; // TODO: //send_event(window, Event::MouseRawMovement { x: x, y: y }); 0 } else { user32::DefWindowProcW(window, msg, wparam, lparam) } }, winapi::WM_SETFOCUS => { use events::Event::Focused; send_event(window, Focused(true)); 0 }, winapi::WM_KILLFOCUS => { use events::Event::Focused; send_event(window, Focused(false)); 0 }, winapi::WM_SETCURSOR => { CONTEXT_STASH.with(|context_stash| { let cstash = 
context_stash.borrow(); let cstash = cstash.as_ref(); // there's a very bizarre borrow checker bug // possibly related to rust-lang/rust/#23338 let _cursor_state = if let Some(cstash) = cstash { if let Ok(cursor_state) = cstash.cursor_state.lock() { match *cursor_state { CursorState::Normal => { user32::SetCursor(user32::LoadCursorW( ptr::null_mut(), winapi::IDC_ARROW)); }, CursorState::Grab | CursorState::Hide => { user32::SetCursor(ptr::null_mut()); } } } } else { return }; <|fim▁hole|> 0 }, winapi::WM_DROPFILES => { use events::Event::DroppedFile; let hdrop = wparam as winapi::HDROP; let mut pathbuf: [u16; winapi::MAX_PATH] = mem::uninitialized(); let num_drops = shell32::DragQueryFileW(hdrop, 0xFFFFFFFF, ptr::null_mut(), 0); for i in 0..num_drops { let nch = shell32::DragQueryFileW(hdrop, i, pathbuf.as_mut_ptr(), winapi::MAX_PATH as u32) as usize; if nch > 0 { send_event(window, DroppedFile(OsString::from_wide(&pathbuf[0..nch]).into())); } } shell32::DragFinish(hdrop); 0 }, x if x == *super::WAKEUP_MSG_ID => { use events::Event::Awakened; send_event(window, Awakened); 0 }, _ => { user32::DefWindowProcW(window, msg, wparam, lparam) } } }<|fim▁end|>
// let &ThreadLocalData { ref cursor_state, .. } = stored; });
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # dodotable documentation build configuration file, created by # sphinx-quickstart on Thu Sep 17 11:47:28 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. from dodotable import __version__, __version_info__ import sys import os import shlex # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' <|fim▁hole|> 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'dodotable' copyright = '2016, Spoqa, Inc' author = 'Kang Hyojun' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '.'.join(str(v) for v in __version_info__[:2]) # The full version, including alpha/beta/rc tags. release = __version__ # The language for content autogenerated by Sphinx. 
Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. 
If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 
#html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'h', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'r', 'sv', 'tr' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'dodotabledoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'dodotable.tex', 'dodotable Documentation', 'Kang Hyojun', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. 
#latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'dodotable', 'dodotable Documentation', [author], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'dodotable', 'dodotable Documentation', author, 'dodotable', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { 'python': ('http://docs.python.org/', None), 'sqlalchemy': ('http://docs.sqlalchemy.org/en/latest/', None), 'flask': ('http://flask.pocoo.org/docs/', None) }<|fim▁end|>
# Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [
<|file_name|>myaddon.cc<|end_file_name|><|fim▁begin|>#include <nan.h> <|fim▁hole|>using namespace v8; NAN_METHOD(Length) { Nan::MaybeLocal<String> maybeStr = Nan::To<String>(info[0]); v8::Local<String> str; if(maybeStr.ToLocal(&str) == false) { Nan::ThrowError("Error converting first argument to string"); } int len = strlen(*String::Utf8Value(str)); info.GetReturnValue().Set(len); } NAN_MODULE_INIT(Init) { Nan::Set(target, Nan::New("length").ToLocalChecked(), Nan::GetFunction(Nan::New<FunctionTemplate>(Delay)).ToLocalChecked()); } NODE_MODULE(myaddon, Init)<|fim▁end|>
<|file_name|>profile.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """ Profile script for CNFgen package """ from __future__ import print_function import os import sys from contextlib import contextmanager @contextmanager def erase_stdout(): with file(os.devnull,"w") as null: old_stdout = sys.stdout sys.stdout = null yield sys.stdout = old_stdout def cnfgen_call(): from cnfformula import cnfgen cmd = ["cnfgen"] + sys.argv[1:] with erase_stdout(): cnfgen(cmd) if __name__ == '__main__': <|fim▁hole|> if len(sys.argv) <= 1: print("Usage: {} <cnfgen_args>".format(sys.argv[0]),file=sys.stderr) sys.exit(-1) profile('cnfgen_call()',sort='tottime')<|fim▁end|>
from cProfile import run as profile
<|file_name|>allocation.py<|end_file_name|><|fim▁begin|># ---------------------------------------------------------------------------- # pyglet # Copyright (c) 2006-2008 Alex Holkner # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # * Neither the name of pyglet nor the names of its # contributors may be used to endorse or promote products # derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # ---------------------------------------------------------------------------- # $Id:$ """Memory allocation algorithm for vertex arrays and buffers. The region allocator is used to allocate vertex indices within a vertex domain's multiple buffers. 
("Buffer" refers to any abstract buffer presented by `pyglet.graphics.vertexbuffer`. The allocator will at times request more space from the buffers. The current policy is to double the buffer size when there is not enough room to fulfil an allocation. The buffer is never resized smaller. The allocator maintains references to free space only; it is the caller's responsibility to maintain the allocated regions. """ # Common cases: # -regions will be the same size (instances of same object, e.g. sprites) # -regions will not usually be resized (only exception is text) # -alignment of 4 vertices (glyphs, sprites, images, ...) # # Optimise for: # -keeping regions adjacent, reduce the number of entries in glMultiDrawArrays # -finding large blocks of allocated regions quickly (for drawing) # -finding block of unallocated space is the _uncommon_ case! # # Decisions: # -don't over-allocate regions to any alignment -- this would require more # work in finding the allocated spaces (for drawing) and would result in # more entries in glMultiDrawArrays # -don't move blocks when they truncate themselves. try not to allocate the # space they freed too soon (they will likely need grow back into it later, # and growing will usually require a reallocation). # -allocator does not track individual allocated regions. Trusts caller # to provide accurate (start, size) tuple, which completely describes # a region from the allocator's point of view. # -this means that compacting is probably not feasible, or would be hideously # expensive class AllocatorMemoryException(Exception): """The buffer is not large enough to fulfil an allocation. Raised by `Allocator` methods when the operation failed due to lack of buffer space. The buffer should be increased to at least requested_capacity and then the operation retried (guaranteed to pass second time). 
""" def __init__(self, requested_capacity): self.requested_capacity = requested_capacity class Allocator: """Buffer space allocation implementation.""" def __init__(self, capacity): """Create an allocator for a buffer of the specified capacity. :Parameters: `capacity` : int Maximum size of the buffer. """ self.capacity = capacity # Allocated blocks. Start index and size in parallel lists. # # # = allocated, - = free # # 0 3 5 15 20 24 40 # |###--##########-----####----------------------| # # starts = [0, 5, 20] # sizes = [3, 10, 4] # # To calculate free blocks: # for i in range(0, len(starts)): # free_start[i] = starts[i] + sizes[i] # free_size[i] = starts[i+1] - free_start[i] # free_size[i+1] = self.capacity - free_start[-1] self.starts = list() self.sizes = list() def set_capacity(self, size): """Resize the maximum buffer size. The capaity cannot be reduced. :Parameters: `size` : int New maximum size of the buffer. """ assert size > self.capacity self.capacity = size def alloc(self, size): """Allocate memory in the buffer. Raises `AllocatorMemoryException` if the allocation cannot be fulfilled. :Parameters: `size` : int Size of region to allocate. :rtype: int :return: Starting index of the allocated region. """ assert size >= 0 if size == 0: return 0 # return start # or raise AllocatorMemoryException if not self.starts: if size <= self.capacity: self.starts.append(0) self.sizes.append(size) return 0 else: raise AllocatorMemoryException(size) # Allocate in a free space free_start = self.starts[0] + self.sizes[0] for i, (alloc_start, alloc_size) in \ enumerate(zip(self.starts[1:], self.sizes[1:])): # Danger! # i is actually index - 1 because of slicing above... # starts[i] points to the block before this free space # starts[i+1] points to the block after this free space, and is # always valid. 
free_size = alloc_start - free_start if free_size == size: # Merge previous block with this one (removing this free space) self.sizes[i] += free_size + alloc_size del self.starts[i + 1] del self.sizes[i + 1] return free_start elif free_size > size: # Increase size of previous block to intrude into this free # space. self.sizes[i] += size return free_start free_start = alloc_start + alloc_size # Allocate at end of capacity free_size = self.capacity - free_start if free_size >= size: self.sizes[-1] += size return free_start raise AllocatorMemoryException(self.capacity + size - free_size) def realloc(self, start, size, new_size): """Reallocate a region of the buffer. This is more efficient than separate `dealloc` and `alloc` calls, as the region can often be resized in-place. Raises `AllocatorMemoryException` if the allocation cannot be fulfilled. :Parameters: `start` : int Current starting index of the region. `size` : int Current size of the region. `new_size` : int New size of the region. """ assert size >= 0 and new_size >= 0 if new_size == 0: if size != 0: self.dealloc(start, size) return 0 elif size == 0: return self.alloc(new_size) # return start # or raise AllocatorMemoryException # Truncation is the same as deallocating the tail cruft if new_size < size: self.dealloc(start + new_size, size - new_size) return start # Find which block it lives in for i, (alloc_start, alloc_size) in \ enumerate(zip(*(self.starts, self.sizes))): p = start - alloc_start if p >= 0 and size <= alloc_size - p: break if not (p >= 0 and size <= alloc_size - p): print(list(zip(self.starts, self.sizes))) print(start, size, new_size) print(p, alloc_start, alloc_size) assert p >= 0 and size <= alloc_size - p, 'Region not allocated' if size == alloc_size - p: # Region is at end of block. Find how much free space is after # it. 
is_final_block = i == len(self.starts) - 1 if not is_final_block: free_size = self.starts[i + 1] - (start + size) else: free_size = self.capacity - (start + size) # TODO If region is an entire block being an island in free space, # can possibly extend in both directions. if free_size == new_size - size and not is_final_block: # Merge block with next (region is expanded in place to # exactly fill the free space) self.sizes[i] += free_size + self.sizes[i + 1] del self.starts[i + 1] del self.sizes[i + 1] return start elif free_size > new_size - size: # Expand region in place self.sizes[i] += new_size - size return start # The block must be repositioned. Dealloc then alloc. <|fim▁hole|> # It must be alloc'd first. We're not missing an optimisation # here, because if freeing the block would've allowed for the block to # be placed in the resulting free space, one of the above in-place # checks would've found it. result = self.alloc(new_size) self.dealloc(start, size) return result def dealloc(self, start, size): """Free a region of the buffer. :Parameters: `start` : int Starting index of the region. `size` : int Size of the region. 
""" assert size >= 0 if size == 0: return assert self.starts # Find which block needs to be split for i, (alloc_start, alloc_size) in \ enumerate(zip(*(self.starts, self.sizes))): p = start - alloc_start if p >= 0 and size <= alloc_size - p: break # Assert we left via the break assert p >= 0 and size <= alloc_size - p, 'Region not allocated' if p == 0 and size == alloc_size: # Remove entire block del self.starts[i] del self.sizes[i] elif p == 0: # Truncate beginning of block self.starts[i] += size self.sizes[i] -= size elif size == alloc_size - p: # Truncate end of block self.sizes[i] -= size else: # Reduce size of left side, insert block at right side # $ = dealloc'd block, # = alloc'd region from same block # # <------8------> # <-5-><-6-><-7-> # 1 2 3 4 # #####$$$$$##### # # 1 = alloc_start # 2 = start # 3 = start + size # 4 = alloc_start + alloc_size # 5 = start - alloc_start = p # 6 = size # 7 = {8} - ({5} + {6}) = alloc_size - (p + size) # 8 = alloc_size # self.sizes[i] = p self.starts.insert(i + 1, start + size) self.sizes.insert(i + 1, alloc_size - (p + size)) def get_allocated_regions(self): """Get a list of (aggregate) allocated regions. The result of this method is ``(starts, sizes)``, where ``starts`` is a list of starting indices of the regions and ``sizes`` their corresponding lengths. :rtype: (list, list) """ # return (starts, sizes); len(starts) == len(sizes) return (self.starts, self.sizes) def get_fragmented_free_size(self): """Returns the amount of space unused, not including the final free block. :rtype: int """ if not self.starts: return 0 # Variation of search for free block. total_free = 0 free_start = self.starts[0] + self.sizes[0] for i, (alloc_start, alloc_size) in \ enumerate(zip(self.starts[1:], self.sizes[1:])): total_free += alloc_start - free_start free_start = alloc_start + alloc_size return total_free def get_free_size(self): """Return the amount of space unused. 
:rtype: int """ if not self.starts: return self.capacity free_end = self.capacity - (self.starts[-1] + self.sizes[-1]) return self.get_fragmented_free_size() + free_end def get_usage(self): """Return fraction of capacity currently allocated. :rtype: float """ return 1. - self.get_free_size() / float(self.capacity) def get_fragmentation(self): """Return fraction of free space that is not expandable. :rtype: float """ free_size = self.get_free_size() if free_size == 0: return 0. return self.get_fragmented_free_size() / float(self.get_free_size()) def _is_empty(self): return not self.starts def __str__(self): return 'allocs=' + repr(list(zip(self.starts, self.sizes))) def __repr__(self): return '<%s %s>' % (self.__class__.__name__, str(self))<|fim▁end|>
# But don't do this! If alloc fails, we've already silently dealloc'd # the original block. # self.dealloc(start, size) # return self.alloc(new_size)
<|file_name|>test_deploy.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software<|fim▁hole|># distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Test classes for the pubsub-to-datastore module.""" import os import time import unittest import urllib import json import sys sys.path.insert(1, 'lib') import httplib2 GAE_HOST = "pubsub-to-datastore-dot-cloud-iot-dev.appspot.com" def url_for(path): """Returns the URL of the endpoint for the given path.""" return 'https://%s%s' % (GAE_HOST, path) class IntegrationTestCase(unittest.TestCase): """A test case for the pubsub-to-datastore module.""" def setUp(self): self.http = httplib2.Http() def test_push_success(self): """Test processing a new message.""" headers = {'Content-type': 'application/json', 'Accept': 'text/plain'} data = {"message": {"data": "eyJzZXNzaW9uSWQiOiI1OWE0N2VhNS1jMjAxLTA4MzItZjU2Zi1hM2ZlNGUxNzA0ODciLCJkYXRhIjp7ImV2IjoiZGV2aWNlb3JpZW50YXRpb24iLCJ4IjowLjE5MDEzNDE2NTg0MTU2ODk4LCJ5IjoyMy45MDQxMTQ5MzYzNzg0NTJ9LCJ0aW1lc3RhbXAiOjE0NjI1NTI3MzcyMDl9","message_id": "34536788863333"}} (resp, content) = self.http.request(url_for('/'), 'POST', body=json.dumps(data), headers=headers) # This ensures that our App Engine service account is working correctly. self.assertEquals(204, resp.status) # [START main] if __name__ == '__main__': unittest.main() # [END main]<|fim▁end|>
<|file_name|>vga.rs<|end_file_name|><|fim▁begin|>/******************************************************************************* * * kit/kernel/terminal/vga.rs * * vim:ft=rust:ts=4:sw=4:et:tw=80 * * Copyright (C) 2015-2021, Devyn Cairns * Redistribution of this file is permitted under the terms of the simplified * BSD license. See LICENSE for more information. * ******************************************************************************/ use super::{Terminal, Color}; use core::fmt; /// Configuration for a VGA text-mode terminal. #[derive(Debug, Clone, Copy)] pub struct VgaConfig { pub width: usize, pub height: usize, pub buffer: *mut u16, pub port: u16, } /// Controls a VGA text-mode terminal. #[derive(Debug)] pub struct Vga { config: VgaConfig, row: usize, col: usize, fg: Color, bg: Color, attr: u8, } impl Vga { /// Create a new VGA text-mode terminal controller with the given /// dimensions, buffer, and port. pub unsafe fn new(config: VgaConfig) -> Vga { let mut vga = Vga { config, row: 0, col: 0, fg: Color::LightGrey, bg: Color::Black, attr: Vga::attr(Color::LightGrey, Color::Black), }; vga.reset().unwrap(); vga } pub fn config(&self) -> VgaConfig { self.config } pub fn color(c: Color) -> u8 { c as u8<|fim▁hole|> pub fn attr(fg: Color, bg: Color) -> u8 { Vga::color(fg) | (Vga::color(bg) << 4) } fn update_attr(&mut self) { self.attr = Vga::attr(self.fg, self.bg); } fn update_cursor(&mut self) { unsafe fn outb(byte: u8, port: u16) { asm!("out %al, %dx", in("al") byte, in("dx") port, options(att_syntax)); } let pos: u16 = ((self.row * self.config.width) + self.col) as u16; unsafe { outb(0x0F, self.config.port); outb(pos as u8, self.config.port + 1); outb(0x0E, self.config.port); outb((pos >> 8) as u8, self.config.port + 1); } } pub fn put(&mut self, byte: u8, attr: u8, row: usize, col: usize) { unsafe { *self.config.buffer.offset((row * self.config.width + col) as isize) = (byte as u16) | ((attr as u16) << 8); } } pub fn put_here(&mut self, byte: u8) { let 
(attr, row, col) = (self.attr, self.row, self.col); self.put(byte, attr, row, col) } fn new_line(&mut self) { // Clear to the end of the line. while self.col < self.config.width { self.put_here(' ' as u8); self.col += 1; } // Go to the next line, scrolling if necessary. self.col = 0; self.row += 1; while self.row >= self.config.height { self.scroll(); self.row -= 1; } self.update_cursor(); } fn scroll(&mut self) { // Shift everything one line back. for row in 1..self.config.height { for col in 0..self.config.width { let index = (row * self.config.width + col) as isize; unsafe { *self.config.buffer.offset(index - self.config.width as isize) = *self.config.buffer.offset(index); } // XXX: SSE memory operations fail on memory-mapped I/O in KVM, // so inhibit vectorization unsafe { asm!("nop"); } } } // Clear last line. let (attr, height) = (self.attr, self.config.height); for col in 0..self.config.width { self.put(' ' as u8, attr, height - 1, col); } } } impl Terminal for Vga { fn reset(&mut self) -> fmt::Result { self.fg = Color::LightGrey; self.bg = Color::Black; self.update_attr(); self.clear() } fn clear(&mut self) -> fmt::Result { self.row = 0; self.col = 0; let attr = self.attr; for row in 0..self.config.height { for col in 0..self.config.width { self.put(' ' as u8, attr, row, col); // XXX: SSE memory operations fail on memory-mapped I/O in KVM, // so inhibit vectorization unsafe { asm!("nop"); } } } Ok(()) } fn get_cursor(&self) -> (usize, usize) { (self.row, self.col) } fn set_cursor(&mut self, row: usize, col: usize) -> fmt::Result { self.row = row; self.col = col; self.update_cursor(); Ok(()) } fn get_color(&self) -> (Color, Color) { (self.fg, self.bg) } fn set_color(&mut self, fg: Color, bg: Color) -> fmt::Result { self.fg = fg; self.bg = bg; self.update_attr(); Ok(()) } fn put_raw_byte(&mut self, byte: u8, fg: Color, bg: Color, row: usize, col: usize) -> fmt::Result { self.put(byte, Vga::attr(fg, bg), row, col); Ok(()) } fn write_raw_byte(&mut self, byte: 
u8) -> fmt::Result { match byte { b'\n' => { self.new_line(); }, 0x08 /* backspace */ => { if self.col > 0 { self.col -= 1; } self.put_here(' ' as u8); }, _ => { self.put_here(byte); self.col += 1; if self.col >= self.config.width { self.new_line(); } } } Ok(()) } fn flush(&mut self) -> fmt::Result { self.update_cursor(); Ok(()) } } impl fmt::Write for Vga { fn write_char(&mut self, ch: char) -> fmt::Result { let mut buf = [0; 4]; self.write_raw_bytes(ch.encode_utf8(&mut buf).as_bytes())?; self.flush()?; Ok(()) } fn write_str(&mut self, s: &str) -> fmt::Result { self.write_raw_bytes(s.as_bytes())?; self.flush()?; Ok(()) } }<|fim▁end|>
}
<|file_name|>MSEdge.cpp<|end_file_name|><|fim▁begin|>/****************************************************************************/ /// @file MSEdge.cpp /// @author Christian Roessel /// @author Jakob Erdmann /// @author Christoph Sommer /// @author Daniel Krajzewicz /// @author Laura Bieker /// @author Michael Behrisch /// @author Sascha Krieg /// @date Tue, 06 Mar 2001 /// @version $Id: MSEdge.cpp 13107 2012-12-02 13:57:34Z behrisch $ /// // A road/street connecting two junctions /****************************************************************************/ // SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/ // Copyright (C) 2001-2012 DLR (http://www.dlr.de/) and contributors /****************************************************************************/ // // This file is part of SUMO. // SUMO is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. 
// /****************************************************************************/ // =========================================================================== // included modules // =========================================================================== #ifdef _MSC_VER #include <windows_config.h> #else #include <config.h> #endif #include <algorithm> #include <iostream> #include <cassert> #include <utils/common/StringTokenizer.h> #include <utils/options/OptionsCont.h> #include "MSEdge.h" #include "MSLane.h" #include "MSLaneChanger.h" #include "MSGlobals.h" #include "MSVehicle.h" #include "MSEdgeWeightsStorage.h" #ifdef HAVE_INTERNAL #include <mesosim/MELoop.h> #include <mesosim/MESegment.h> #include <mesosim/MEVehicle.h> #endif #ifdef CHECK_MEMORY_LEAKS #include <foreign/nvwa/debug_new.h> #endif // CHECK_MEMORY_LEAKS // =========================================================================== // static member definitions // =========================================================================== MSEdge::DictType MSEdge::myDict; std::vector<MSEdge*> MSEdge::myEdges; // =========================================================================== // member method definitions // =========================================================================== MSEdge::MSEdge(const std::string& id, int numericalID, const EdgeBasicFunction function, const std::string& streetName) : Named(id), myNumericalID(numericalID), myLanes(0), myLaneChanger(0), myFunction(function), myVaporizationRequests(0), myLastFailedInsertionTime(-1), myStreetName(streetName) {} MSEdge::~MSEdge() { delete myLaneChanger; for (AllowedLanesCont::iterator i1 = myAllowed.begin(); i1 != myAllowed.end(); i1++) { delete(*i1).second; } for (ClassedAllowedLanesCont::iterator i2 = myClassedAllowed.begin(); i2 != myClassedAllowed.end(); i2++) { for (AllowedLanesCont::iterator i1 = (*i2).second.begin(); i1 != (*i2).second.end(); i1++) { delete(*i1).second; } } delete myLanes; // Note: Lanes are delete 
using MSLane::clear(); } void MSEdge::initialize(std::vector<MSLane*>* lanes) { assert(myFunction == EDGEFUNCTION_DISTRICT || lanes != 0); myLanes = lanes; if (myLanes && myLanes->size() > 1 && myFunction != EDGEFUNCTION_INTERNAL) { myLaneChanger = new MSLaneChanger(myLanes, OptionsCont::getOptions().getBool("lanechange.allow-swap")); } } void MSEdge::closeBuilding() { myAllowed[0] = new std::vector<MSLane*>(); for (std::vector<MSLane*>::iterator i = myLanes->begin(); i != myLanes->end(); ++i) { myAllowed[0]->push_back(*i); const MSLinkCont& lc = (*i)->getLinkCont(); for (MSLinkCont::const_iterator j = lc.begin(); j != lc.end(); ++j) { MSLane* toL = (*j)->getLane(); if (toL != 0) { MSEdge& to = toL->getEdge(); // if (std::find(mySuccessors.begin(), mySuccessors.end(), &to) == mySuccessors.end()) { mySuccessors.push_back(&to); } if (std::find(to.myPredeccesors.begin(), to.myPredeccesors.end(), this) == to.myPredeccesors.end()) { to.myPredeccesors.push_back(this); } // if (myAllowed.find(&to) == myAllowed.end()) { myAllowed[&to] = new std::vector<MSLane*>(); } myAllowed[&to]->push_back(*i); } #ifdef HAVE_INTERNAL_LANES toL = (*j)->getViaLane(); if (toL != 0) { MSEdge& to = toL->getEdge(); to.myPredeccesors.push_back(this); } #endif } } std::sort(mySuccessors.begin(), mySuccessors.end(), by_id_sorter()); rebuildAllowedLanes(); } void MSEdge::rebuildAllowedLanes() { // clear myClassedAllowed. 
// it will be rebuilt on demand for (ClassedAllowedLanesCont::iterator i2 = myClassedAllowed.begin(); i2 != myClassedAllowed.end(); i2++) { for (AllowedLanesCont::iterator i1 = (*i2).second.begin(); i1 != (*i2).second.end(); i1++) { delete(*i1).second; } } myClassedAllowed.clear(); // rebuild myMinimumPermissions and myCombinedPermissions myMinimumPermissions = SVCFreeForAll; myCombinedPermissions = 0; for (std::vector<MSLane*>::iterator i = myLanes->begin(); i != myLanes->end(); ++i) { myMinimumPermissions &= (*i)->getPermissions(); myCombinedPermissions |= (*i)->getPermissions(); } } // ------------ Access to the edge's lanes MSLane* MSEdge::leftLane(const MSLane* const lane) const { std::vector<MSLane*>::iterator laneIt = find(myLanes->begin(), myLanes->end(), lane); if (laneIt == myLanes->end() || laneIt == myLanes->end() - 1) { return 0; } return *(laneIt + 1); } MSLane* MSEdge::rightLane(const MSLane* const lane) const { std::vector<MSLane*>::iterator laneIt = find(myLanes->begin(), myLanes->end(), lane); if (laneIt == myLanes->end() || laneIt == myLanes->begin()) { return 0; } return *(laneIt - 1); } const std::vector<MSLane*>* MSEdge::allowedLanes(const MSEdge& destination, SUMOVehicleClass vclass) const { return allowedLanes(&destination, vclass); } const std::vector<MSLane*>* MSEdge::allowedLanes(SUMOVehicleClass vclass) const { return allowedLanes(0, vclass); } const std::vector<MSLane*>* MSEdge::getAllowedLanesWithDefault(const AllowedLanesCont& c, const MSEdge* dest) const { AllowedLanesCont::const_iterator it = c.find(dest); if (it == c.end()) { return 0; } return it->second; } const std::vector<MSLane*>* MSEdge::allowedLanes(const MSEdge* destination, SUMOVehicleClass vclass) const { if ((myMinimumPermissions & vclass) == vclass) { // all lanes allow vclass return getAllowedLanesWithDefault(myAllowed, destination); } // look up cached result in myClassedAllowed ClassedAllowedLanesCont::const_iterator i = myClassedAllowed.find(vclass); if (i != 
myClassedAllowed.end()) { // can use cached value const AllowedLanesCont& c = (*i).second; return getAllowedLanesWithDefault(c, destination); } else { // this vclass is requested for the first time. rebuild all destinations // go through connected edges for (AllowedLanesCont::const_iterator i1 = myAllowed.begin(); i1 != myAllowed.end(); ++i1) { const MSEdge* edge = i1->first; const std::vector<MSLane*>* lanes = i1->second; myClassedAllowed[vclass][edge] = new std::vector<MSLane*>(); // go through lanes approaching current edge for (std::vector<MSLane*>::const_iterator i2 = lanes->begin(); i2 != lanes->end(); ++i2) { // allows the current vehicle class? if ((*i2)->allowsVehicleClass(vclass)) { // -> may be used myClassedAllowed[vclass][edge]->push_back(*i2); } } // assert that 0 is returned if no connection is allowed for a class if (myClassedAllowed[vclass][edge]->size() == 0) { delete myClassedAllowed[vclass][edge]; myClassedAllowed[vclass][edge] = 0; } } return myClassedAllowed[vclass][destination]; } } // ------------ SUMOTime MSEdge::incVaporization(SUMOTime) { ++myVaporizationRequests; return 0; } SUMOTime MSEdge::decVaporization(SUMOTime) { --myVaporizationRequests; return 0; } MSLane* MSEdge::getFreeLane(const std::vector<MSLane*>* allowed, const SUMOVehicleClass vclass) const { if (allowed == 0) { allowed = allowedLanes(vclass); } MSLane* res = 0; if (allowed != 0) { unsigned int noCars = INT_MAX; for (std::vector<MSLane*>::const_iterator i = allowed->begin(); i != allowed->end(); ++i) { if ((*i)->getVehicleNumber() < noCars) { res = (*i); noCars = (*i)->getVehicleNumber(); } } } return res; } MSLane* MSEdge::getDepartLane(const MSVehicle& veh) const { switch (veh.getParameter().departLaneProcedure) { case DEPART_LANE_GIVEN: if ((int) myLanes->size() <= veh.getParameter().departLane || !(*myLanes)[veh.getParameter().departLane]->allowsVehicleClass(veh.getVehicleType().getVehicleClass())) { return 0; } return (*myLanes)[veh.getParameter().departLane]; case 
DEPART_LANE_RANDOM: return RandHelper::getRandomFrom(*allowedLanes(veh.getVehicleType().getVehicleClass())); case DEPART_LANE_FREE: return getFreeLane(0, veh.getVehicleType().getVehicleClass()); case DEPART_LANE_ALLOWED_FREE: if (veh.getRoute().size() == 1) { return getFreeLane(0, veh.getVehicleType().getVehicleClass()); } else { return getFreeLane(allowedLanes(**(veh.getRoute().begin() + 1)), veh.getVehicleType().getVehicleClass()); } case DEPART_LANE_BEST_FREE: { const std::vector<MSVehicle::LaneQ>& bl = veh.getBestLanes(false, (*myLanes)[0]); SUMOReal bestLength = -1; for (std::vector<MSVehicle::LaneQ>::const_iterator i = bl.begin(); i != bl.end(); ++i) { if ((*i).length > bestLength) { bestLength = (*i).length; } } std::vector<MSLane*>* bestLanes = new std::vector<MSLane*>(); for (std::vector<MSVehicle::LaneQ>::const_iterator i = bl.begin(); i != bl.end(); ++i) { if ((*i).length == bestLength) { bestLanes->push_back((*i).lane); } } MSLane* ret = getFreeLane(bestLanes, veh.getVehicleType().getVehicleClass()); delete bestLanes; return ret; } case DEPART_LANE_DEFAULT: default: break; } if (!(*myLanes)[0]->allowsVehicleClass(veh.getVehicleType().getVehicleClass())) { return 0; } return (*myLanes)[0]; } bool MSEdge::insertVehicle(SUMOVehicle& v, SUMOTime time) const { // when vaporizing, no vehicles are inserted... if (isVaporizing()) { return false; } #ifdef HAVE_INTERNAL if (MSGlobals::gUseMesoSim) { const SUMOVehicleParameter& pars = v.getParameter(); SUMOReal pos = 0.0; switch (pars.departPosProcedure) { case DEPART_POS_GIVEN:<|fim▁hole|> pos = pars.departPos; } else { pos = pars.departPos + getLength(); } if (pos < 0 || pos > getLength()) { WRITE_WARNING("Invalid departPos " + toString(pos) + " given for vehicle '" + v.getID() + "'. 
Inserting at lane end instead."); pos = getLength(); } break; case DEPART_POS_RANDOM: case DEPART_POS_RANDOM_FREE: pos = RandHelper::rand(getLength()); break; default: break; } bool result = false; MESegment* segment = MSGlobals::gMesoNet->getSegmentForEdge(*this, pos); MEVehicle* veh = static_cast<MEVehicle*>(&v); if (pars.departPosProcedure == DEPART_POS_FREE) { while (segment != 0 && !result) { result = segment->initialise(veh, time); segment = segment->getNextSegment(); } } else { result = segment->initialise(veh, time); } return result; } #else UNUSED_PARAMETER(time); #endif MSLane* insertionLane = getDepartLane(static_cast<MSVehicle&>(v)); return insertionLane != 0 && insertionLane->insertVehicle(static_cast<MSVehicle&>(v)); } void MSEdge::changeLanes(SUMOTime t) { if (myFunction == EDGEFUNCTION_INTERNAL) { return; } assert(myLaneChanger != 0); myLaneChanger->laneChange(t); } #ifdef HAVE_INTERNAL_LANES const MSEdge* MSEdge::getInternalFollowingEdge(MSEdge* followerAfterInternal) const { //@todo to be optimized for (std::vector<MSLane*>::const_iterator i = myLanes->begin(); i != myLanes->end(); ++i) { MSLane* l = *i; const MSLinkCont& lc = l->getLinkCont(); for (MSLinkCont::const_iterator j = lc.begin(); j != lc.end(); ++j) { MSLink* link = *j; if (&link->getLane()->getEdge() == followerAfterInternal) { if (link->getViaLane() != 0) { return &link->getViaLane()->getEdge(); } else { return 0; // network without internal links } } } } return 0; } #endif SUMOReal MSEdge::getCurrentTravelTime(SUMOReal minSpeed) const { assert(minSpeed > 0); SUMOReal v = 0; #ifdef HAVE_INTERNAL if (MSGlobals::gUseMesoSim) { MESegment* first = MSGlobals::gMesoNet->getSegmentForEdge(*this); unsigned segments = 0; do { v += first->getMeanSpeed(); first = first->getNextSegment(); segments++; } while (first != 0); v /= (SUMOReal) segments; } else { #endif for (std::vector<MSLane*>::iterator i = myLanes->begin(); i != myLanes->end(); ++i) { v += (*i)->getMeanSpeed(); } v /= (SUMOReal) 
myLanes->size(); #ifdef HAVE_INTERNAL } #endif v = MAX2(minSpeed, v); return getLength() / v; } bool MSEdge::dictionary(const std::string& id, MSEdge* ptr) { DictType::iterator it = myDict.find(id); if (it == myDict.end()) { // id not in myDict. myDict[id] = ptr; if (ptr->getNumericalID() != -1) { while ((int)myEdges.size() < ptr->getNumericalID() + 1) { myEdges.push_back(0); } myEdges[ptr->getNumericalID()] = ptr; } return true; } return false; } MSEdge* MSEdge::dictionary(const std::string& id) { DictType::iterator it = myDict.find(id); if (it == myDict.end()) { // id not in myDict. return 0; } return it->second; } MSEdge* MSEdge::dictionary(size_t id) { assert(myEdges.size() > id); return myEdges[id]; } size_t MSEdge::dictSize() { return myDict.size(); } size_t MSEdge::numericalDictSize() { return myEdges.size(); } void MSEdge::clear() { for (DictType::iterator i = myDict.begin(); i != myDict.end(); ++i) { delete(*i).second; } myDict.clear(); } void MSEdge::insertIDs(std::vector<std::string>& into) { for (DictType::iterator i = myDict.begin(); i != myDict.end(); ++i) { into.push_back((*i).first); } } void MSEdge::parseEdgesList(const std::string& desc, std::vector<const MSEdge*>& into, const std::string& rid) { if (desc[0] == BinaryFormatter::BF_ROUTE) { std::istringstream in(desc, std::ios::binary); char c; in >> c; FileHelpers::readEdgeVector(in, into, rid); } else { StringTokenizer st(desc); parseEdgesList(st.getVector(), into, rid); } } void MSEdge::parseEdgesList(const std::vector<std::string>& desc, std::vector<const MSEdge*>& into, const std::string& rid) { for (std::vector<std::string>::const_iterator i = desc.begin(); i != desc.end(); ++i) { const MSEdge* edge = MSEdge::dictionary(*i); // check whether the edge exists if (edge == 0) { throw ProcessError("The edge '" + *i + "' within the route " + rid + " is not known." 
+ "\n The route can not be build."); } into.push_back(edge); } } SUMOReal MSEdge::getDistanceTo(const MSEdge* other) const { if (getLanes().size() > 0 && other->getLanes().size() > 0) { return getLanes()[0]->getShape()[-1].distanceTo2D(other->getLanes()[0]->getShape()[0]); } else { return 0; // optimism is just right for astar } } SUMOReal MSEdge::getLength() const { return getLanes()[0]->getLength(); } SUMOReal MSEdge::getSpeedLimit() const { // @note lanes might have different maximum speeds in theory return getLanes()[0]->getSpeedLimit(); } SUMOReal MSEdge::getVehicleMaxSpeed(const SUMOVehicle* const veh) const { // @note lanes might have different maximum speeds in theory return getLanes()[0]->getVehicleMaxSpeed(veh); } /****************************************************************************/<|fim▁end|>
if (pars.departPos >= 0.) {
<|file_name|>ExceptionTable.java<|end_file_name|><|fim▁begin|>/* * Copyright 2000-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * */ package benchmark.bcel.classfile; import java.io.DataInputStream; import java.io.DataOutputStream; import java.io.IOException; import benchmark.bcel.Constants; /** * This class represents the table of exceptions that are thrown by a * method. This attribute may be used once per method. The name of * this class is <em>ExceptionTable</em> for historical reasons; The * Java Virtual Machine Specification, Second Edition defines this * attribute using the name <em>Exceptions</em> (which is inconsistent * with the other classes). * * @version $Id: ExceptionTable.java 386056 2006-03-15 11:31:56Z tcurdt $ * @author <A HREF="mailto:[email protected]">M. Dahm</A> * @see Code */ public final class ExceptionTable extends Attribute { private int number_of_exceptions; // Table of indices into private int[] exception_index_table; // constant pool /** * Initialize from another object. Note that both objects use the same * references (shallow copy). Use copy() for a physical copy. 
*/ public ExceptionTable(ExceptionTable c) { this(c.getNameIndex(), c.getLength(), c.getExceptionIndexTable(), c.getConstantPool()); } /** * @param name_index Index in constant pool * @param length Content length in bytes * @param exception_index_table Table of indices in constant pool * @param constant_pool Array of constants */ public ExceptionTable(int name_index, int length, int[] exception_index_table, ConstantPool constant_pool) { super(Constants.ATTR_EXCEPTIONS, name_index, length, constant_pool); setExceptionIndexTable(exception_index_table); } /** * Construct object from file stream. * @param name_index Index in constant pool * @param length Content length in bytes * @param file Input stream * @param constant_pool Array of constants * @throws IOException */ ExceptionTable(int name_index, int length, DataInputStream file, ConstantPool constant_pool) throws IOException { this(name_index, length, (int[]) null, constant_pool); number_of_exceptions = file.readUnsignedShort(); exception_index_table = new int[number_of_exceptions]; for (int i = 0; i < number_of_exceptions; i++) { exception_index_table[i] = file.readUnsignedShort(); } } <|fim▁hole|> * Called by objects that are traversing the nodes of the tree implicitely * defined by the contents of a Java class. I.e., the hierarchy of methods, * fields, attributes, etc. spawns a tree of objects. * * @param v Visitor object */ public void accept( Visitor v ) { v.visitExceptionTable(this); } /** * Dump exceptions attribute to file stream in binary format. * * @param file Output file stream * @throws IOException */ public final void dump( DataOutputStream file ) throws IOException { super.dump(file); file.writeShort(number_of_exceptions); for (int i = 0; i < number_of_exceptions; i++) { file.writeShort(exception_index_table[i]); } } /** * @return Array of indices into constant pool of thrown exceptions. 
*/ public final int[] getExceptionIndexTable() { return exception_index_table; } /** * @return Length of exception table. */ public final int getNumberOfExceptions() { return number_of_exceptions; } /** * @return class names of thrown exceptions */ public final String[] getExceptionNames() { String[] names = new String[number_of_exceptions]; for (int i = 0; i < number_of_exceptions; i++) { names[i] = constant_pool.getConstantString(exception_index_table[i], Constants.CONSTANT_Class).replace('/', '.'); } return names; } /** * @param exception_index_table the list of exception indexes * Also redefines number_of_exceptions according to table length. */ public final void setExceptionIndexTable( int[] exception_index_table ) { this.exception_index_table = exception_index_table; number_of_exceptions = (exception_index_table == null) ? 0 : exception_index_table.length; } /** * @return String representation, i.e., a list of thrown exceptions. */ public final String toString() { StringBuffer buf = new StringBuffer(""); String str; for (int i = 0; i < number_of_exceptions; i++) { str = constant_pool.getConstantString(exception_index_table[i], Constants.CONSTANT_Class); buf.append(Utility.compactClassName(str, false)); if (i < number_of_exceptions - 1) { buf.append(", "); } } return buf.toString(); } /** * @return deep copy of this attribute */ public Attribute copy( ConstantPool _constant_pool ) { ExceptionTable c = (ExceptionTable) clone(); if (exception_index_table != null) { c.exception_index_table = new int[exception_index_table.length]; System.arraycopy(exception_index_table, 0, c.exception_index_table, 0, exception_index_table.length); } c.constant_pool = _constant_pool; return c; } }<|fim▁end|>
/**
<|file_name|>cover.py<|end_file_name|><|fim▁begin|>"""Support for RFXtrx covers.""" import logging from homeassistant.components.cover import CoverEntity from homeassistant.const import CONF_DEVICES, STATE_OPEN from homeassistant.core import callback from . import ( CONF_AUTOMATIC_ADD, CONF_DATA_BITS, CONF_SIGNAL_REPETITIONS, DEFAULT_SIGNAL_REPETITIONS, SIGNAL_EVENT, RfxtrxCommandEntity,<|fim▁hole|> get_device_id, get_rfx_object, ) from .const import COMMAND_OFF_LIST, COMMAND_ON_LIST _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass, config_entry, async_add_entities, ): """Set up config entry.""" discovery_info = config_entry.data device_ids = set() def supported(event): return event.device.known_to_be_rollershutter entities = [] for packet_id, entity_info in discovery_info[CONF_DEVICES].items(): event = get_rfx_object(packet_id) if event is None: _LOGGER.error("Invalid device: %s", packet_id) continue if not supported(event): continue device_id = get_device_id( event.device, data_bits=entity_info.get(CONF_DATA_BITS) ) if device_id in device_ids: continue device_ids.add(device_id) entity = RfxtrxCover( event.device, device_id, entity_info[CONF_SIGNAL_REPETITIONS] ) entities.append(entity) async_add_entities(entities) @callback def cover_update(event, device_id): """Handle cover updates from the RFXtrx gateway.""" if not supported(event): return if device_id in device_ids: return device_ids.add(device_id) _LOGGER.info( "Added cover (Device ID: %s Class: %s Sub: %s, Event: %s)", event.device.id_string.lower(), event.device.__class__.__name__, event.device.subtype, "".join(f"{x:02x}" for x in event.data), ) entity = RfxtrxCover( event.device, device_id, DEFAULT_SIGNAL_REPETITIONS, event=event ) async_add_entities([entity]) # Subscribe to main RFXtrx events if discovery_info[CONF_AUTOMATIC_ADD]: hass.helpers.dispatcher.async_dispatcher_connect(SIGNAL_EVENT, cover_update) class RfxtrxCover(RfxtrxCommandEntity, CoverEntity): """Representation of a 
RFXtrx cover.""" async def async_added_to_hass(self): """Restore device state.""" await super().async_added_to_hass() if self._event is None: old_state = await self.async_get_last_state() if old_state is not None: self._state = old_state.state == STATE_OPEN @property def is_closed(self): """Return if the cover is closed.""" return not self._state async def async_open_cover(self, **kwargs): """Move the cover up.""" await self._async_send(self._device.send_open) self._state = True self.async_write_ha_state() async def async_close_cover(self, **kwargs): """Move the cover down.""" await self._async_send(self._device.send_close) self._state = False self.async_write_ha_state() async def async_stop_cover(self, **kwargs): """Stop the cover.""" await self._async_send(self._device.send_stop) self._state = True self.async_write_ha_state() def _apply_event(self, event): """Apply command from rfxtrx.""" super()._apply_event(event) if event.values["Command"] in COMMAND_ON_LIST: self._state = True elif event.values["Command"] in COMMAND_OFF_LIST: self._state = False @callback def _handle_event(self, event, device_id): """Check if event applies to me and update.""" if device_id != self._device_id: return self._apply_event(event) self.async_write_ha_state()<|fim▁end|>
<|file_name|>PageDomain.java<|end_file_name|><|fim▁begin|>package com.zlwh.hands.api.domain.base; public class PageDomain { private String pageNo; private String pageSize = "15"; private long pageTime; // 上次刷新时间,分页查询时,防止分页数据错乱 public String getPageNo() { return pageNo; } public void setPageNo(String pageNo) { this.pageNo = pageNo; } public String getPageSize() { return pageSize; } public void setPageSize(String pageSize) {<|fim▁hole|> } public long getPageTime() { return pageTime; } public void setPageTime(long pageTime) { this.pageTime = pageTime; } }<|fim▁end|>
this.pageSize = pageSize;
<|file_name|>main.js<|end_file_name|><|fim▁begin|><|fim▁hole|>jQuery(function($) { //smooth scroll $('.navbar-nav > li.anchor').click(function(event) { //event.preventDefault(); var target = $(this).find('>a').prop('hash'); $('#navbar .active').removeClass('active'); $(this).addClass('active'); $('html, body').animate({ scrollTop: $(target).offset().top }, 500); }); //scrollspy $('[data-spy="scroll"]').each(function () { var $spy = $(this).scrollspy('refresh') }); $(function() { $(".navbar-btn").click(function() { $("#options").toggle(); $(".navbar-btn .glyphicon").toggleClass("glyphicon-plus") .toggleClass("glyphicon-minus"); }); }); });<|fim▁end|>
<|file_name|>dispatcher.rs<|end_file_name|><|fim▁begin|>/* Copyright 2017 Jinjing Wang This file is part of mtcp. mtcp is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. mtcp is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with mtcp. If not, see <http://www.gnu.org/licenses/>. */ use std::sync::mpsc; use constant::*; use structure::socket::udp::*; use structure::socket::tcp::*; use structure::socket::packet::*; pub fn dispatch ( tun_in_receiver: TunReceiver, udp_sender: Option<mpsc::Sender<UDP>>, tcp_sender: Option<mpsc::Sender<TCP>>, ) { while let Ok(Some(received)) = tun_in_receiver.recv() { match parse_packet(&received) { Some(Packet::UDP(udp)) => { // debug!("Dispatch UDP: {:#?}", udp.connection); match udp_sender { None => {} Some(ref sender) => { let _ = sender.send(udp); } }<|fim▁hole|> match tcp_sender { None => {} Some(ref sender) => { let _ = sender.send(tcp); } } } _ => {} } } } // fn skip_tun_incoming(connection: Connection) -> bool { // let tun_ip: IpAddr = IpAddr::from_str("10.0.0.1").unwrap(); // let source_ip = connection.source.ip(); // let destination_ip = connection.destination.ip(); // debug!("comparing {:#?} -> {:#?}, {:#?}", source_ip, destination_ip, tun_ip); // (source_ip == tun_ip) || (destination_ip == tun_ip); // false // }<|fim▁end|>
} Some(Packet::TCP(tcp)) => { // debug!("Dispatch TCP: {:#?}", tcp.connection);
<|file_name|>plot1.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 #!/usr/bin/python # https://en.wikipedia.org/wiki/Matplotlib<|fim▁hole|> import numpy import matplotlib.pyplot as plt from numpy.random import rand a = rand(100) b = rand(100) plt.scatter(a, b) plt.show()<|fim▁end|>
<|file_name|>animation.py<|end_file_name|><|fim▁begin|>import pygame class Animation: def __init__(self, sheet, seq): #Attributes self.sheet = sheet self.length = seq[0] self.delay = seq[1] self.x = seq[2] self.y = seq[3] self.w = seq[4] self.h = seq[5] self.step = 0 self.tick = 0 self.curX = self.x<|fim▁hole|> def draw(self, screen, dest): screen.blit(self.sheet, dest, pygame.Rect(self.curX, self.y, self.w, self.h)) if self.tick == self.delay: self.tick = 0 self.step += 1 if self.step < self.length: self.curX += self.w else: self.step = 0 self.curX = self.x else: self.tick += 1<|fim▁end|>
<|file_name|>cookie-monster-spec.js<|end_file_name|><|fim▁begin|>var cookie = require('../index'); chai.should(); describe('cookie monster', function() { it('sets a cookie', function (){ cookie.setItem('cookieKey', 'cookieVal'); document.cookie.should.contain('cookieKey=cookieVal'); }); it('gets a cookie', function (){ document.cookie = 'dumby=mcdumberson;'; cookie.getItem('dumby').should.equal('mcdumberson'); }); it('sets and gets cookie with `=` in value', function (){ cookie.setItem('key', 'val=ue'); cookie.getItem('key').should.equal('val=ue'); }); it('removes a cookie', function (){ document.cookie = 'dumby=mcdumberson;'; document.cookie.should.contain('dumby=mcdumberson'); cookie.removeItem('dumby'); document.cookie.should.not.contain('dumby=mcdumberson'); }); <|fim▁hole|> for (var i = 0; i++; i < 30){ cookie.setItem('key' + i, 'value' + i); } for (var i = 0; i++; i < 30){ cookie.getItem('key' + i).should.equal('value' + i); } cookie.clear(); document.cookie.should.equal(''); }); });<|fim▁end|>
it('sets 30 cookies and clears all of them', function (){
<|file_name|>config.py<|end_file_name|><|fim▁begin|>from yaml import load from os import environ from os.path import join, isfile from ..module_ultra_repo import ModuleUltraRepo from ..module_ultra_config import ModuleUltraConfig class RepoDaemonConfig: """Represent a MU repo to the MU daemon.""" def __init__(self, **kwargs): self.repo_name = kwargs['repo_name'] self.repo_path = kwargs['repo_path'] self.pipelines = kwargs['pipelines'] def get_repo(self): """Return the MU repo that this represents.""" return ModuleUltraRepo(self.repo_path) def get_pipeline_list(self): """Return a list of (pipe_name, version).""" return [(pipe['name'], pipe['version']) for pipe in self.pipelines] def get_pipeline_tolerance(self, pipe_name): """Return tolerance for the pipeline.""" for pipe in self.pipelines: if pipe['name'] == pipe_name: return pipe.get('tolerance', 0) def get_pipeline_endpts(self, pipe_name): """Return a list of endpts or None.""" return None def get_pipeline_excluded_endpts(self, pipe_name): """Return a list of excluded endpts or None.""" return None class DaemonConfig: """Store config information for the MU daemon.""" def __init__(self, repos, total_jobs=10, run_local=True, pipeline_configs={}): self.repos = repos self.total_jobs = int(total_jobs) self.run_local = run_local self.pipeline_configs = pipeline_configs def list_repos(self): """Return a list of RepoDaemonConfigs.""" repo_configs = [] for repo_name, repo_path, pipelines in self.repos: repo_configs.append(RepoDaemonConfig(**{ 'repo_name': repo_name,<|fim▁hole|> })) return repo_configs def get_pipeline_run_config(self, pipe_name, pipe_version): """Return a filepath for the config to be used or None.""" return None @classmethod def get_daemon_config_filename(ctype): try: return environ['MODULE_ULTRA_DAEMON_CONFIG'] except KeyError: config_dir = ModuleUltraConfig.getConfigDir() config_filename = join(config_dir, 'daemon_config.yaml') if isfile(config_filename): return config_filename assert False, "No daemon 
config found" @classmethod def load_from_yaml(ctype, yaml_filename=None): yaml_filename = yaml_filename if yaml_filename else ctype.get_daemon_config_filename() raw_config = load(open(yaml_filename)) raw_repos = raw_config['repos'] repo_list = [ (raw_repo['name'], raw_repo['path'], raw_repo['pipelines']) for raw_repo in raw_repos ] return DaemonConfig( repo_list, total_jobs=raw_config.get('num_jobs', 10), run_local=raw_config.get('run_on_cluster', True), pipeline_configs=raw_config.get('pipeline_configs', {}) )<|fim▁end|>
'repo_path': repo_path, 'pipelines': pipelines,
<|file_name|>grad_multiply.py<|end_file_name|><|fim▁begin|># Copyright (c) 2017-present, Facebook, Inc. # All rights reserved. # # This source code is licensed under the license found in the LICENSE file in # the root directory of this source tree. An additional grant of patent rights<|fim▁hole|># can be found in the PATENTS file in the same directory. import torch class GradMultiply(torch.autograd.Function): @staticmethod def forward(ctx, x, scale): ctx.scale = scale res = x.new(x) return res @staticmethod def backward(ctx, grad): return grad * ctx.scale, None<|fim▁end|>
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import binascii import hashlib from reversecoin.bitcoin.key import CKey as Key from reversecoin.bitcoin.base58 import encode, decode def myhash(s): return hashlib.sha256(hashlib.sha256(s).digest()).digest() def myhash160(s): h = hashlib.new('ripemd160') h.update(hashlib.sha256(s).digest()) return h.digest() def getnewaddress(): # Generate public and private keys key = Key() key.generate() key.set_compressed(True) private_key = key.get_privkey() public_key = key.get_pubkey() private_key_hex = private_key.encode('hex') public_key_hex = public_key.encode('hex') public_key_bytearray = bytearray.fromhex(public_key_hex) # Perform SHA-256 and RIPEMD-160 hashing on public key hash160_address = myhash160(public_key_bytearray) # add version byte: 0x00 for Main Network extended_address = '\x00' + hash160_address # generate double SHA-256 hash of extended address<|fim▁hole|> # Take the first 4 bytes of the second SHA-256 hash. This is the address checksum checksum = hash_address[:4] # Add the 4 checksum bytes from point 7 at the end of extended RIPEMD-160 hash from point 4. This is the 25-byte binary Bitcoin Address. binary_address = extended_address + checksum # Convert the result from a byte string into a base58 string using Base58Check encoding. address = encode(binary_address) return public_key, private_key, address def public_key_to_address(public_key): public_key_hex = public_key.encode('hex') public_key_bytearray = bytearray.fromhex(public_key_hex) # Perform SHA-256 and RIPEMD-160 hashing on public key hash160_address = myhash160(public_key_bytearray) # add version byte: 0x00 for Main Network extended_address = '\x00' + hash160_address # generate double SHA-256 hash of extended address hash_address = myhash(extended_address) # Take the first 4 bytes of the second SHA-256 hash. 
This is the address checksum checksum = hash_address[:4] # Add the 4 checksum bytes from point 7 at the end of extended RIPEMD-160 hash from point 4. This is the 25-byte binary Bitcoin Address. binary_address = extended_address + checksum address = encode(binary_address) return address def public_key_hex_to_address(public_key_hex): public_key_bytearray = bytearray.fromhex(public_key_hex) # Perform SHA-256 and RIPEMD-160 hashing on public key hash160_address = myhash160(public_key_bytearray) # add version byte: 0x00 for Main Network extended_address = '\x00' + hash160_address # generate double SHA-256 hash of extended address hash_address = myhash(extended_address) # Take the first 4 bytes of the second SHA-256 hash. This is the address checksum checksum = hash_address[:4] # Add the 4 checksum bytes from point 7 at the end of extended RIPEMD-160 hash from point 4. This is the 25-byte binary Bitcoin Address. binary_address = extended_address + checksum address = encode(binary_address) return address # fix this def address_to_public_key_hash(address): binary_address = decode(address) # remove the 4 checksum bytes extended_address = binary_address[:-4] # remove version byte: 0x00 for Main Network hash160_address = extended_address[1:] return hash160_address def public_key_hex_to_pay_to_script_hash(public_key_hex): script = "41" + public_key_hex + "AC" return binascii.unhexlify(script) def address_to_pay_to_pubkey_hash(address): print "Not implemented >>>>>>>>>>>>>>>>>>>" exit(0) def output_script_to_public_key_hash(script): script_key_hash = binascii.hexlify(myhash160(bytearray.fromhex(binascii.hexlify(script[1:-1])))) return script_key_hash def address_to_output_script(address): pass if __name__ == "__main__": address1 = "16UwLL9Risc3QfPqBUvKofHmBQ7wMtjvM" address2 = "1A1zP1eP5QGefi2DMPTfTL5SLmv7DivfNa" public_key_hex1 = "0450863AD64A87AE8A2FE83C1AF1A8403CB53F53E486D8511DAD8A04887E5B23522CD470243453A299FA9E77237716103ABC11A1DF38855ED6F2EE187E9C582BA6" public_key_hex2 
= "04678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4cef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5f" print "address: ", address1 print "public key_hex: ", public_key_hex1 #print "public_keys_hex: ", public_key_hex1, public_key_hex2 print "public key to address: ", public_key_hex_to_address(public_key_hex1) print "address to public key hash: ", binascii.hexlify(address_to_public_key_hash(address1)) # print "public key hash: ", binascii.hexlify(myhash160(bytearray.fromhex(public_key_hex1)))<|fim▁end|>
hash_address = myhash(extended_address)
<|file_name|>lfm_service.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright (C) 2016 Taifxx # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ########## LAUNCHING FROM MEMORY SERVICE: ### Import modules ... import context_ex as context import context as cstart import deb ### Define ... ITD_FILE = cstart.ITD_FILE error_file = cstart.error_file adnname = context.tl(context.TAG_TTL_NM) % (context.addon.name) ### Messages ... msgStrat = 'Launching from memory service started ...' msgEnd = 'Launching from memory service stopped ...' msgStratVisual = 'LFM service started' msgEndVisual = 'LFM service stopped' msgProcessError = 'Process ERROR' ### Base functions ... log = lambda event : context.xbmc.log('[%s] >> %s' % (context.addon.id, event)) def starter(): isRaise = False try: context.plgMain (importLI=ITD_FILE)<|fim▁hole|> context.DOS.delf(context.DOS.join(context.addon.profile, context.TAG_PAR_STRARTF)) context.GUI.msgf(adnname, msgProcessError, context.GUI.notError) deb.addraise(context.DOS.join(context.addon.profile, error_file)) isRaise = True finally: ## If user try double run ... if context.xbmcvfs.exists(context.DOS.join(context.addon.profile, ITD_FILE)): context.DOS.delf(context.DOS.join(context.addon.profile, ITD_FILE)) if isRaise : raise ### Main ... def service(externalAbort, report): ## Load monitor ... monitor = context.xbmc.Monitor() ## Log start ... 
log(msgStrat) if report : context.GUI.msg(adnname, msgStratVisual) ## Start service ... while not monitor.abortRequested(): ## Check starter ... if context.xbmcvfs.exists(context.DOS.join(context.addon.profile, ITD_FILE)) : starter() ## Check exit ... if monitor.waitForAbort(1) or externalAbort() : break ## End service (log end) ... del monitor log(msgEnd) context.GUI.msg(adnname, msgEndVisual)<|fim▁end|>
except Exception as exc:
<|file_name|>trumail.js<|end_file_name|><|fim▁begin|>$('.ui.dropdown').dropdown(); $(document).ready(function() { $('#test-form').on('submit', function(e) { e.preventDefault(); var format = document.getElementsByName('test-format')[0].value; var email = document.getElementsByName('test-email')[0].value; // Verify the parameters were passed if (format === '' || email === '') { return; }<|fim▁hole|> xhr.open('GET', 'https://trumail.io/' + format + '/' + email, true); xhr.onload = function(e) { var results; if (format === 'json') { results = vkbeautify.json(xhr.responseText, 4); } if (format === 'xml') { results = vkbeautify.xml(xhr.responseText, 4); } document.getElementsByName('test-results')[0].textContent = results; $('.ui.modal').modal({ closable: false, transition: 'flip vertical' }).modal('show'); $('#test-button').removeClass('loading'); }; xhr.onerror = function(e) { console.error(xhr.statusText); }; xhr.send(null); }); });<|fim▁end|>
$('#test-button').addClass('loading'); var xhr = new XMLHttpRequest();
<|file_name|>serve.go<|end_file_name|><|fim▁begin|>// FUSE service loop, for servers that wish to use it. package fs // import "github.com/muthu-r/horcrux/bazil-fuse/fuse/fs" import ( "encoding/binary" "fmt" "hash/fnv" "io" "reflect" "runtime" "strings" "sync" "time" "golang.org/x/net/context" log "github.com/Sirupsen/logrus" ) import ( "bytes" "github.com/muthu-r/horcrux/bazil-fuse/fuse" "github.com/muthu-r/horcrux/bazil-fuse/fuse/fuseutil" ) const ( attrValidTime = 1 * time.Minute //RMK entryValidTime = 1 * time.Minute //RMK ) // TODO: FINISH DOCS // An FS is the interface required of a file system. // // Other FUSE requests can be handled by implementing methods from the // FS* interfaces, for example FSStatfser. type FS interface { // Root is called to obtain the Node for the file system root. Root() (Node, error) } type FSStatfser interface { // Statfs is called to obtain file system metadata. // It should write that data to resp. Statfs(ctx context.Context, req *fuse.StatfsRequest, resp *fuse.StatfsResponse) error } type FSDestroyer interface { // Destroy is called when the file system is shutting down. // // Linux only sends this request for block device backed (fuseblk) // filesystems, to allow them to flush writes to disk before the // unmount completes. Destroy() } type FSInodeGenerator interface { // GenerateInode is called to pick a dynamic inode number when it // would otherwise be 0. // // Not all filesystems bother tracking inodes, but FUSE requires // the inode to be set, and fewer duplicates in general makes UNIX // tools work better. // // Operations where the nodes may return 0 inodes include Getattr, // Setattr and ReadDir. // // If FS does not implement FSInodeGenerator, GenerateDynamicInode // is used. // // Implementing this is useful to e.g. constrain the range of // inode values used for dynamic inodes. GenerateInode(parentInode uint64, name string) uint64 } // A Node is the interface required of a file or directory. 
// See the documentation for type FS for general information // pertaining to all methods. // // A Node must be usable as a map key, that is, it cannot be a // function, map or slice. // // Other FUSE requests can be handled by implementing methods from the // Node* interfaces, for example NodeOpener. // // Methods returning Node should take care to return the same Node // when the result is logically the same instance. Without this, each // Node will get a new NodeID, causing spurious cache invalidations, // extra lookups and aliasing anomalies. This may not matter for a // simple, read-only filesystem. type Node interface { // Attr fills attr with the standard metadata for the node. Attr(ctx context.Context, attr *fuse.Attr) error } type NodeGetattrer interface { // Getattr obtains the standard metadata for the receiver. // It should store that metadata in resp. // // If this method is not implemented, the attributes will be // generated based on Attr(), with zero values filled in. Getattr(ctx context.Context, req *fuse.GetattrRequest, resp *fuse.GetattrResponse) error } type NodeSetattrer interface { // Setattr sets the standard metadata for the receiver. // // Note, this is also used to communicate changes in the size of // the file. Not implementing Setattr causes writes to be unable // to grow the file (except with OpenDirectIO, which bypasses that // mechanism). // // req.Valid is a bitmask of what fields are actually being set. // For example, the method should not change the mode of the file // unless req.Valid.Mode() is true. Setattr(ctx context.Context, req *fuse.SetattrRequest, resp *fuse.SetattrResponse) error } type NodeSymlinker interface { // Symlink creates a new symbolic link in the receiver, which must be a directory. // // TODO is the above true about directories? Symlink(ctx context.Context, req *fuse.SymlinkRequest) (Node, error) } // This optional request will be called only for symbolic link nodes. 
type NodeReadlinker interface { // Readlink reads a symbolic link. Readlink(ctx context.Context, req *fuse.ReadlinkRequest) (string, error) } type NodeLinker interface { // Link creates a new directory entry in the receiver based on an // existing Node. Receiver must be a directory. Link(ctx context.Context, req *fuse.LinkRequest, old Node) (Node, error) } type NodeRemover interface { // Remove removes the entry with the given name from // the receiver, which must be a directory. The entry to be removed // may correspond to a file (unlink) or to a directory (rmdir). Remove(ctx context.Context, req *fuse.RemoveRequest) error } type NodeAccesser interface { // Access checks whether the calling context has permission for // the given operations on the receiver. If so, Access should // return nil. If not, Access should return EPERM. // // Note that this call affects the result of the access(2) system // call but not the open(2) system call. If Access is not // implemented, the Node behaves as if it always returns nil // (permission granted), relying on checks in Open instead. Access(ctx context.Context, req *fuse.AccessRequest) error } type NodeStringLookuper interface { // Lookup looks up a specific entry in the receiver, // which must be a directory. Lookup should return a Node // corresponding to the entry. If the name does not exist in // the directory, Lookup should return ENOENT. // // Lookup need not to handle the names "." and "..". Lookup(ctx context.Context, name string) (Node, error) } type NodeRequestLookuper interface { // Lookup looks up a specific entry in the receiver. // See NodeStringLookuper for more. Lookup(ctx context.Context, req *fuse.LookupRequest, resp *fuse.LookupResponse) (Node, error) } type NodeMkdirer interface { Mkdir(ctx context.Context, req *fuse.MkdirRequest) (Node, error) } type NodeOpener interface { // Open opens the receiver. After a successful open, a client // process has a file descriptor referring to this Handle. 
// // Open can also be also called on non-files. For example, // directories are Opened for ReadDir or fchdir(2). // // If this method is not implemented, the open will always // succeed, and the Node itself will be used as the Handle. // // XXX note about access. XXX OpenFlags. Open(ctx context.Context, req *fuse.OpenRequest, resp *fuse.OpenResponse) (Handle, error) } type NodeCreater interface { // Create creates a new directory entry in the receiver, which // must be a directory. Create(ctx context.Context, req *fuse.CreateRequest, resp *fuse.CreateResponse) (Node, Handle, error) } type NodeForgetter interface { // Forget about this node. This node will not receive further // method calls. // // Forget is not necessarily seen on unmount, as all nodes are // implicitly forgotten as part part of the unmount. Forget() } type NodeRenamer interface { Rename(ctx context.Context, req *fuse.RenameRequest, newDir Node) error } type NodeMknoder interface { Mknod(ctx context.Context, req *fuse.MknodRequest) (Node, error) } // TODO this should be on Handle not Node type NodeFsyncer interface { Fsync(ctx context.Context, req *fuse.FsyncRequest) error } type NodeGetxattrer interface { // Getxattr gets an extended attribute by the given name from the // node. // // If there is no xattr by that name, returns fuse.ErrNoXattr. Getxattr(ctx context.Context, req *fuse.GetxattrRequest, resp *fuse.GetxattrResponse) error } type NodeListxattrer interface { // Listxattr lists the extended attributes recorded for the node. Listxattr(ctx context.Context, req *fuse.ListxattrRequest, resp *fuse.ListxattrResponse) error } type NodeSetxattrer interface { // Setxattr sets an extended attribute with the given name and // value for the node. Setxattr(ctx context.Context, req *fuse.SetxattrRequest) error } type NodeRemovexattrer interface { // Removexattr removes an extended attribute for the name. // // If there is no xattr by that name, returns fuse.ErrNoXattr. 
Removexattr(ctx context.Context, req *fuse.RemovexattrRequest) error } var startTime = time.Now() func nodeAttr(ctx context.Context, n Node, attr *fuse.Attr) error { attr.Valid = attrValidTime attr.Nlink = 1 attr.Atime = startTime attr.Mtime = startTime attr.Ctime = startTime attr.Crtime = startTime if err := n.Attr(ctx, attr); err != nil { return err } return nil } // A Handle is the interface required of an opened file or directory. // See the documentation for type FS for general information // pertaining to all methods. // // Other FUSE requests can be handled by implementing methods from the // Handle* interfaces. The most common to implement are HandleReader, // HandleReadDirer, and HandleWriter. // // TODO implement methods: Getlk, Setlk, Setlkw type Handle interface { } type HandleFlusher interface { // Flush is called each time the file or directory is closed. // Because there can be multiple file descriptors referring to a // single opened file, Flush can be called multiple times. Flush(ctx context.Context, req *fuse.FlushRequest) error } type HandleReadAller interface { ReadAll(ctx context.Context) ([]byte, error) } type HandleReadDirAller interface { ReadDirAll(ctx context.Context) ([]fuse.Dirent, error) } type HandleReader interface { // Read requests to read data from the handle. // // There is a page cache in the kernel that normally submits only // page-aligned reads spanning one or more pages. However, you // should not rely on this. To see individual requests as // submitted by the file system clients, set OpenDirectIO. // // Note that reads beyond the size of the file as reported by Attr // are not even attempted (except in OpenDirectIO mode). Read(ctx context.Context, req *fuse.ReadRequest, resp *fuse.ReadResponse) error } type HandleWriter interface { // Write requests to write data into the handle. // // There is a writeback page cache in the kernel that normally submits // only page-aligned writes spanning one or more pages. 
However, // you should not rely on this. To see individual requests as // submitted by the file system clients, set OpenDirectIO. // // Note that file size changes are communicated through Setattr. // Writes beyond the size of the file as reported by Attr are not // even attempted (except in OpenDirectIO mode). Write(ctx context.Context, req *fuse.WriteRequest, resp *fuse.WriteResponse) error } type HandleReleaser interface { Release(ctx context.Context, req *fuse.ReleaseRequest) error } type Config struct { // Function to send debug log messages to. If nil, use fuse.Debug. // Note that changing this or fuse.Debug may not affect existing // calls to Serve. // // See fuse.Debug for the rules that log functions must follow. Debug func(msg interface{}) // Function to create new contexts. If nil, use // context.Background. // // Note that changing this may not affect existing calls to Serve. GetContext func() context.Context } // New returns a new FUSE server ready to serve this kernel FUSE // connection. // // Config may be nil. 
func New(conn *fuse.Conn, config *Config) *Server { s := &Server{ conn: conn, req: map[fuse.RequestID]*serveRequest{}, nodeRef: map[Node]fuse.NodeID{}, dynamicInode: GenerateDynamicInode, } if config != nil { s.debug = config.Debug s.context = config.GetContext } if s.debug == nil { s.debug = fuse.Debug } if s.context == nil { s.context = context.Background } return s } type Server struct { // set in New conn *fuse.Conn debug func(msg interface{}) context func() context.Context // set once at Serve time fs FS dynamicInode func(parent uint64, name string) uint64 // state, protected by meta meta sync.Mutex req map[fuse.RequestID]*serveRequest node []*serveNode nodeRef map[Node]fuse.NodeID handle []*serveHandle freeNode []fuse.NodeID freeHandle []fuse.HandleID nodeGen uint64 // Used to ensure worker goroutines finish before Serve returns wg sync.WaitGroup } // Serve serves the FUSE connection by making calls to the methods // of fs and the Nodes and Handles it makes available. It returns only // when the connection has been closed or an unexpected error occurs. func (s *Server) Serve(fs FS) error { defer s.wg.Wait() // Wait for worker goroutines to complete before return s.fs = fs if dyn, ok := fs.(FSInodeGenerator); ok { s.dynamicInode = dyn.GenerateInode } root, err := fs.Root() if err != nil { return fmt.Errorf("cannot obtain root node: %v", err) } // Recognize the root node if it's ever returned from Lookup, // passed to Invalidate, etc. s.nodeRef[root] = 1 s.node = append(s.node, nil, &serveNode{ inode: 1, generation: s.nodeGen, node: root, refs: 1, }) s.handle = append(s.handle, nil) for { req, err := s.conn.ReadRequest() if err != nil { if err == io.EOF { break } return err } s.wg.Add(1) go func() { defer s.wg.Done() s.serve(req) }() } return nil } // Serve serves a FUSE connection with the default settings. See // Server.Serve. 
func Serve(c *fuse.Conn, fs FS) error { server := New(c, nil) return server.Serve(fs) } type nothing struct{} type serveRequest struct { Request fuse.Request cancel func() } type serveNode struct { inode uint64 generation uint64 node Node refs uint64 // Delay freeing the NodeID until waitgroup is done. This allows // using the NodeID for short periods of time without holding the // Server.meta lock. // // Rules: // // - hold Server.meta while calling wg.Add, then unlock // - do NOT try to reacquire Server.meta wg sync.WaitGroup } func (sn *serveNode) attr(ctx context.Context, attr *fuse.Attr) error { err := nodeAttr(ctx, sn.node, attr) if attr.Inode == 0 { attr.Inode = sn.inode } return err } type serveHandle struct { handle Handle readData []byte nodeID fuse.NodeID } // NodeRef is deprecated. It remains here to decrease code churn on // FUSE library users. You may remove it from your program now; // returning the same Node values are now recognized automatically, // without needing NodeRef. 
type NodeRef struct{} func (c *Server) saveNode(inode uint64, node Node) (id fuse.NodeID, gen uint64) { c.meta.Lock() defer c.meta.Unlock() if id, ok := c.nodeRef[node]; ok { sn := c.node[id] sn.refs++ return id, sn.generation } sn := &serveNode{inode: inode, node: node, refs: 1} if n := len(c.freeNode); n > 0 { id = c.freeNode[n-1] c.freeNode = c.freeNode[:n-1] c.node[id] = sn c.nodeGen++ } else { id = fuse.NodeID(len(c.node)) c.node = append(c.node, sn) } sn.generation = c.nodeGen c.nodeRef[node] = id return id, sn.generation } func (c *Server) saveHandle(handle Handle, nodeID fuse.NodeID) (id fuse.HandleID) { c.meta.Lock() shandle := &serveHandle{handle: handle, nodeID: nodeID} if n := len(c.freeHandle); n > 0 { id = c.freeHandle[n-1] c.freeHandle = c.freeHandle[:n-1] c.handle[id] = shandle } else { id = fuse.HandleID(len(c.handle)) c.handle = append(c.handle, shandle) } c.meta.Unlock() return } type nodeRefcountDropBug struct { N uint64 Refs uint64 Node fuse.NodeID } func (n *nodeRefcountDropBug) String() string { return fmt.Sprintf("bug: trying to drop %d of %d references to %v", n.N, n.Refs, n.Node) } func (c *Server) dropNode(id fuse.NodeID, n uint64) (forget bool) { c.meta.Lock() defer c.meta.Unlock() snode := c.node[id] if snode == nil { // this should only happen if refcounts kernel<->us disagree // *and* two ForgetRequests for the same node race each other; // this indicates a bug somewhere c.debug(nodeRefcountDropBug{N: n, Node: id}) // we may end up triggering Forget twice, but that's better // than not even once, and that's the best we can do return true } if n > snode.refs { c.debug(nodeRefcountDropBug{N: n, Refs: snode.refs, Node: id}) n = snode.refs } snode.refs -= n if snode.refs == 0 { snode.wg.Wait() c.node[id] = nil delete(c.nodeRef, snode.node) c.freeNode = append(c.freeNode, id) return true } return false } func (c *Server) dropHandle(id fuse.HandleID) { c.meta.Lock() c.handle[id] = nil c.freeHandle = append(c.freeHandle, id) c.meta.Unlock() 
} type missingHandle struct { Handle fuse.HandleID MaxHandle fuse.HandleID } func (m missingHandle) String() string { return fmt.Sprint("missing handle: ", m.Handle, m.MaxHandle) } // Returns nil for invalid handles. func (c *Server) getHandle(id fuse.HandleID) (shandle *serveHandle) { c.meta.Lock() defer c.meta.Unlock() if id < fuse.HandleID(len(c.handle)) { shandle = c.handle[uint(id)] } if shandle == nil { c.debug(missingHandle{ Handle: id, MaxHandle: fuse.HandleID(len(c.handle)), }) } return } type request struct { Op string Request *fuse.Header In interface{} `json:",omitempty"` } func (r request) String() string { return fmt.Sprintf("<- %s", r.In) } type logResponseHeader struct { ID fuse.RequestID } func (m logResponseHeader) String() string { return fmt.Sprintf("ID=%v", m.ID) } type response struct { Op string Request logResponseHeader Out interface{} `json:",omitempty"` // Errno contains the errno value as a string, for example "EPERM". Errno string `json:",omitempty"` // Error may contain a free form error message. 
Error string `json:",omitempty"` } func (r response) errstr() string { s := r.Errno if r.Error != "" { // prefix the errno constant to the long form message s = s + ": " + r.Error } return s } func (r response) String() string { switch {<|fim▁hole|> case r.Errno != "": return fmt.Sprintf("-> [%v] %s error=%s", r.Request, r.Op, r.errstr()) case r.Out != nil: // make sure (seemingly) empty values are readable switch r.Out.(type) { case string: return fmt.Sprintf("-> [%v] %s %q", r.Request, r.Op, r.Out) case []byte: return fmt.Sprintf("-> [%v] %s [% x]", r.Request, r.Op, r.Out) default: return fmt.Sprintf("-> [%v] %v", r.Request, r.Out) } default: return fmt.Sprintf("-> [%v] %s", r.Request, r.Op) } } type notification struct { Op string Node fuse.NodeID Out interface{} `json:",omitempty"` Err string `json:",omitempty"` } func (n notification) String() string { var buf bytes.Buffer fmt.Fprintf(&buf, "=> %s %v", n.Op, n.Node) if n.Out != nil { // make sure (seemingly) empty values are readable switch n.Out.(type) { case string: fmt.Fprintf(&buf, " %q", n.Out) case []byte: fmt.Fprintf(&buf, " [% x]", n.Out) default: fmt.Fprintf(&buf, " %s", n.Out) } } if n.Err != "" { fmt.Fprintf(&buf, " Err:%v", n.Err) } return buf.String() } type logMissingNode struct { MaxNode fuse.NodeID } func opName(req fuse.Request) string { t := reflect.Indirect(reflect.ValueOf(req)).Type() s := t.Name() s = strings.TrimSuffix(s, "Request") return s } type logLinkRequestOldNodeNotFound struct { Request *fuse.Header In *fuse.LinkRequest } func (m *logLinkRequestOldNodeNotFound) String() string { return fmt.Sprintf("In LinkRequest (request %v), node %d not found", m.Request.Hdr().ID, m.In.OldNode) } type renameNewDirNodeNotFound struct { Request *fuse.Header In *fuse.RenameRequest } func (m *renameNewDirNodeNotFound) String() string { return fmt.Sprintf("In RenameRequest (request %v), node %d not found", m.Request.Hdr().ID, m.In.NewDir) } type handlerPanickedError struct { Request interface{} Err 
interface{} } var _ error = handlerPanickedError{} func (h handlerPanickedError) Error() string { return fmt.Sprintf("handler panicked: %v", h.Err) } var _ fuse.ErrorNumber = handlerPanickedError{} func (h handlerPanickedError) Errno() fuse.Errno { if err, ok := h.Err.(fuse.ErrorNumber); ok { return err.Errno() } return fuse.DefaultErrno } type handleNotReaderError struct { handle Handle } var _ error = handleNotReaderError{} func (e handleNotReaderError) Error() string { return fmt.Sprintf("handle has no Read: %T", e.handle) } var _ fuse.ErrorNumber = handleNotReaderError{} func (e handleNotReaderError) Errno() fuse.Errno { return fuse.ENOTSUP } func initLookupResponse(s *fuse.LookupResponse) { s.EntryValid = entryValidTime } func (c *Server) serve(r fuse.Request) { ctx, cancel := context.WithCancel(c.context()) defer cancel() req := &serveRequest{Request: r, cancel: cancel} c.debug(request{ Op: opName(r), Request: r.Hdr(), In: r, }) log.WithFields(log.Fields{"Op": opName(r), "Hdr": r.Hdr(), "Request": r}).Debug("Serve: ") var node Node var snode *serveNode c.meta.Lock() hdr := r.Hdr() if id := hdr.Node; id != 0 { if id < fuse.NodeID(len(c.node)) { snode = c.node[uint(id)] } if snode == nil { c.meta.Unlock() c.debug(response{ Op: opName(r), Request: logResponseHeader{ID: hdr.ID}, Error: fuse.ESTALE.ErrnoName(), // this is the only place that sets both Error and // Out; not sure if i want to do that; might get rid // of len(c.node) things altogether Out: logMissingNode{ MaxNode: fuse.NodeID(len(c.node)), }, }) r.RespondError(fuse.ESTALE) return } node = snode.node } if c.req[hdr.ID] != nil { // This happens with OSXFUSE. Assume it's okay and // that we'll never see an interrupt for this one. // Otherwise everything wedges. TODO: Report to OSXFUSE? // // TODO this might have been because of missing done() calls } else { c.req[hdr.ID] = req } c.meta.Unlock() // Call this before responding. 
// After responding is too late: we might get another request // with the same ID and be very confused. done := func(resp interface{}) { msg := response{ Op: opName(r), Request: logResponseHeader{ID: hdr.ID}, } if err, ok := resp.(error); ok { msg.Error = err.Error() if ferr, ok := err.(fuse.ErrorNumber); ok { errno := ferr.Errno() msg.Errno = errno.ErrnoName() if errno == err { // it's just a fuse.Errno with no extra detail; // skip the textual message for log readability msg.Error = "" } } else { msg.Errno = fuse.DefaultErrno.ErrnoName() } } else { msg.Out = resp } c.debug(msg) c.meta.Lock() delete(c.req, hdr.ID) c.meta.Unlock() } defer func() { if rec := recover(); rec != nil { const size = 1 << 16 buf := make([]byte, size) n := runtime.Stack(buf, false) buf = buf[:n] log.Printf("fuse: panic in handler for %v: %v\n%s", r, rec, buf) err := handlerPanickedError{ Request: r, Err: rec, } done(err) r.RespondError(err) } }() switch r := r.(type) { default: log.WithFields(log.Fields{"Request": r}).Info("Serve: Unknown request", r) // Note: To FUSE, ENOSYS means "this server never implements this request." // It would be inappropriate to return ENOSYS for other operations in this // switch that might only be unavailable in some contexts, not all. done(fuse.ENOSYS) r.RespondError(fuse.ENOSYS) case *fuse.StatfsRequest: log.Debug("Serve: StatfsRequest") s := &fuse.StatfsResponse{} if fs, ok := c.fs.(FSStatfser); ok { if err := fs.Statfs(ctx, r, s); err != nil { done(err) r.RespondError(err) break } } done(s) r.Respond(s) // Node operations. 
case *fuse.GetattrRequest: log.Debug("Serve: GetattrRequest") s := &fuse.GetattrResponse{} if n, ok := node.(NodeGetattrer); ok { if err := n.Getattr(ctx, r, s); err != nil { done(err) r.RespondError(err) break } } else { if err := snode.attr(ctx, &s.Attr); err != nil { done(err) r.RespondError(err) break } } done(s) r.Respond(s) case *fuse.SetattrRequest: log.Debug("Serve: SetattrRequest") s := &fuse.SetattrResponse{} if n, ok := node.(NodeSetattrer); ok { if err := n.Setattr(ctx, r, s); err != nil { done(err) r.RespondError(err) break } if err := snode.attr(ctx, &s.Attr); err != nil { done(err) r.RespondError(err) break } done(s) r.Respond(s) break } if err := snode.attr(ctx, &s.Attr); err != nil { done(err) r.RespondError(err) break } done(s) r.Respond(s) case *fuse.SymlinkRequest: log.Debug("Serve: SymlinkRequest") s := &fuse.SymlinkResponse{} initLookupResponse(&s.LookupResponse) n, ok := node.(NodeSymlinker) if !ok { done(fuse.EIO) // XXX or EPERM like Mkdir? r.RespondError(fuse.EIO) break } n2, err := n.Symlink(ctx, r) if err != nil { done(err) r.RespondError(err) break } if err := c.saveLookup(ctx, &s.LookupResponse, snode, r.NewName, n2); err != nil { done(err) r.RespondError(err) break } done(s) r.Respond(s) case *fuse.ReadlinkRequest: log.Debug("Serve: ReadlinkRequest") n, ok := node.(NodeReadlinker) if !ok { done(fuse.EIO) /// XXX or EPERM? r.RespondError(fuse.EIO) break } target, err := n.Readlink(ctx, r) if err != nil { done(err) r.RespondError(err) break } done(target) r.Respond(target) case *fuse.LinkRequest: log.Debug("Serve: LinkRequest") n, ok := node.(NodeLinker) if !ok { done(fuse.EIO) /// XXX or EPERM? 
r.RespondError(fuse.EIO) break } c.meta.Lock() var oldNode *serveNode if int(r.OldNode) < len(c.node) { oldNode = c.node[r.OldNode] } c.meta.Unlock() if oldNode == nil { c.debug(logLinkRequestOldNodeNotFound{ Request: r.Hdr(), In: r, }) done(fuse.EIO) r.RespondError(fuse.EIO) break } n2, err := n.Link(ctx, r, oldNode.node) if err != nil { done(err) r.RespondError(err) break } s := &fuse.LookupResponse{} initLookupResponse(s) if err := c.saveLookup(ctx, s, snode, r.NewName, n2); err != nil { done(err) r.RespondError(err) break } done(s) r.Respond(s) case *fuse.RemoveRequest: log.Debug("Serve: RemoveRequest") n, ok := node.(NodeRemover) if !ok { done(fuse.EIO) /// XXX or EPERM? r.RespondError(fuse.EIO) break } err := n.Remove(ctx, r) if err != nil { done(err) r.RespondError(err) break } done(nil) r.Respond() case *fuse.AccessRequest: log.Debug("Serve: AccessRequest") if n, ok := node.(NodeAccesser); ok { if err := n.Access(ctx, r); err != nil { done(err) r.RespondError(err) break } } done(nil) r.Respond() case *fuse.LookupRequest: log.Debug("Serve: LookupRequest") var n2 Node var err error s := &fuse.LookupResponse{} initLookupResponse(s) if n, ok := node.(NodeStringLookuper); ok { n2, err = n.Lookup(ctx, r.Name) } else if n, ok := node.(NodeRequestLookuper); ok { n2, err = n.Lookup(ctx, r, s) } else { done(fuse.ENOENT) r.RespondError(fuse.ENOENT) break } if err != nil { done(err) r.RespondError(err) break } if err := c.saveLookup(ctx, s, snode, r.Name, n2); err != nil { done(err) r.RespondError(err) break } done(s) r.Respond(s) case *fuse.MkdirRequest: log.Debug("Serve: MkdirRequest") s := &fuse.MkdirResponse{} initLookupResponse(&s.LookupResponse) n, ok := node.(NodeMkdirer) if !ok { done(fuse.EPERM) r.RespondError(fuse.EPERM) break } n2, err := n.Mkdir(ctx, r) if err != nil { done(err) r.RespondError(err) break } if err := c.saveLookup(ctx, &s.LookupResponse, snode, r.Name, n2); err != nil { done(err) r.RespondError(err) break } done(s) r.Respond(s) case 
*fuse.OpenRequest: log.Debug("Serve: OpenRequest") s := &fuse.OpenResponse{} var h2 Handle if n, ok := node.(NodeOpener); ok { hh, err := n.Open(ctx, r, s) if err != nil { done(err) r.RespondError(err) break } h2 = hh } else { log.Debug("Serve: OpenRequest: NodeOpener DONT EXIST") h2 = node } s.Handle = c.saveHandle(h2, hdr.Node) done(s) r.Respond(s) case *fuse.CreateRequest: log.Debug("Serve: CreateRequest") n, ok := node.(NodeCreater) if !ok { // If we send back ENOSYS, FUSE will try mknod+open. done(fuse.EPERM) r.RespondError(fuse.EPERM) break } s := &fuse.CreateResponse{OpenResponse: fuse.OpenResponse{}} initLookupResponse(&s.LookupResponse) n2, h2, err := n.Create(ctx, r, s) if err != nil { done(err) r.RespondError(err) break } if err := c.saveLookup(ctx, &s.LookupResponse, snode, r.Name, n2); err != nil { done(err) r.RespondError(err) break } s.Handle = c.saveHandle(h2, hdr.Node) done(s) r.Respond(s) case *fuse.GetxattrRequest: log.Debug("Serve: GetxattrRequest") n, ok := node.(NodeGetxattrer) if !ok { done(fuse.ENOTSUP) r.RespondError(fuse.ENOTSUP) break } s := &fuse.GetxattrResponse{} err := n.Getxattr(ctx, r, s) if err != nil { done(err) r.RespondError(err) break } if r.Size != 0 && uint64(len(s.Xattr)) > uint64(r.Size) { done(fuse.ERANGE) r.RespondError(fuse.ERANGE) break } done(s) r.Respond(s) case *fuse.ListxattrRequest: log.Debug("Serve: ListxattrRequest") n, ok := node.(NodeListxattrer) if !ok { done(fuse.ENOTSUP) r.RespondError(fuse.ENOTSUP) break } s := &fuse.ListxattrResponse{} err := n.Listxattr(ctx, r, s) if err != nil { done(err) r.RespondError(err) break } if r.Size != 0 && uint64(len(s.Xattr)) > uint64(r.Size) { done(fuse.ERANGE) r.RespondError(fuse.ERANGE) break } done(s) r.Respond(s) case *fuse.SetxattrRequest: log.Debug("Serve: SetxattrRequest") n, ok := node.(NodeSetxattrer) if !ok { done(fuse.ENOTSUP) r.RespondError(fuse.ENOTSUP) break } err := n.Setxattr(ctx, r) if err != nil { done(err) r.RespondError(err) break } done(nil) r.Respond() 
case *fuse.RemovexattrRequest: log.Debug("Serve: RemovexattrRequest") n, ok := node.(NodeRemovexattrer) if !ok { done(fuse.ENOTSUP) r.RespondError(fuse.ENOTSUP) break } err := n.Removexattr(ctx, r) if err != nil { done(err) r.RespondError(err) break } done(nil) r.Respond() case *fuse.ForgetRequest: log.Debug("Serve: ForgetRequest") forget := c.dropNode(hdr.Node, r.N) if forget { n, ok := node.(NodeForgetter) if ok { n.Forget() } } done(nil) r.Respond() // Handle operations. case *fuse.ReadRequest: log.Debug("Serve: ReadRequest") shandle := c.getHandle(r.Handle) if shandle == nil { done(fuse.ESTALE) r.RespondError(fuse.ESTALE) return } handle := shandle.handle s := &fuse.ReadResponse{Data: make([]byte, 0, r.Size)} if r.Dir { if h, ok := handle.(HandleReadDirAller); ok { if shandle.readData == nil { dirs, err := h.ReadDirAll(ctx) if err != nil { done(err) r.RespondError(err) break } var data []byte for _, dir := range dirs { if dir.Inode == 0 { dir.Inode = c.dynamicInode(snode.inode, dir.Name) } data = fuse.AppendDirent(data, dir) } shandle.readData = data } fuseutil.HandleRead(r, s, shandle.readData) done(s) r.Respond(s) break } } else { if h, ok := handle.(HandleReadAller); ok { if shandle.readData == nil { data, err := h.ReadAll(ctx) if err != nil { done(err) r.RespondError(err) break } if data == nil { data = []byte{} } shandle.readData = data } fuseutil.HandleRead(r, s, shandle.readData) done(s) r.Respond(s) break } h, ok := handle.(HandleReader) if !ok { err := handleNotReaderError{handle: handle} done(err) r.RespondError(err) break } if err := h.Read(ctx, r, s); err != nil { done(err) r.RespondError(err) break } } done(s) r.Respond(s) case *fuse.WriteRequest: log.Debug("Serve: WriteRequest") shandle := c.getHandle(r.Handle) if shandle == nil { done(fuse.ESTALE) r.RespondError(fuse.ESTALE) return } s := &fuse.WriteResponse{} if h, ok := shandle.handle.(HandleWriter); ok { if err := h.Write(ctx, r, s); err != nil { done(err) r.RespondError(err) break } done(s) 
r.Respond(s) break } done(fuse.EIO) r.RespondError(fuse.EIO) case *fuse.FlushRequest: log.Debug("Serve: FlushRequest") shandle := c.getHandle(r.Handle) if shandle == nil { done(fuse.ESTALE) r.RespondError(fuse.ESTALE) return } handle := shandle.handle if h, ok := handle.(HandleFlusher); ok { if err := h.Flush(ctx, r); err != nil { done(err) r.RespondError(err) break } } done(nil) r.Respond() case *fuse.ReleaseRequest: log.Debug("Serve: ReleaseRequest") shandle := c.getHandle(r.Handle) if shandle == nil { done(fuse.ESTALE) r.RespondError(fuse.ESTALE) return } handle := shandle.handle // No matter what, release the handle. c.dropHandle(r.Handle) if h, ok := handle.(HandleReleaser); ok { if err := h.Release(ctx, r); err != nil { done(err) r.RespondError(err) break } } done(nil) r.Respond() case *fuse.DestroyRequest: log.Debug("Serve: DestroyRequest") if fs, ok := c.fs.(FSDestroyer); ok { fs.Destroy() } done(nil) r.Respond() case *fuse.RenameRequest: log.Debug("Serve: RenameRequest") c.meta.Lock() var newDirNode *serveNode if int(r.NewDir) < len(c.node) { newDirNode = c.node[r.NewDir] } c.meta.Unlock() if newDirNode == nil { c.debug(renameNewDirNodeNotFound{ Request: r.Hdr(), In: r, }) done(fuse.EIO) r.RespondError(fuse.EIO) break } n, ok := node.(NodeRenamer) if !ok { done(fuse.EIO) // XXX or EPERM like Mkdir? 
r.RespondError(fuse.EIO) break } err := n.Rename(ctx, r, newDirNode.node) if err != nil { done(err) r.RespondError(err) break } done(nil) r.Respond() case *fuse.MknodRequest: log.Debug("Serve: MknodRequest") n, ok := node.(NodeMknoder) if !ok { done(fuse.EIO) r.RespondError(fuse.EIO) break } n2, err := n.Mknod(ctx, r) if err != nil { done(err) r.RespondError(err) break } s := &fuse.LookupResponse{} initLookupResponse(s) if err := c.saveLookup(ctx, s, snode, r.Name, n2); err != nil { done(err) r.RespondError(err) break } done(s) r.Respond(s) case *fuse.FsyncRequest: log.Debug("Serve: FsyncRequest") n, ok := node.(NodeFsyncer) if !ok { done(fuse.EIO) r.RespondError(fuse.EIO) break } err := n.Fsync(ctx, r) if err != nil { done(err) r.RespondError(err) break } done(nil) r.Respond() case *fuse.InterruptRequest: log.Debug("Serve: InterruptRequest") c.meta.Lock() ireq := c.req[r.IntrID] if ireq != nil && ireq.cancel != nil { ireq.cancel() ireq.cancel = nil } c.meta.Unlock() done(nil) r.Respond() /* case *FsyncdirRequest: done(ENOSYS) r.RespondError(ENOSYS) case *GetlkRequest, *SetlkRequest, *SetlkwRequest: done(ENOSYS) r.RespondError(ENOSYS) case *BmapRequest: done(ENOSYS) r.RespondError(ENOSYS) case *SetvolnameRequest, *GetxtimesRequest, *ExchangeRequest: done(ENOSYS) r.RespondError(ENOSYS) */ } } func (c *Server) saveLookup(ctx context.Context, s *fuse.LookupResponse, snode *serveNode, elem string, n2 Node) error { if err := nodeAttr(ctx, n2, &s.Attr); err != nil { return err } if s.Attr.Inode == 0 { s.Attr.Inode = c.dynamicInode(snode.inode, elem) } s.Node, s.Generation = c.saveNode(s.Attr.Inode, n2) return nil } type invalidateNodeDetail struct { Off int64 Size int64 } func (i invalidateNodeDetail) String() string { return fmt.Sprintf("Off:%d Size:%d", i.Off, i.Size) } func errstr(err error) string { if err == nil { return "" } return err.Error() } func (s *Server) invalidateNode(node Node, off int64, size int64) error { s.meta.Lock() id, ok := s.nodeRef[node] if ok { 
snode := s.node[id] snode.wg.Add(1) defer snode.wg.Done() } s.meta.Unlock() if !ok { // This is what the kernel would have said, if we had been // able to send this message; it's not cached. return fuse.ErrNotCached } // Delay logging until after we can record the error too. We // consider a /dev/fuse write to be instantaneous enough to not // need separate before and after messages. err := s.conn.InvalidateNode(id, off, size) s.debug(notification{ Op: "InvalidateNode", Node: id, Out: invalidateNodeDetail{ Off: off, Size: size, }, Err: errstr(err), }) return err } // InvalidateNodeAttr invalidates the kernel cache of the attributes // of node. // // Returns fuse.ErrNotCached if the kernel is not currently caching // the node. func (s *Server) InvalidateNodeAttr(node Node) error { return s.invalidateNode(node, 0, 0) } // InvalidateNodeData invalidates the kernel cache of the attributes // and data of node. // // Returns fuse.ErrNotCached if the kernel is not currently caching // the node. func (s *Server) InvalidateNodeData(node Node) error { return s.invalidateNode(node, 0, -1) } // InvalidateNodeDataRange invalidates the kernel cache of the // attributes and a range of the data of node. // // Returns fuse.ErrNotCached if the kernel is not currently caching // the node. func (s *Server) InvalidateNodeDataRange(node Node, off int64, size int64) error { return s.invalidateNode(node, off, size) } type invalidateEntryDetail struct { Name string } func (i invalidateEntryDetail) String() string { return fmt.Sprintf("%q", i.Name) } // InvalidateEntry invalidates the kernel cache of the directory entry // identified by parent node and entry basename. // // Kernel may or may not cache directory listings. To invalidate // those, use InvalidateNode to invalidate all of the data for a // directory. (As of 2015-06, Linux FUSE does not cache directory // listings.) // // Returns ErrNotCached if the kernel is not currently caching the // node. 
func (s *Server) InvalidateEntry(parent Node, name string) error { s.meta.Lock() id, ok := s.nodeRef[parent] if ok { snode := s.node[id] snode.wg.Add(1) defer snode.wg.Done() } s.meta.Unlock() if !ok { // This is what the kernel would have said, if we had been // able to send this message; it's not cached. return fuse.ErrNotCached } err := s.conn.InvalidateEntry(id, name) s.debug(notification{ Op: "InvalidateEntry", Node: id, Out: invalidateEntryDetail{ Name: name, }, Err: errstr(err), }) return err } // DataHandle returns a read-only Handle that satisfies reads // using the given data. func DataHandle(data []byte) Handle { return &dataHandle{data} } type dataHandle struct { data []byte } func (d *dataHandle) ReadAll(ctx context.Context) ([]byte, error) { return d.data, nil } // GenerateDynamicInode returns a dynamic inode. // // The parent inode and current entry name are used as the criteria // for choosing a pseudorandom inode. This makes it likely the same // entry will get the same inode on multiple runs. func GenerateDynamicInode(parent uint64, name string) uint64 { h := fnv.New64a() var buf [8]byte binary.LittleEndian.PutUint64(buf[:], parent) _, _ = h.Write(buf[:]) _, _ = h.Write([]byte(name)) var inode uint64 for { inode = h.Sum64() if inode != 0 { break } // there's a tiny probability that result is zero; change the // input a little and try again _, _ = h.Write([]byte{'x'}) } return inode }<|fim▁end|>
case r.Errno != "" && r.Out != nil: return fmt.Sprintf("-> [%v] %v error=%s", r.Request, r.Out, r.errstr())
<|file_name|>index.d.ts<|end_file_name|><|fim▁begin|>/* * Copyright (c) Microsoft Corporation. All rights reserved. * Licensed under the MIT License. See License.txt in the project root for * license information. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is * regenerated. */ import { BaseResource } from 'ms-rest-azure'; import { CloudError } from 'ms-rest-azure'; import * as moment from 'moment'; export { BaseResource } from 'ms-rest-azure'; export { CloudError } from 'ms-rest-azure'; /** * @class * Initializes a new instance of the ErrorModel class. * @constructor * @member {number} [status] * @member {string} [message] */ export interface ErrorModel { status?: number; message?: string; } /** * @class * Initializes a new instance of the ParameterGroupingPostRequiredParameters class. * @constructor * Additional parameters for the ParameterGrouping_postRequired operation. * * @member {number} body * @member {string} [customHeader] * @member {number} [query] Query parameter with default. Default value: 30 . * @member {string} path Path parameter */ export interface ParameterGroupingPostRequiredParameters { body: number; customHeader?: string; query?: number; path: string; } /** * @class * Initializes a new instance of the ParameterGroupingPostOptionalParameters class. * @constructor * Additional parameters for the ParameterGrouping_postOptional operation. * * @member {string} [customHeader] * @member {number} [query] Query parameter with default. Default value: 30 . */ export interface ParameterGroupingPostOptionalParameters { customHeader?: string; query?: number; } /** * @class * Initializes a new instance of the FirstParameterGroup class. * @constructor * Additional parameters for a set of operations, such as: * ParameterGrouping_postMultiParamGroups, * ParameterGrouping_postSharedParameterGroupObject. 
* * @member {string} [headerOne] * @member {number} [queryOne] Query parameter with default. Default value: 30<|fim▁hole|>export interface FirstParameterGroup { headerOne?: string; queryOne?: number; } /** * @class * Initializes a new instance of the ParameterGroupingPostMultiParamGroupsSecondParamGroup class. * @constructor * Additional parameters for the ParameterGrouping_postMultiParamGroups * operation. * * @member {string} [headerTwo] * @member {number} [queryTwo] Query parameter with default. Default value: 30 * . */ export interface ParameterGroupingPostMultiParamGroupsSecondParamGroup { headerTwo?: string; queryTwo?: number; }<|fim▁end|>
* . */
<|file_name|>test_lockfile.py<|end_file_name|><|fim▁begin|># Copyright (c) 2005 Divmod, Inc. # See LICENSE for details.<|fim▁hole|>class LockingTestCase(unittest.TestCase): def testBasics(self): lockf = self.mktemp() lock = lockfile.FilesystemLock(lockf) self.failUnless(lock.lock()) self.failUnless(lock.clean) lock.unlock() self.failUnless(lock.lock()) self.failUnless(lock.clean) lock.unlock() def testProtection(self): lockf = self.mktemp() lock = lockfile.FilesystemLock(lockf) self.failUnless(lock.lock()) self.failUnless(lock.clean) self.failIf(lock.lock()) lock.unlock() def testBigLoop(self): lockf = self.mktemp() lock = lockfile.FilesystemLock(lockf) self.failUnless(lock.lock()) for i in xrange(500): self.failIf(lock.lock()) lock.unlock() def testIsLocked(self): lockf = self.mktemp() self.failIf(lockfile.isLocked(lockf)) lock = lockfile.FilesystemLock(lockf) self.failUnless(lock.lock()) self.failUnless(lockfile.isLocked(lockf)) lock.unlock() self.failIf(lockfile.isLocked(lockf)) # A multiprocess test would be good here, for the sake of # completeness. However, it is fairly safe to rely on the # filesystem to provide the semantics we require.<|fim▁end|>
from twisted.trial import unittest from twisted.python import lockfile
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>/** * @license * Copyright Google LLC All Rights Reserved. * * Use of this source code is governed by an MIT-style license that can be * found in the LICENSE file at https://angular.io/license */ import {Rule, SchematicsException, Tree, UpdateRecorder} from '@angular-devkit/schematics'; import {relative} from 'path'; import {getProjectTsConfigPaths} from '../../utils/project_tsconfig_paths'; import {canMigrateFile, createMigrationProgram} from '../../utils/typescript/compiler_host'; import {migrateFile} from './util'; export default function(): Rule { return async (tree: Tree) => { const {buildPaths, testPaths} = await getProjectTsConfigPaths(tree); const basePath = process.cwd(); const allPaths = [...buildPaths, ...testPaths]; <|fim▁hole|> } for (const tsconfigPath of allPaths) { runTypedFormsMigration(tree, tsconfigPath, basePath); } }; } function runTypedFormsMigration(tree: Tree, tsconfigPath: string, basePath: string) { const {program} = createMigrationProgram(tree, tsconfigPath, basePath); const typeChecker = program.getTypeChecker(); const sourceFiles = program.getSourceFiles().filter(sourceFile => canMigrateFile(basePath, sourceFile, program)); for (const sourceFile of sourceFiles) { let update: UpdateRecorder|null = null; const rewriter = (startPos: number, origLength: number, text: string) => { if (update === null) { // Lazily initialize update, because most files will not require migration. update = tree.beginUpdate(relative(basePath, sourceFile.fileName)); } update.remove(startPos, origLength); update.insertLeft(startPos, text); }; migrateFile(sourceFile, typeChecker, rewriter); if (update !== null) { tree.commitUpdate(update); } } }<|fim▁end|>
if (!allPaths.length) { throw new SchematicsException( 'Could not find any tsconfig file. Cannot migrate to Typed Forms.');
<|file_name|>target.py<|end_file_name|><|fim▁begin|># Library for RTS2 JSON calls. # (C) 2012 Petr Kubanek, Institute of Physics # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA import json class Target: def __init__(self,id,name=None): self.id = id self.name = name def reload(self): """Load target data from JSON interface.""" if self.id is None: name = None return try: data = json.getProxy().loadJson('/api/tbyid',{'id':self.id})['d'][0] self.name = data[1] except Exception,ex:<|fim▁hole|> self.name = None def get(name): """Return array with targets matching given name or target ID""" try: return json.getProxy().loadJson('/api/tbyid',{'id':int(name)})['d'] except ValueError: return json.getProxy().loadJson('/api/tbyname',{'n':name})['d'] def create(name,ra,dec): return json.getProxy().loadJson('/api/create_target', {'tn':name, 'ra':ra, 'dec':dec})['id']<|fim▁end|>
<|file_name|>resubscribe.ts<|end_file_name|><|fim▁begin|>import type { ExternalMethods } from './types' const resubscribe: ExternalMethods['resubscribe'] = async ( pool, readModelName ) => { const { dropReadModel } = pool await dropReadModel(pool, readModelName) }<|fim▁hole|> export default resubscribe<|fim▁end|>
<|file_name|>tuflowmodel.py<|end_file_name|><|fim▁begin|>""" Summary: Container and main interface for accessing the Tuflow model and a class for containing the main tuflow model files (Tcf, Tgc, etc). There are several other classes in here that are used to determine the order of the files in the model and key words for reading in the files. Author: Duncan Runnacles Created: 01 Apr 2016 Copyright: Duncan Runnacles 2016 TODO: Updates: """ from __future__ import unicode_literals from itertools import chain from ship.tuflow.tuflowfilepart import TuflowFile, TuflowKeyValue, TuflowUserVariable, TuflowModelVariable from ship.tuflow import FILEPART_TYPES as fpt from ship.utils import utilfunctions as uf import logging logger = logging.getLogger(__name__) """logging references with a __name__ set to this module.""" class TuflowModel(object): """Container for the entire loaded tuflow model. """ def __init__(self, root): """Initialise constants and dictionaries. """ self.control_files = {} """Tuflow Control File objects. All types of Tuflow Control file are stored here under the type header. Types are: TCF, TGC, TBC, ECF, TEF. TCF is slightly different to the others as it contains an additional member variable 'main_file_hash' to identify the main tcf file that was called to load the model. 
""" self._root = '' """The current directory path used to reach the run files in the model""" self.missing_model_files = [] """Contains any tcf, tgs, etc files that could not be loaded.""" self.bc_event = {} """Contains the currently acitve BC Event variables.""" self.user_variables = None """Class containing the scenario/event/variable keys and values.""" @property def root(self): return self._root @root.setter def root(self, value): self._root = value self.updateRoot(value) def checkPathsExist(self): """Test that all of the filepaths in the TuflowModel exist.""" failed = [] for file_type, file in self.control_files.items(): failed.extend(file.checkPathsExist()) return failed def updateRoot(self, root): """Update the root variable in all TuflowFile's in the model. The root variable (TuflowModel.root) is the directory that the main .tcf file is in. This is used to define the location of all other files which are usually referenced relative to each other. Note: This method will be called automatically when setting the TuflowModel.root variable. Args: root(str): the new root to set. """ for c in self.control_files.values(): c.updateRoot(root) def customPartSearch(self, control_callback, tuflow_callback=None, include_unknown=False): """Return TuflowPart's based on the return value of the callbacks. control_callback will be used as an argument in each of self.control_files' customPartSearch() methods. The tuflow_callback will be called on the combined generators returned from that method. See Also: ControlFile.customPartSearch Continuing the example in the ControlFile.customPartSearch method. This time the additinal tuflow_callback function is defined as well. callback_func must accept a TuflowPart and return a tuple of: keep-status and the return value. For example:: # This is the callback_func that we test the TuflowPart. 
It is # defined in your script def callback_func(part): # In this case we check for GIS parts and return a tuple of: # - bool(keep-status): True if it is a GIS filepart_type # - tuple: filename and parent.model_type. This can be # whatever you want though if part.filepart_type == fpt.GIS: return True, (part.filename, part.associates.parent.model_type) # Any TuflowPart's that you don't want included must return # a tuple of (False, None) else: return False, None # Here we define a function to run after the generators are returned # from callback_func. In the funcion above the return type is a # tuple, so we accept that as the arg in this function, but it will # be whatever you return from callback_func above. # This function checks to see if there are any duplicate filename's. # Note that it must return the same tuple as the other callback. # i.e. keep-status, result def tuflow_callback(part_tuple): found = [] if part_tuple[0] in found: return False, None else: return True, part_tuple[0] # Both callback's given this time results = tuflow.customPartSearch(callback, tuflow_callback=tuflowCallback) # You can now iteratre the results for r in results: print (str(r)) Args: callback_func(func): a function to run for each TuflowPart in this ControlFile's PartHolder. include_unknown=False(bool): If False any UnknownPart's will be ignored. If set to True it is the resonsibility of the callback_func to check for this and deal with it. Return: generator - containing the results of the search. """ gens = [] for c in self.control_files.values(): gens.append( c.customPartSearch(control_callback, include_unknown) ) all_gens = chain(gens[0:-1]) for a in all_gens: for val in a: if tuflow_callback: take, value = tuflow_callback(val) if take: yield[value] else: yield [val] def removeTcfModelFile(self, model_file): """Remove an existing ModelFile from 'TCF' and update ControlFile. 
Note: You can call this function directly if you want to, but it is also hooked into a callback in the TCF ControlFile. This means that when you use the standard ControlFile add/remove/replaceControlFile() methods these will be called automatically. Args: model_files(ModelFile): the ModelFile being removed. """ if not model_file in self.control_files[model_file.model_type].control_files: raise AttributeError("model_file doesn't exists in %s control_files" % model_file.model_type) self.control_files[model_file.model_type].removeControlFile(model_file) self.control_files['TCF'].parts.remove(model_file) def replaceTcfModelFile(self, model_file, control_file, replace_file): """Replace an existing ModelFile in 'TCF' and update ControlFile. Note: You can call this function directly if you want to, but it is also hooked into a callback in the TCF ControlFile. This means that when you use the standard ControlFile add/remove/replaceControlFile() methods these will be called automatically. Args: model_file(ModelFile): the replacement TuflowPart. control_file(ControlFile): containing the contents to replace the existing ControlFile. replace_file(ModelFile): the TuflowPart to be replaced. """ if model_file in self.control_files[model_file.model_type].control_files: raise AttributeError('model_file already exists in this ControlFile') self.control_files[replace_file.model_type].replaceControlFile( model_file, control_file, replace_file) self.control_files['TCF'].parts.replace(model_file, replace_file) def addTcfModelFile(self, model_file, control_file, **kwargs): """Add a new ModelFile instance to a TCF type ControlFile. Note: You can call this function directly if you want to, but it is also hooked into a callback in the TCF ControlFile. This means that when you use the standard ControlFile add/remove/replaceControlFile() methods these will be called automatically. **kwargs: after(TuflowPart): the part to add the new ModelFile after. 
before(TuflowPart): the part to add the new ModelFile before. Either after or before kwargs must be given. If both are provided after will take precedence. Args: model_file(ModelFile): the replacement ModelFile TuflowPart. control_file(ControlFile): containing the contents to replace the existing ControlFile. """<|fim▁hole|> raise AttributeError("Either 'before' or 'after' TuflowPart kwarg must be given") if model_file in self.control_files[model_file.model_type].control_files: raise AttributeError('model_file already exists in this ControlFile') self.control_files[model_file.model_type].addControlFile( model_file, control_file, **kwargs) self.control_files['TCF'].parts.add(model_file, **kwargs) # class TuflowUtils(object): # """Utility functions for dealing with TuflowModel outputs.""" # # def __init__(self): # pass # # @staticmethod # def resultsByParent(results): # """ # """ class UserVariables(object): """Container for all user defined variables. Includes variable set in the control files with 'Set somevar ==' and the scenario and event variables. Note: Only the currently active scenario and event variables will be stored in this class. """ def __init__(self): self.variable = {} self.scenario = {} self.event = {} self._names = [] self.has_cmd_args = False def add(self, filepart, vtype=None): """Add a new variables to the class. Args: filepart(TuflowModelVariables or TuflowUserVariable): Raises: TypeError - if filepart is not a TuflowModelVariable or TuflowUserVariable. ValueError - if filepart already exists. 
""" if filepart._variable_name in self._names: raise ValueError('variable already exists with that name - use replace instead') if isinstance(filepart, TuflowUserVariable): self.variable[filepart.variable_name] = filepart self._names.append(filepart.variable_name) elif isinstance(filepart, TuflowModelVariable): if filepart._variable_type == 'scenario': if filepart._variable_name == 's1' or filepart._variable_name == 's': if 's' in self._names or 's1' in self._names: raise ValueError("variable already exists with that " + "name - use replace instead\n" + "note 's' and 's1' are treated the same.") self.scenario[filepart._variable_name] = filepart self.variable[filepart._variable_name] = filepart self._names.append(filepart.variable_name) else: if filepart._variable_name == 'e1' or filepart._variable_name == 'e': if 'e' in self._names or 'e1' in self._names: raise ValueError("variable already exists with that " + "name - use replace instead\n" + "note 'e' and 'e1' are treated the same.") self.event[filepart._variable_name] = filepart self.variable[filepart._variable_name] = filepart self._names.append(filepart.variable_name) else: raise TypeError('filepart must be of type TuflowUserVariable or TuflowModelVariable') def replace(self, filepart): """Replace an existing variable. Args: filepart(TuflowModelVariables or TuflowUserVariable): Raises: TypeError - if filepart is not a TuflowModelVariable or TuflowUserVariable. ValueError - if filepart doesn't already exist. """ # Make sure it actually already exists. 
# s & s1 and e & e1 are treated as the same name - same as tuflow temp_name = filepart._variable_name if temp_name == 's' or temp_name == 's1': if not 's' in self._names and not 's1' in self._names: raise ValueError("filepart doesn't seem to exist in UserVariables.") elif temp_name == 'e' or temp_name == 'e1': if not 'e' in self._names and not 'e1' in self._names: raise ValueError("filepart doesn't seem to exist in UserVariables.") elif not filepart._variable_name in self._names: raise ValueError("filepart doesn't seem to exist in UserVariables.") # Delete the old one and call add() with the new one if temp_name == 's' or temp_name == 's1': if 's' in self.scenario.keys(): del self.scenario['s'] del self.variable['e'] if 's1' in self.scenario.keys(): del self.scenario['s1'] del self.variable['e1'] self.add(filepart, 'scenario') if temp_name == 'e' or temp_name == 'e1': if 'e' in self.scenario.keys(): del self.event['e'] del self.variable['e'] if 'e1' in self.scenario.keys(): del self.event['e1'] del self.variable['e1'] self.add(filepart, 'event') else: del self.variable[temp_name] self.add(filepart) def variablesToDict(self): """Get the values of the variables. Note that, like tuflow, scenario and event values will be includes in the variables dict returned. {'name1': var1, 'name2': var2, 'nameN': name2} Return: dict - with variables names as key and values as values. """ out = {} for vkey, vval in self.variable.items(): out[vkey] = vval.variable return out def seValsToDict(self): """Get the values of the scenario and event variables. Returns the currently active scenario and event values only - not the placeholder keys - in a dictionary in the format:: {'scenario': [val1, val2, valN], 'event': [val1, val2, valN]} Return: dict - of scenario and event values. 
""" scenario = [s.variable for s in self.scenario.values()] event = [e.variable for e in self.event.values()] return {'scenario': scenario, 'event': event} def remove(self, key): """Remove the variable stored at the given key. Args: key(str): key for either the scenario, event, or variables dict. """ if key in self.scenario.keys(): self._names.remove(self.scenario[key]._variable_name) del self.scenario[key] if key in self.event.keys(): self._names.remove(self.scenario[key]._variable_name) del self.event[key] if key in self.variable.keys(): self._names.remove(self.scenario[key]._variable_name) del self.variable[key] def get(self, key, vtype=None): """Return the TuflowPart at the given key. Args: key(str): the key associated with the required TuflowPart. vtype=None(str): the type of part to return. If None it will return a 'variable' type. Other options are 'scenario' and 'event'. Return: TuflowPart - TuflowModelVariable or TuflowUserVariable type. """ if vtype == 'scenario': if not key in self.scenario.keys(): raise KeyError('key %s is not in scenario keys' % key) return self.scenario[key] elif vtype == 'event': if not key in self.event.keys(): raise KeyError('key %s is not in event keys' % key) return self.event[key] else: if not key in self.variable.keys(): raise KeyError('key %s is not in variable keys' % key) return self.variable[key] class TuflowFilepartTypes(object): """Contains key words from Tuflow files for lookup. This acts as a lookup table for the TuflowLoader class more than anything else. It is kept here as that seems to be most sensible. Contains methods for identifying whether a command given to it is known to the library and what type it is. i.e. what UNIT_CATEGORY it falls into. 
""" def __init__(self): """Initialise the categories and known keywords""" self.ambiguous = { 'WRITE CHECK FILES': [ ['WRITE CHECK FILES INCLUDE', fpt.VARIABLE], ['WRITE CHECK FILES EXCLUDE', fpt.VARIABLE] ], # 'WRITE CHECK FILES INCLUDE': ['WRITE CHECK FILES', fpt.RESULT], # 'WRITE CHECK FILES EXCLUDE': ['WRITE CHECK FILES', fpt.RESULT], 'DEFINE EVENT': [['DEFINE OUTPUT ZONE', fpt.SECTION_LOGIC]], 'DEFINE OUTPUT ZONE': [['DEFINE EVENT', fpt.EVENT_LOGIC]], # 'START 1D DOMAIN': ['START 2D DOMAIN', fpt.SECTION_LOGIC], # 'START 2D DOMAIN': ['START 1D DOMAIN', fpt.SECTION_LOGIC], } self.ambiguous_keys = self.ambiguous.keys() self.types = {} self.types[fpt.MODEL] = [ 'GEOMETRY CONTROL FILE', 'BC CONTROL FILE', 'READ GEOMETRY CONTROL FILE', 'READ BC CONTROL FILE', 'READ FILE', 'ESTRY CONTROL FILE', 'EVENT FILE' ] self.types[fpt.RESULT] = [ 'OUTPUT FOLDER', 'WRITE CHECK FILES', 'LOG FOLDER' ] self.types[fpt.GIS] = [ 'READ MI', 'READ GIS', 'READ GRID', 'SHP PROJECTION', 'MI PROJECTION' ] self.types[fpt.DATA] = ['READ MATERIALS FILE', 'BC DATABASE'] self.types[fpt.VARIABLE] = [ 'START TIME', 'END TIME', 'TIMESTEP', 'SET IWL', 'MAP OUTPUT INTERVAL', 'MAP OUTPUT DATA TYPES', 'CELL WET/DRY DEPTH', 'CELL SIDE WET/DRY DEPTH', 'SET IWL', 'TIME SERIES OUTPUT INTERVAL', 'SCREEN/LOG DISPLAY INTERVAL', 'CSV TIME', 'START OUTPUT', 'OUTPUT INTERVAL', 'STRUCTURE LOSSES', 'WLL APPROACH', 'WLL ADJUST XS WIDTH', 'WLL ADDITIONAL POINTS', 'DEPTH LIMIT FACTOR', 'CELL SIZE', 'SET CODE', 'GRID SIZE (X,Y)', 'SET ZPTS', 'SET MAT', 'MASS BALANCE OUTPUT', 'GIS FORMAT', 'MAP OUTPUT FORMATS', 'END MAT OUTPUT', 'ASC START MAP OUTPUT', 'ASC END MAP OUTPUT', 'XMDF MAP OUTPUT DATA TYPES', 'WRITE PO ONLINE', 'ASC MAP OUTPUT DATA TYPES', 'WRITE CHECK FILES INCLUDE', 'WRITE CHECK FILES EXCLUDE', 'STORE MAXIMUMS AND MINIMUMS' ] self.types[fpt.IF_LOGIC] = [ 'IF SCENARIO', 'ELSE IF SCENARIO', 'IF EVENT', 'ELSE IF EVENT', 'END IF', 'ELSE' ] self.types[fpt.EVENT_LOGIC] = ['DEFINE EVENT', 'END DEFINE'] 
self.types[fpt.SECTION_LOGIC] = ['DEFINE OUTPUT ZONE', 'END DEFINE'] self.types[fpt.DOMAIN_LOGIC] = [ 'START 1D DOMAIN', 'END 1D DOMAIN', 'START 2D DOMAIN', 'END 2D DOMAIN' ] self.types[fpt.USER_VARIABLE] = ['SET VARIABLE'] self.types[fpt.EVENT_VARIABLE] = [ 'BC EVENT TEXT', 'BC EVENT NAME', 'BC EVENT SOURCE', ] self.types[fpt.MODEL_VARIABLE] = ['MODEL SCENARIOS', 'MODEL EVENTS', ] def find(self, find_val, file_type='*'): """Checks if the given value is known or not. The word to look for doesn't have to be an exact match to the given value, it only has to start with it. This means that we don't need to know whether it is a 'command == something' or just 'command something' (like: 'Estry Control File Auto') at this point. This helps to avoid unnecessary repitition. i.e. many files are like: 'READ GIS' + another word. All of them are GIS type files so they all get dealt with in the same way. In some edge cases there are command that start the same. These are dealt with by secondary check to see if the next character is '=' or not. Args: find_val (str): the value attempt to find in the lookup table. file_type (int): Optional - reduce the lookup time by providing the type (catgory) to look for the value in. These are the constants (MODEL, GIS, etc). Returns: Tuple (Bool, int) True if found. Int is the class constant indicating what type the value was found under. 
""" find_val = find_val.upper() if file_type == '*': for key, part_type in self.types.items(): found = [i for i in part_type if find_val.startswith(i)] if found: retval = key if found[0] in self.ambiguous_keys: retval = self._checkAmbiguity(found[0], find_val, key) return True, retval return (False, None) else: found = [i for i in self.types[file_type] if find_val.startswith(i)] if found: return True, file_type return (False, None) def _checkAmbiguity(self, found, find_val, key): """Resolves any ambiguity in the keys.""" f = find_val.replace(' ', '') f2 = found.replace(' ', '') + '=' if f.startswith(f2): return key else: alternatives = self.ambiguous[found] for i, a in enumerate(alternatives): if find_val.startswith(a[0]): return self.ambiguous[found][i][1] return key<|fim▁end|>
if not 'after' in kwargs.keys() and not 'before' in kwargs.keys():
<|file_name|>catalog_item.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from functools import partial from types import NoneType from navmazing import NavigateToSibling, NavigateToAttribute from cfme.exceptions import DestinationNotFound from cfme.fixtures import pytest_selenium as sel from cfme.provisioning import provisioning_form as request_form from cfme.web_ui import ( Form, Select, Table, accordion, fill, paginator, flash, form_buttons, tabstrip, DHTMLSelect, Input, Tree, AngularSelect, BootstrapTreeview, toolbar as tb, match_location, CheckboxTable) from utils import version, fakeobject_or_object from utils.appliance import Navigatable from utils.appliance.implementations.ui import CFMENavigateStep, navigate_to, navigator from utils.update import Updateable from utils.pretty import Pretty from utils.version import current_version cfg_btn = partial(tb.select, "Configuration") policy_btn = partial(tb.select, "Policy") accordion_tree = partial(accordion.tree, "Catalog Items") dynamic_tree = Tree("//div[@id='basic_info_div']//ul[@class='dynatree-container']") entry_tree = BootstrapTreeview('automate_treebox') listview_table = CheckboxTable(table_locator='//div[@id="list_grid"]/table') template_select_form = Form( fields=[ ('template_table', Table('//div[@id="prov_vm_div"]/table')), ('add_button', form_buttons.add), ('cancel_button', form_buttons.cancel) ] ) # Forms basic_info_form = Form( fields=[ ('name_text', Input("name")), ('description_text', Input("description")), ('display_checkbox', Input("display")), ('select_catalog', AngularSelect('catalog_id')), ('select_dialog', AngularSelect('dialog_id')), ('select_orch_template', AngularSelect('template_id')), ('select_provider', AngularSelect('manager_id')), ('select_config_template', AngularSelect('template_id')), ('field_entry_point', Input("fqname")), ('retirement_entry_point', Input("retire_fqname")), ('edit_button', form_buttons.save), ('apply_btn', '//a[normalize-space(.)="Apply"]') ]) # TODO: 
Replace with Taggable edit_tags_form = Form(<|fim▁hole|> ("select_tag", AngularSelect('tag_cat')), ("select_value", AngularSelect('tag_add')) ]) detail_form = Form( fields=[ ('long_desc', Input('long_description')), ]) resources_form = Form( fields=[ ('choose_resource', Select("//select[@id='resource_id']")), ('add_button', form_buttons.add), ('save_button', form_buttons.save) ]) button_group_form = Form( fields=[ ('btn_group_text', Input("name")), ('btn_group_hvr_text', Input("description")), ('add_button', form_buttons.add) ]) button_form = Form( fields=[ ('btn_text', Input("name")), ('btn_hvr_text', Input("description")), ('select_dialog', Select("//select[@id='dialog_id']")), ('system_process', Select("//select[@id='instance_name']")), ('request', Input("object_request")), ('add_button', form_buttons.add) ]) match_page = partial(match_location, title='Catalogs', controller='catalog') class CatalogItem(Updateable, Pretty, Navigatable): pretty_attrs = ['name', 'item_type', 'catalog', 'catalog_name', 'provider', 'domain'] def __init__(self, item_type=None, vm_name=None, name=None, description=None, display_in=False, catalog=None, dialog=None, catalog_name=None, orch_template=None, provider_type=None, provider=None, config_template=None, prov_data=None, domain="ManageIQ (Locked)", appliance=None): self.item_type = item_type self.vm_name = vm_name self.name = name self.description = description self.display_in = display_in self.catalog = catalog self.dialog = dialog self.catalog_name = catalog_name self.orch_template = orch_template self.provider = provider self.config_template = config_template self.provider_type = provider_type self.provisioning_data = prov_data self.domain = domain Navigatable.__init__(self, appliance=appliance) def __str__(self): return self.name def create(self): # Create has sequential forms, the first is only the provider type navigate_to(self, 'Add') # For element not found exception (To be removed) sel.sleep(5) 
sel.select("//select[@id='st_prov_type']", self.provider_type or self.item_type or 'Generic') sel.wait_for_element(basic_info_form.name_text) catalog = fakeobject_or_object(self.catalog, "name", "Unassigned") dialog = fakeobject_or_object(self.dialog, "name", "No Dialog") # Need to provide the (optional) provider name to the form, not the object provider_name = None provider_required_types = ['AnsibleTower', 'Orchestration'] if self.item_type in provider_required_types \ or self.provider_type in provider_required_types: provider_name = self.provider.name # For tests where orchestration template is None orch_template = None if self.orch_template: orch_template = self.orch_template.template_name fill(basic_info_form, {'name_text': self.name, 'description_text': self.description, 'display_checkbox': self.display_in, 'select_catalog': catalog.name, 'select_dialog': dialog.name, 'select_orch_template': orch_template, 'select_provider': provider_name, 'select_config_template': self.config_template}) if not (self.item_type in provider_required_types): sel.click(basic_info_form.field_entry_point) if version.current_version() < "5.7": dynamic_tree.click_path("Datastore", self.domain, "Service", "Provisioning", "StateMachines", "ServiceProvision_Template", "default") else: entry_tree.click_path("Datastore", self.domain, "Service", "Provisioning", "StateMachines", "ServiceProvision_Template", "default") sel.click(basic_info_form.apply_btn) if version.current_version() >= "5.7" and self.item_type == "AnsibleTower": sel.click(basic_info_form.retirement_entry_point) entry_tree.click_path("Datastore", self.domain, "Service", "Retirement", "StateMachines", "ServiceRetirement", "Generic") sel.click(basic_info_form.apply_btn) if self.catalog_name is not None \ and self.provisioning_data is not None \ and not isinstance(self.provider, NoneType): tabstrip.select_tab("Request Info") tabstrip.select_tab("Catalog") template = template_select_form.template_table.find_row_by_cells({ 
'Name': self.catalog_name, 'Provider': self.provider.name }) sel.click(template) request_form.fill(self.provisioning_data) sel.click(template_select_form.add_button) def update(self, updates): navigate_to(self, 'Edit') fill(basic_info_form, {'name_text': updates.get('name', None), 'description_text': updates.get('description', None)}, action=basic_info_form.edit_button) flash.assert_success_message('Service Catalog Item "{}" was saved'.format(self.name)) def delete(self, from_dest='All'): if from_dest in navigator.list_destinations(self): navigate_to(self, from_dest) else: msg = 'cfme.services.catalogs.catalog_item does not have destination {}'\ .format(from_dest) raise DestinationNotFound(msg) if from_dest == 'All': # select the row for deletion listview_table.select_row_by_cells({'Name': self.name, 'Description': self.description}) cfg_btn(version.pick({version.LOWEST: 'Remove Items from the VMDB', '5.7': 'Remove Catalog Items'}), invokes_alert=True) if from_dest == 'Details': cfg_btn(version.pick({version.LOWEST: 'Remove Item from the VMDB', '5.7': 'Remove Catalog Item'}), invokes_alert=True) sel.handle_alert() flash.assert_success_message(version.pick( {version.LOWEST: 'The selected 1 Catalog Item were deleted', '5.7': 'The selected 1 Catalog Item was deleted'})) def add_button_group(self): navigate_to(self, 'Details') cfg_btn("Add a new Button Group", invokes_alert=True) sel.wait_for_element(button_group_form.btn_group_text) fill(button_group_form, {'btn_group_text': "group_text", 'btn_group_hvr_text': "descr"}) if current_version() > "5.5": select = AngularSelect("button_image") select.select_by_visible_text("Button Image 1") else: select = DHTMLSelect("div#button_div") select.select_by_value(1) sel.click(button_group_form.add_button) flash.assert_success_message('Buttons Group "descr" was added') def add_button(self): navigate_to(self, 'Details') cfg_btn('Add a new Button', invokes_alert=True) sel.wait_for_element(button_form.btn_text) fill(button_form, 
{'btn_text': "btn_text", 'btn_hvr_text': "btn_descr"}) if current_version() > "5.5": select = AngularSelect("button_image") select.select_by_visible_text("Button Image 1") else: select = DHTMLSelect("div#button_div") select.select_by_value(2) fill(button_form, {'select_dialog': self.dialog, 'system_process': "Request", 'request': "InspectMe"}) sel.click(button_form.add_button) flash.assert_success_message('Button "btn_descr" was added') def edit_tags(self, tag, value): navigate_to(self, 'Details') policy_btn('Edit Tags', invokes_alert=True) fill(edit_tags_form, {'select_tag': tag, 'select_value': value}, action=form_buttons.save) flash.assert_success_message('Tag edits were successfully saved') class CatalogBundle(Updateable, Pretty, Navigatable): pretty_attrs = ['name', 'catalog', 'dialog'] def __init__(self, name=None, description=None, display_in=None, catalog=None, dialog=None, appliance=None): self.name = name self.description = description self.display_in = display_in self.catalog = catalog self.dialog = dialog Navigatable.__init__(self, appliance=appliance) def __str__(self): return self.name def create(self, cat_items): navigate_to(self, 'Add') domain = "ManageIQ (Locked)" fill(basic_info_form, {'name_text': self.name, 'description_text': self.description, 'display_checkbox': self.display_in, 'select_catalog': str(self.catalog), 'select_dialog': str(self.dialog)}) sel.click(basic_info_form.field_entry_point) if sel.text(basic_info_form.field_entry_point) == "": if version.current_version() < "5.7": dynamic_tree.click_path("Datastore", domain, "Service", "Provisioning", "StateMachines", "ServiceProvision_Template", "default") else: entry_tree.click_path("Datastore", domain, "Service", "Provisioning", "StateMachines", "ServiceProvision_Template", "default") sel.click(basic_info_form.apply_btn) tabstrip.select_tab("Resources") for cat_item in cat_items: fill(resources_form, {'choose_resource': cat_item}) sel.click(resources_form.add_button) 
flash.assert_success_message('Catalog Bundle "{}" was added'.format(self.name)) def update(self, updates): navigate_to(self, 'Edit') fill(basic_info_form, {'name_text': updates.get('name', None), 'description_text': updates.get('description', None)}) tabstrip.select_tab("Resources") fill(resources_form, {'choose_resource': updates.get('cat_item', None)}, action=resources_form.save_button) flash.assert_success_message('Catalog Bundle "{}" was saved'.format(self.name)) @navigator.register(CatalogItem, 'All') class ItemAll(CFMENavigateStep): prerequisite = NavigateToAttribute('appliance.server', 'LoggedIn') def am_i_here(self): return match_page(summary='All Service Catalog Items') def step(self): self.prerequisite_view.navigation.select('Services', 'Catalogs') tree = accordion.tree('Catalog Items') tree.click_path('All Catalog Items') def resetter(self): tb.refresh() tb.select('List View') # Ensure no rows are checked if paginator.page_controls_exist(): sel.check(paginator.check_all()) sel.uncheck(paginator.check_all()) @navigator.register(CatalogItem, 'Details') class ItemDetails(CFMENavigateStep): prerequisite = NavigateToSibling('All') # No am_i_here() due to summary duplication between item and bundle def step(self): listview_table.click_row_by_cells({'Name': self.obj.name, 'Description': self.obj.description, 'Type': 'Item'}) def resetter(self): tb.refresh() @navigator.register(CatalogItem, 'Add') class ItemAdd(CFMENavigateStep): prerequisite = NavigateToSibling('All') def am_i_here(self): return match_page(summary='Adding a new Service Catalog Item') def step(self): cfg_btn('Add a New Catalog Item') @navigator.register(CatalogItem, 'Edit') class ItemEdit(CFMENavigateStep): prerequisite = NavigateToSibling('Details') def am_i_here(self): return match_page(summary='Editing Service Catalog Item "{}"'.format(self.obj.name)) def step(self): cfg_btn('Edit this Item') @navigator.register(CatalogBundle, 'All') class BundleAll(CFMENavigateStep): prerequisite = 
NavigateToAttribute('appliance.server', 'LoggedIn') def am_i_here(self): return match_page(summary='All Service Catalog Items') def step(self): self.prerequisite_view.navigation.select('Services', 'Catalogs') tree = accordion.tree('Catalog Items') tree.click_path('All Catalog Items') def resetter(self): tb.refresh() tb.select('List View') # Ensure no rows are checked if paginator.page_controls_exist(): sel.check(paginator.check_all()) sel.uncheck(paginator.check_all()) @navigator.register(CatalogBundle, 'Details') class BundleDetails(CFMENavigateStep): prerequisite = NavigateToSibling('All') # No am_i_here() due to summary duplication between item and bundle def step(self): listview_table.click_row_by_cells({'Name': self.obj.name, 'Description': self.obj.description, 'Type': 'Bundle'}) def resetter(self): tb.refresh() @navigator.register(CatalogBundle, 'Add') class BundleAdd(CFMENavigateStep): prerequisite = NavigateToSibling('All') def am_i_here(self): return match_page(summary='Adding a new Catalog Bundle') def step(self): cfg_btn('Add a New Catalog Bundle') @navigator.register(CatalogBundle, 'Edit') class BundleEdit(CFMENavigateStep): prerequisite = NavigateToSibling('Details') def am_i_here(self): return match_page(summary='Editing Catalog Bundle "{}"'.format(self.obj.name)) def step(self): cfg_btn('Edit this Item')<|fim▁end|>
fields=[
<|file_name|>fonts.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # Copyright © 2012 - 2015 Michal Čihař <[email protected]> # # This file is part of Weblate <https://weblate.org/> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ''' Font handling wrapper. ''' from weblate import appsettings from PIL import ImageFont import os.path # List of chars in base DejaVu font, otherwise we use DroidSansFallback BASE_CHARS = frozenset(( 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f, 0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f, 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f, 0xa0, 0xa1, 0xa2, 0xa3, 
0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xab, 0xac, 0xad, 0xae, 0xaf, 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf, 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf, 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf, 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef, 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff, 0x100, 0x101, 0x102, 0x103, 0x104, 0x105, 0x106, 0x107, 0x108, 0x109, 0x10a, 0x10b, 0x10c, 0x10d, 0x10e, 0x10f, 0x110, 0x111, 0x112, 0x113, 0x114, 0x115, 0x116, 0x117, 0x118, 0x119, 0x11a, 0x11b, 0x11c, 0x11d, 0x11e, 0x11f, 0x120, 0x121, 0x122, 0x123, 0x124, 0x125, 0x126, 0x127, 0x128, 0x129, 0x12a, 0x12b, 0x12c, 0x12d, 0x12e, 0x12f, 0x130, 0x131, 0x132, 0x133, 0x134, 0x135, 0x136, 0x137, 0x138, 0x139, 0x13a, 0x13b, 0x13c, 0x13d, 0x13e, 0x13f, 0x140, 0x141, 0x142, 0x143, 0x144, 0x145, 0x146, 0x147, 0x148, 0x149, 0x14a, 0x14b, 0x14c, 0x14d, 0x14e, 0x14f, 0x150, 0x151, 0x152, 0x153, 0x154, 0x155, 0x156, 0x157, 0x158, 0x159, 0x15a, 0x15b, 0x15c, 0x15d, 0x15e, 0x15f, 0x160, 0x161, 0x162, 0x163, 0x164, 0x165, 0x166, 0x167, 0x168, 0x169, 0x16a, 0x16b, 0x16c, 0x16d, 0x16e, 0x16f, 0x170, 0x171, 0x172, 0x173, 0x174, 0x175, 0x176, 0x177, 0x178, 0x179, 0x17a, 0x17b, 0x17c, 0x17d, 0x17e, 0x17f, 0x180, 0x181, 0x182, 0x183, 0x184, 0x185, 0x186, 0x187, 0x188, 0x189, 0x18a, 0x18b, 0x18c, 0x18d, 0x18e, 0x18f, 0x190, 0x191, 0x192, 0x193, 0x194, 0x195, 0x196, 0x197, 0x198, 0x199, 0x19a, 0x19b, 0x19c, 0x19d, 0x19e, 0x19f, 0x1a0, 0x1a1, 0x1a2, 0x1a3, 0x1a4, 0x1a5, 0x1a6, 0x1a7, 0x1a8, 0x1a9, 0x1aa, 0x1ab, 0x1ac, 0x1ad, 0x1ae, 0x1af, 0x1b0, 0x1b1, 0x1b2, 0x1b3, 0x1b4, 0x1b5, 0x1b6, 0x1b7, 0x1b8, 0x1b9, 0x1ba, 0x1bb, 0x1bc, 0x1bd, 0x1be, 0x1bf, 0x1c0, 0x1c1, 0x1c2, 0x1c3, 0x1c4, 0x1c5, 0x1c6, 0x1c7, 0x1c8, 0x1c9, 0x1ca, 0x1cb, 0x1cc, 0x1cd, 
0x1ce, 0x1cf, 0x1d0, 0x1d1, 0x1d2, 0x1d3, 0x1d4, 0x1d5, 0x1d6, 0x1d7, 0x1d8, 0x1d9, 0x1da, 0x1db, 0x1dc, 0x1dd, 0x1de, 0x1df, 0x1e0, 0x1e1, 0x1e2, 0x1e3, 0x1e4, 0x1e5, 0x1e6, 0x1e7, 0x1e8, 0x1e9, 0x1ea, 0x1eb, 0x1ec, 0x1ed, 0x1ee, 0x1ef, 0x1f0, 0x1f1, 0x1f2, 0x1f3, 0x1f4, 0x1f5, 0x1f6, 0x1f7, 0x1f8, 0x1f9, 0x1fa, 0x1fb, 0x1fc, 0x1fd, 0x1fe, 0x1ff, 0x200, 0x201, 0x202, 0x203, 0x204, 0x205, 0x206, 0x207, 0x208, 0x209, 0x20a, 0x20b, 0x20c, 0x20d, 0x20e, 0x20f, 0x210, 0x211, 0x212, 0x213, 0x214, 0x215, 0x216, 0x217, 0x218, 0x219, 0x21a, 0x21b, 0x21c, 0x21d, 0x21e, 0x21f, 0x220, 0x221, 0x222, 0x223, 0x224, 0x225, 0x226, 0x227, 0x228, 0x229, 0x22a, 0x22b, 0x22c, 0x22d, 0x22e, 0x22f, 0x230, 0x231, 0x232, 0x233, 0x234, 0x235, 0x236, 0x237, 0x238, 0x239, 0x23a, 0x23b, 0x23c, 0x23d, 0x23e, 0x23f, 0x240, 0x241, 0x242, 0x243, 0x244, 0x245, 0x246, 0x247, 0x248, 0x249, 0x24a, 0x24b, 0x24c, 0x24d, 0x24e, 0x24f, 0x250, 0x251, 0x252, 0x253, 0x254, 0x255, 0x256, 0x257, 0x258, 0x259, 0x25a, 0x25b, 0x25c, 0x25d, 0x25e, 0x25f, 0x260, 0x261, 0x262, 0x263, 0x264, 0x265, 0x266, 0x267, 0x268, 0x269, 0x26a, 0x26b, 0x26c, 0x26d, 0x26e, 0x26f, 0x270, 0x271, 0x272, 0x273, 0x274, 0x275, 0x276, 0x277, 0x278, 0x279, 0x27a, 0x27b, 0x27c, 0x27d, 0x27e, 0x27f, 0x280, 0x281, 0x282, 0x283, 0x284, 0x285, 0x286, 0x287, 0x288, 0x289, 0x28a, 0x28b, 0x28c, 0x28d, 0x28e, 0x28f, 0x290, 0x291, 0x292, 0x293, 0x294, 0x295, 0x296, 0x297, 0x298, 0x299, 0x29a, 0x29b, 0x29c, 0x29d, 0x29e, 0x29f, 0x2a0, 0x2a1, 0x2a2, 0x2a3, 0x2a4, 0x2a5, 0x2a6, 0x2a7, 0x2a8, 0x2a9, 0x2aa, 0x2ab, 0x2ac, 0x2ad, 0x2ae, 0x2af, 0x2b0, 0x2b1, 0x2b2, 0x2b3, 0x2b4, 0x2b5, 0x2b6, 0x2b7, 0x2b8, 0x2b9, 0x2ba, 0x2bb, 0x2bc, 0x2bd, 0x2be, 0x2bf, 0x2c0, 0x2c1, 0x2c2, 0x2c3, 0x2c4, 0x2c5, 0x2c6, 0x2c7, 0x2c8, 0x2c9, 0x2ca, 0x2cb, 0x2cc, 0x2cd, 0x2ce, 0x2cf, 0x2d0, 0x2d1, 0x2d2, 0x2d3, 0x2d4, 0x2d5, 0x2d6, 0x2d7, 0x2d8, 0x2d9, 0x2da, 0x2db, 0x2dc, 0x2dd, 0x2de, 0x2df, 0x2e0, 0x2e1, 0x2e2, 0x2e3, 0x2e4, 0x2e5, 0x2e6, 0x2e7, 0x2e8, 0x2e9, 0x2ec, 
0x2ed, 0x2ee, 0x2f3, 0x2f7, 0x300, 0x301, 0x302, 0x303, 0x304, 0x305, 0x306, 0x307, 0x308, 0x309, 0x30a, 0x30b, 0x30c, 0x30d, 0x30e, 0x30f, 0x310, 0x311, 0x312, 0x313, 0x314, 0x315, 0x316, 0x317, 0x318, 0x319, 0x31a, 0x31b, 0x31c, 0x31d, 0x31e, 0x31f, 0x320, 0x321, 0x322, 0x323, 0x324, 0x325, 0x326, 0x327, 0x328, 0x329, 0x32a, 0x32b, 0x32c, 0x32d, 0x32e, 0x32f, 0x330, 0x331, 0x332, 0x333, 0x334, 0x335, 0x336, 0x337, 0x338, 0x339, 0x33a, 0x33b, 0x33c, 0x33d, 0x33e, 0x33f, 0x340, 0x341, 0x342, 0x343, 0x344, 0x345, 0x346, 0x347, 0x348, 0x349, 0x34a, 0x34b, 0x34c, 0x34d, 0x34e, 0x34f, 0x351, 0x352, 0x353, 0x357, 0x358, 0x35a, 0x35c, 0x35d, 0x35e, 0x35f, 0x360, 0x361, 0x362, 0x370, 0x371, 0x372, 0x373, 0x374, 0x375, 0x376, 0x377, 0x37a, 0x37b, 0x37c, 0x37d, 0x37e, 0x384, 0x385, 0x386, 0x387, 0x388, 0x389, 0x38a, 0x38c, 0x38e, 0x38f, 0x390, 0x391, 0x392, 0x393, 0x394, 0x395, 0x396, 0x397, 0x398, 0x399, 0x39a, 0x39b, 0x39c, 0x39d, 0x39e, 0x39f, 0x3a0, 0x3a1, 0x3a3, 0x3a4, 0x3a5, 0x3a6, 0x3a7, 0x3a8, 0x3a9, 0x3aa, 0x3ab, 0x3ac, 0x3ad, 0x3ae, 0x3af, 0x3b0, 0x3b1, 0x3b2, 0x3b3, 0x3b4, 0x3b5, 0x3b6, 0x3b7, 0x3b8, 0x3b9, 0x3ba, 0x3bb, 0x3bc, 0x3bd, 0x3be, 0x3bf, 0x3c0, 0x3c1, 0x3c2, 0x3c3, 0x3c4, 0x3c5, 0x3c6, 0x3c7, 0x3c8, 0x3c9, 0x3ca, 0x3cb, 0x3cc, 0x3cd, 0x3ce, 0x3cf, 0x3d0, 0x3d1, 0x3d2, 0x3d3, 0x3d4, 0x3d5, 0x3d6, 0x3d7, 0x3d8, 0x3d9, 0x3da, 0x3db, 0x3dc, 0x3dd, 0x3de, 0x3df, 0x3e0, 0x3e1, 0x3e2, 0x3e3, 0x3e4, 0x3e5, 0x3e6, 0x3e7, 0x3e8, 0x3e9, 0x3ea, 0x3eb, 0x3ec, 0x3ed, 0x3ee, 0x3ef, 0x3f0, 0x3f1, 0x3f2, 0x3f3, 0x3f4, 0x3f5, 0x3f6, 0x3f7, 0x3f8, 0x3f9, 0x3fa, 0x3fb, 0x3fc, 0x3fd, 0x3fe, 0x3ff, 0x400, 0x401, 0x402, 0x403, 0x404, 0x405, 0x406, 0x407, 0x408, 0x409, 0x40a, 0x40b, 0x40c, 0x40d, 0x40e, 0x40f, 0x410, 0x411, 0x412, 0x413, 0x414, 0x415, 0x416, 0x417, 0x418, 0x419, 0x41a, 0x41b, 0x41c, 0x41d, 0x41e, 0x41f, 0x420, 0x421, 0x422, 0x423, 0x424, 0x425, 0x426, 0x427, 0x428, 0x429, 0x42a, 0x42b, 0x42c, 0x42d, 0x42e, 0x42f, 0x430, 0x431, 0x432, 0x433, 0x434, 0x435, 
0x436, 0x437, 0x438, 0x439, 0x43a, 0x43b, 0x43c, 0x43d, 0x43e, 0x43f, 0x440, 0x441, 0x442, 0x443, 0x444, 0x445, 0x446, 0x447, 0x448, 0x449, 0x44a, 0x44b, 0x44c, 0x44d, 0x44e, 0x44f, 0x450, 0x451, 0x452, 0x453, 0x454, 0x455, 0x456, 0x457, 0x458, 0x459, 0x45a, 0x45b, 0x45c, 0x45d, 0x45e, 0x45f, 0x460, 0x461, 0x462, 0x463, 0x464, 0x465, 0x466, 0x467, 0x468, 0x469, 0x46a, 0x46b, 0x46c, 0x46d, 0x46e, 0x46f, 0x470, 0x471, 0x472, 0x473, 0x474, 0x475, 0x476, 0x477, 0x478, 0x479, 0x47a, 0x47b, 0x47c, 0x47d, 0x47e, 0x47f, 0x480, 0x481, 0x482, 0x483, 0x484, 0x485, 0x486, 0x487, 0x488, 0x489, 0x48a, 0x48b, 0x48c, 0x48d, 0x48e, 0x48f, 0x490, 0x491, 0x492, 0x493, 0x494, 0x495, 0x496, 0x497, 0x498, 0x499, 0x49a, 0x49b, 0x49c, 0x49d, 0x49e, 0x49f, 0x4a0, 0x4a1, 0x4a2, 0x4a3, 0x4a4, 0x4a5, 0x4a6, 0x4a7, 0x4a8, 0x4a9, 0x4aa, 0x4ab, 0x4ac, 0x4ad, 0x4ae, 0x4af, 0x4b0, 0x4b1, 0x4b2, 0x4b3, 0x4b4, 0x4b5, 0x4b6, 0x4b7, 0x4b8, 0x4b9, 0x4ba, 0x4bb, 0x4bc, 0x4bd, 0x4be, 0x4bf, 0x4c0, 0x4c1, 0x4c2, 0x4c3, 0x4c4, 0x4c5, 0x4c6, 0x4c7, 0x4c8, 0x4c9, 0x4ca, 0x4cb, 0x4cc, 0x4cd, 0x4ce, 0x4cf, 0x4d0, 0x4d1, 0x4d2, 0x4d3, 0x4d4, 0x4d5, 0x4d6, 0x4d7, 0x4d8, 0x4d9, 0x4da, 0x4db, 0x4dc, 0x4dd, 0x4de, 0x4df, 0x4e0, 0x4e1, 0x4e2, 0x4e3, 0x4e4, 0x4e5, 0x4e6, 0x4e7, 0x4e8, 0x4e9, 0x4ea, 0x4eb, 0x4ec, 0x4ed, 0x4ee, 0x4ef, 0x4f0, 0x4f1, 0x4f2, 0x4f3, 0x4f4, 0x4f5, 0x4f6, 0x4f7, 0x4f8, 0x4f9, 0x4fa, 0x4fb, 0x4fc, 0x4fd, 0x4fe, 0x4ff, 0x500, 0x501, 0x502, 0x503, 0x504, 0x505, 0x506, 0x507, 0x508, 0x509, 0x50a, 0x50b, 0x50c, 0x50d, 0x50e, 0x50f, 0x510, 0x511, 0x512, 0x513, 0x514, 0x515, 0x516, 0x517, 0x518, 0x519, 0x51a, 0x51b, 0x51c, 0x51d, 0x51e, 0x51f, 0x520, 0x521, 0x522, 0x523, 0x524, 0x525, 0x531, 0x532, 0x533, 0x534, 0x535, 0x536, 0x537, 0x538, 0x539, 0x53a, 0x53b, 0x53c, 0x53d, 0x53e, 0x53f, 0x540, 0x541, 0x542, 0x543, 0x544, 0x545, 0x546, 0x547, 0x548, 0x549, 0x54a, 0x54b, 0x54c, 0x54d, 0x54e, 0x54f, 0x550, 0x551, 0x552, 0x553, 0x554, 0x555, 0x556, 0x559, 0x55a, 0x55b, 0x55c, 0x55d, 0x55e, 0x55f, 
0x561, 0x562, 0x563, 0x564, 0x565, 0x566, 0x567, 0x568, 0x569, 0x56a, 0x56b, 0x56c, 0x56d, 0x56e, 0x56f, 0x570, 0x571, 0x572, 0x573, 0x574, 0x575, 0x576, 0x577, 0x578, 0x579, 0x57a, 0x57b, 0x57c, 0x57d, 0x57e, 0x57f, 0x580, 0x581, 0x582, 0x583, 0x584, 0x585, 0x586, 0x587, 0x589, 0x58a, 0x5b0, 0x5b1, 0x5b2, 0x5b3, 0x5b4, 0x5b5, 0x5b6, 0x5b7, 0x5b8, 0x5b9, 0x5ba, 0x5bb, 0x5bc, 0x5bd, 0x5be, 0x5bf, 0x5c0, 0x5c1, 0x5c2, 0x5c3, 0x5c6, 0x5c7, 0x5d0, 0x5d1, 0x5d2, 0x5d3, 0x5d4, 0x5d5, 0x5d6, 0x5d7, 0x5d8, 0x5d9, 0x5da, 0x5db, 0x5dc, 0x5dd, 0x5de, 0x5df, 0x5e0, 0x5e1, 0x5e2, 0x5e3, 0x5e4, 0x5e5, 0x5e6, 0x5e7, 0x5e8, 0x5e9, 0x5ea, 0x5f0, 0x5f1, 0x5f2, 0x5f3, 0x5f4, 0x606, 0x607, 0x609, 0x60a, 0x60c, 0x615, 0x61b, 0x61f, 0x621, 0x622, 0x623, 0x624, 0x625, 0x626, 0x627, 0x628, 0x629, 0x62a, 0x62b, 0x62c, 0x62d, 0x62e, 0x62f, 0x630, 0x631, 0x632, 0x633, 0x634, 0x635, 0x636, 0x637, 0x638, 0x639, 0x63a, 0x640, 0x641, 0x642, 0x643, 0x644, 0x645, 0x646, 0x647, 0x648, 0x649, 0x64a, 0x64b, 0x64c, 0x64d, 0x64e, 0x64f, 0x650, 0x651, 0x652, 0x653, 0x654, 0x655, 0x657, 0x65a, 0x660, 0x661, 0x662, 0x663, 0x664, 0x665, 0x666, 0x667, 0x668, 0x669, 0x66a, 0x66b, 0x66c, 0x66d, 0x66e, 0x66f, 0x670, 0x674, 0x679, 0x67a, 0x67b, 0x67c, 0x67d, 0x67e, 0x67f, 0x680, 0x681, 0x682, 0x683, 0x684, 0x685, 0x686, 0x687, 0x688, 0x689, 0x68a, 0x68b, 0x68c, 0x68d, 0x68e, 0x68f, 0x690, 0x691, 0x692, 0x693, 0x694, 0x695, 0x696, 0x697, 0x698, 0x699, 0x69a, 0x69b, 0x69c, 0x69d, 0x69e, 0x69f, 0x6a0, 0x6a1, 0x6a2, 0x6a3, 0x6a4, 0x6a5, 0x6a6, 0x6a7, 0x6a8, 0x6a9, 0x6aa, 0x6ab, 0x6ac, 0x6ad, 0x6ae, 0x6af, 0x6b0, 0x6b1, 0x6b2, 0x6b3, 0x6b4, 0x6b5, 0x6b6, 0x6b7, 0x6b8, 0x6b9, 0x6ba, 0x6bb, 0x6bc, 0x6bd, 0x6be, 0x6bf, 0x6c6, 0x6cc, 0x6ce, 0x6d5, 0x6f0, 0x6f1, 0x6f2, 0x6f3, 0x6f4, 0x6f5, 0x6f6, 0x6f7, 0x6f8, 0x6f9, 0x7c0, 0x7c1, 0x7c2, 0x7c3, 0x7c4, 0x7c5, 0x7c6, 0x7c7, 0x7c8, 0x7c9, 0x7ca, 0x7cb, 0x7cc, 0x7cd, 0x7ce, 0x7cf, 0x7d0, 0x7d1, 0x7d2, 0x7d3, 0x7d4, 0x7d5, 0x7d6, 0x7d7, 0x7d8, 0x7d9, 0x7da, 0x7db, 0x7dc, 
0x7dd, 0x7de, 0x7df, 0x7e0, 0x7e1, 0x7e2, 0x7e3, 0x7e4, 0x7e5, 0x7e6, 0x7e7, 0x7eb, 0x7ec, 0x7ed, 0x7ee, 0x7ef, 0x7f0, 0x7f1, 0x7f2, 0x7f3, 0x7f4, 0x7f5, 0x7f8, 0x7f9, 0x7fa, 0xe3f, 0xe81, 0xe82, 0xe84, 0xe87, 0xe88, 0xe8a, 0xe8d, 0xe94, 0xe95, 0xe96, 0xe97, 0xe99, 0xe9a, 0xe9b, 0xe9c, 0xe9d, 0xe9e, 0xe9f, 0xea1, 0xea2, 0xea3, 0xea5, 0xea7, 0xeaa, 0xeab, 0xead, 0xeae, 0xeaf, 0xeb0, 0xeb1, 0xeb2, 0xeb3, 0xeb4, 0xeb5, 0xeb6, 0xeb7, 0xeb8, 0xeb9, 0xebb, 0xebc, 0xebd, 0xec0, 0xec1, 0xec2, 0xec3, 0xec4, 0xec6, 0xec8, 0xec9, 0xeca, 0xecb, 0xecc, 0xecd, 0xed0, 0xed1, 0xed2, 0xed3, 0xed4, 0xed5, 0xed6, 0xed7, 0xed8, 0xed9, 0xedc, 0xedd, 0x10a0, 0x10a1, 0x10a2, 0x10a3, 0x10a4, 0x10a5, 0x10a6, 0x10a7, 0x10a8, 0x10a9, 0x10aa, 0x10ab, 0x10ac, 0x10ad, 0x10ae, 0x10af, 0x10b0, 0x10b1, 0x10b2, 0x10b3, 0x10b4, 0x10b5, 0x10b6, 0x10b7, 0x10b8, 0x10b9, 0x10ba, 0x10bb, 0x10bc, 0x10bd, 0x10be, 0x10bf, 0x10c0, 0x10c1, 0x10c2, 0x10c3, 0x10c4, 0x10c5, 0x10d0, 0x10d1, 0x10d2, 0x10d3, 0x10d4, 0x10d5, 0x10d6, 0x10d7, 0x10d8, 0x10d9, 0x10da, 0x10db, 0x10dc, 0x10dd, 0x10de, 0x10df, 0x10e0, 0x10e1, 0x10e2, 0x10e3, 0x10e4, 0x10e5, 0x10e6, 0x10e7, 0x10e8, 0x10e9, 0x10ea, 0x10eb, 0x10ec, 0x10ed, 0x10ee, 0x10ef, 0x10f0, 0x10f1, 0x10f2, 0x10f3, 0x10f4, 0x10f5, 0x10f6, 0x10f7, 0x10f8, 0x10f9, 0x10fa, 0x10fb, 0x10fc, 0x1401, 0x1402, 0x1403, 0x1404, 0x1405, 0x1406, 0x1407, 0x1409, 0x140a, 0x140b, 0x140c, 0x140d, 0x140e, 0x140f, 0x1410, 0x1411, 0x1412, 0x1413, 0x1414, 0x1415, 0x1416, 0x1417, 0x1418, 0x1419, 0x141a, 0x141b, 0x141d, 0x141e, 0x141f, 0x1420, 0x1421, 0x1422, 0x1423, 0x1424, 0x1425, 0x1426, 0x1427, 0x1428, 0x1429, 0x142a, 0x142b, 0x142c, 0x142d, 0x142e, 0x142f, 0x1430, 0x1431, 0x1432, 0x1433, 0x1434, 0x1435, 0x1437, 0x1438, 0x1439, 0x143a, 0x143b, 0x143c, 0x143d, 0x143e, 0x143f, 0x1440, 0x1441, 0x1442, 0x1443, 0x1444, 0x1445, 0x1446, 0x1447, 0x1448, 0x1449, 0x144a, 0x144c, 0x144d, 0x144e, 0x144f, 0x1450, 0x1451, 0x1452, 0x1454, 0x1455, 0x1456, 0x1457, 0x1458, 0x1459, 0x145a, 0x145b, 0x145c, 
0x145d, 0x145e, 0x145f, 0x1460, 0x1461, 0x1462, 0x1463, 0x1464, 0x1465, 0x1466, 0x1467, 0x1468, 0x1469, 0x146a, 0x146b, 0x146c, 0x146d, 0x146e, 0x146f, 0x1470, 0x1471, 0x1472, 0x1473, 0x1474, 0x1475, 0x1476, 0x1477, 0x1478, 0x1479, 0x147a, 0x147b, 0x147c, 0x147d, 0x147e, 0x147f, 0x1480, 0x1481, 0x1482, 0x1483, 0x1484, 0x1485, 0x1486, 0x1487, 0x1488, 0x1489, 0x148a, 0x148b, 0x148c, 0x148d, 0x148e, 0x148f, 0x1490, 0x1491, 0x1492, 0x1493, 0x1494, 0x1495, 0x1496, 0x1497, 0x1498, 0x1499, 0x149a, 0x149b, 0x149c, 0x149d, 0x149e, 0x149f, 0x14a0, 0x14a1, 0x14a2, 0x14a3, 0x14a4, 0x14a5, 0x14a6, 0x14a7, 0x14a8, 0x14a9, 0x14aa, 0x14ab, 0x14ac, 0x14ad, 0x14ae, 0x14af, 0x14b0, 0x14b1, 0x14b2, 0x14b3, 0x14b4, 0x14b5, 0x14b6, 0x14b7, 0x14b8, 0x14b9, 0x14ba, 0x14bb, 0x14bc, 0x14bd, 0x14c0, 0x14c1, 0x14c2, 0x14c3, 0x14c4, 0x14c5, 0x14c6, 0x14c7, 0x14c8, 0x14c9, 0x14ca, 0x14cb, 0x14cc, 0x14cd, 0x14ce, 0x14cf, 0x14d0, 0x14d1, 0x14d2, 0x14d3, 0x14d4, 0x14d5, 0x14d6, 0x14d7,<|fim▁hole|> 0x14ea, 0x14ec, 0x14ed, 0x14ee, 0x14ef, 0x14f0, 0x14f1, 0x14f2, 0x14f3, 0x14f4, 0x14f5, 0x14f6, 0x14f7, 0x14f8, 0x14f9, 0x14fa, 0x14fb, 0x14fc, 0x14fd, 0x14fe, 0x14ff, 0x1500, 0x1501, 0x1502, 0x1503, 0x1504, 0x1505, 0x1506, 0x1507, 0x1510, 0x1511, 0x1512, 0x1513, 0x1514, 0x1515, 0x1516, 0x1517, 0x1518, 0x1519, 0x151a, 0x151b, 0x151c, 0x151d, 0x151e, 0x151f, 0x1520, 0x1521, 0x1522, 0x1523, 0x1524, 0x1525, 0x1526, 0x1527, 0x1528, 0x1529, 0x152a, 0x152b, 0x152c, 0x152d, 0x152e, 0x152f, 0x1530, 0x1531, 0x1532, 0x1533, 0x1534, 0x1535, 0x1536, 0x1537, 0x1538, 0x1539, 0x153a, 0x153b, 0x153c, 0x153d, 0x153e, 0x1540, 0x1541, 0x1542, 0x1543, 0x1544, 0x1545, 0x1546, 0x1547, 0x1548, 0x1549, 0x154a, 0x154b, 0x154c, 0x154d, 0x154e, 0x154f, 0x1550, 0x1552, 0x1553, 0x1554, 0x1555, 0x1556, 0x1557, 0x1558, 0x1559, 0x155a, 0x155b, 0x155c, 0x155d, 0x155e, 0x155f, 0x1560, 0x1561, 0x1562, 0x1563, 0x1564, 0x1565, 0x1566, 0x1567, 0x1568, 0x1569, 0x156a, 0x1574, 0x1575, 0x1576, 0x1577, 0x1578, 0x1579, 0x157a, 0x157b, 0x157c, 
0x157d, 0x157e, 0x157f, 0x1580, 0x1581, 0x1582, 0x1583, 0x1584, 0x1585, 0x158a, 0x158b, 0x158c, 0x158d, 0x158e, 0x158f, 0x1590, 0x1591, 0x1592, 0x1593, 0x1594, 0x1595, 0x1596, 0x15a0, 0x15a1, 0x15a2, 0x15a3, 0x15a4, 0x15a5, 0x15a6, 0x15a7, 0x15a8, 0x15a9, 0x15aa, 0x15ab, 0x15ac, 0x15ad, 0x15ae, 0x15af, 0x15de, 0x15e1, 0x1646, 0x1647, 0x166e, 0x166f, 0x1670, 0x1671, 0x1672, 0x1673, 0x1674, 0x1675, 0x1676, 0x1680, 0x1681, 0x1682, 0x1683, 0x1684, 0x1685, 0x1686, 0x1687, 0x1688, 0x1689, 0x168a, 0x168b, 0x168c, 0x168d, 0x168e, 0x168f, 0x1690, 0x1691, 0x1692, 0x1693, 0x1694, 0x1695, 0x1696, 0x1697, 0x1698, 0x1699, 0x169a, 0x169b, 0x169c, 0x1d00, 0x1d01, 0x1d02, 0x1d03, 0x1d04, 0x1d05, 0x1d06, 0x1d07, 0x1d08, 0x1d09, 0x1d0a, 0x1d0b, 0x1d0c, 0x1d0d, 0x1d0e, 0x1d0f, 0x1d10, 0x1d11, 0x1d12, 0x1d13, 0x1d14, 0x1d16, 0x1d17, 0x1d18, 0x1d19, 0x1d1a, 0x1d1b, 0x1d1c, 0x1d1d, 0x1d1e, 0x1d1f, 0x1d20, 0x1d21, 0x1d22, 0x1d23, 0x1d26, 0x1d27, 0x1d28, 0x1d29, 0x1d2a, 0x1d2b, 0x1d2c, 0x1d2d, 0x1d2e, 0x1d30, 0x1d31, 0x1d32, 0x1d33, 0x1d34, 0x1d35, 0x1d36, 0x1d37, 0x1d38, 0x1d39, 0x1d3a, 0x1d3b, 0x1d3c, 0x1d3d, 0x1d3e, 0x1d3f, 0x1d40, 0x1d41, 0x1d42, 0x1d43, 0x1d44, 0x1d45, 0x1d46, 0x1d47, 0x1d48, 0x1d49, 0x1d4a, 0x1d4b, 0x1d4c, 0x1d4d, 0x1d4e, 0x1d4f, 0x1d50, 0x1d51, 0x1d52, 0x1d53, 0x1d54, 0x1d55, 0x1d56, 0x1d57, 0x1d58, 0x1d59, 0x1d5a, 0x1d5b, 0x1d5d, 0x1d5e, 0x1d5f, 0x1d60, 0x1d61, 0x1d62, 0x1d63, 0x1d64, 0x1d65, 0x1d66, 0x1d67, 0x1d68, 0x1d69, 0x1d6a, 0x1d77, 0x1d78, 0x1d7b, 0x1d7d, 0x1d85, 0x1d9b, 0x1d9c, 0x1d9d, 0x1d9e, 0x1d9f, 0x1da0, 0x1da1, 0x1da2, 0x1da3, 0x1da4, 0x1da5, 0x1da6, 0x1da7, 0x1da8, 0x1da9, 0x1daa, 0x1dab, 0x1dac, 0x1dad, 0x1dae, 0x1daf, 0x1db0, 0x1db1, 0x1db2, 0x1db3, 0x1db4, 0x1db5, 0x1db6, 0x1db7, 0x1db8, 0x1db9, 0x1dba, 0x1dbb, 0x1dbc, 0x1dbd, 0x1dbe, 0x1dbf, 0x1dc4, 0x1dc5, 0x1dc6, 0x1dc7, 0x1dc8, 0x1dc9, 0x1e00, 0x1e01, 0x1e02, 0x1e03, 0x1e04, 0x1e05, 0x1e06, 0x1e07, 0x1e08, 0x1e09, 0x1e0a, 0x1e0b, 0x1e0c, 0x1e0d, 0x1e0e, 0x1e0f, 0x1e10, 0x1e11, 0x1e12, 0x1e13, 
0x1e14, 0x1e15, 0x1e16, 0x1e17, 0x1e18, 0x1e19, 0x1e1a, 0x1e1b, 0x1e1c, 0x1e1d, 0x1e1e, 0x1e1f, 0x1e20, 0x1e21, 0x1e22, 0x1e23, 0x1e24, 0x1e25, 0x1e26, 0x1e27, 0x1e28, 0x1e29, 0x1e2a, 0x1e2b, 0x1e2c, 0x1e2d, 0x1e2e, 0x1e2f, 0x1e30, 0x1e31, 0x1e32, 0x1e33, 0x1e34, 0x1e35, 0x1e36, 0x1e37, 0x1e38, 0x1e39, 0x1e3a, 0x1e3b, 0x1e3c, 0x1e3d, 0x1e3e, 0x1e3f, 0x1e40, 0x1e41, 0x1e42, 0x1e43, 0x1e44, 0x1e45, 0x1e46, 0x1e47, 0x1e48, 0x1e49, 0x1e4a, 0x1e4b, 0x1e4c, 0x1e4d, 0x1e4e, 0x1e4f, 0x1e50, 0x1e51, 0x1e52, 0x1e53, 0x1e54, 0x1e55, 0x1e56, 0x1e57, 0x1e58, 0x1e59, 0x1e5a, 0x1e5b, 0x1e5c, 0x1e5d, 0x1e5e, 0x1e5f, 0x1e60, 0x1e61, 0x1e62, 0x1e63, 0x1e64, 0x1e65, 0x1e66, 0x1e67, 0x1e68, 0x1e69, 0x1e6a, 0x1e6b, 0x1e6c, 0x1e6d, 0x1e6e, 0x1e6f, 0x1e70, 0x1e71, 0x1e72, 0x1e73, 0x1e74, 0x1e75, 0x1e76, 0x1e77, 0x1e78, 0x1e79, 0x1e7a, 0x1e7b, 0x1e7c, 0x1e7d, 0x1e7e, 0x1e7f, 0x1e80, 0x1e81, 0x1e82, 0x1e83, 0x1e84, 0x1e85, 0x1e86, 0x1e87, 0x1e88, 0x1e89, 0x1e8a, 0x1e8b, 0x1e8c, 0x1e8d, 0x1e8e, 0x1e8f, 0x1e90, 0x1e91, 0x1e92, 0x1e93, 0x1e94, 0x1e95, 0x1e96, 0x1e97, 0x1e98, 0x1e99, 0x1e9a, 0x1e9b, 0x1e9c, 0x1e9d, 0x1e9e, 0x1e9f, 0x1ea0, 0x1ea1, 0x1ea2, 0x1ea3, 0x1ea4, 0x1ea5, 0x1ea6, 0x1ea7, 0x1ea8, 0x1ea9, 0x1eaa, 0x1eab, 0x1eac, 0x1ead, 0x1eae, 0x1eaf, 0x1eb0, 0x1eb1, 0x1eb2, 0x1eb3, 0x1eb4, 0x1eb5, 0x1eb6, 0x1eb7, 0x1eb8, 0x1eb9, 0x1eba, 0x1ebb, 0x1ebc, 0x1ebd, 0x1ebe, 0x1ebf, 0x1ec0, 0x1ec1, 0x1ec2, 0x1ec3, 0x1ec4, 0x1ec5, 0x1ec6, 0x1ec7, 0x1ec8, 0x1ec9, 0x1eca, 0x1ecb, 0x1ecc, 0x1ecd, 0x1ece, 0x1ecf, 0x1ed0, 0x1ed1, 0x1ed2, 0x1ed3, 0x1ed4, 0x1ed5, 0x1ed6, 0x1ed7, 0x1ed8, 0x1ed9, 0x1eda, 0x1edb, 0x1edc, 0x1edd, 0x1ede, 0x1edf, 0x1ee0, 0x1ee1, 0x1ee2, 0x1ee3, 0x1ee4, 0x1ee5, 0x1ee6, 0x1ee7, 0x1ee8, 0x1ee9, 0x1eea, 0x1eeb, 0x1eec, 0x1eed, 0x1eee, 0x1eef, 0x1ef0, 0x1ef1, 0x1ef2, 0x1ef3, 0x1ef4, 0x1ef5, 0x1ef6, 0x1ef7, 0x1ef8, 0x1ef9, 0x1efa, 0x1efb, 0x1f00, 0x1f01, 0x1f02, 0x1f03, 0x1f04, 0x1f05, 0x1f06, 0x1f07, 0x1f08, 0x1f09, 0x1f0a, 0x1f0b, 0x1f0c, 0x1f0d, 0x1f0e, 0x1f0f, 0x1f10, 0x1f11, 
0x1f12, 0x1f13, 0x1f14, 0x1f15, 0x1f18, 0x1f19, 0x1f1a, 0x1f1b, 0x1f1c, 0x1f1d, 0x1f20, 0x1f21, 0x1f22, 0x1f23, 0x1f24, 0x1f25, 0x1f26, 0x1f27, 0x1f28, 0x1f29, 0x1f2a, 0x1f2b, 0x1f2c, 0x1f2d, 0x1f2e, 0x1f2f, 0x1f30, 0x1f31, 0x1f32, 0x1f33, 0x1f34, 0x1f35, 0x1f36, 0x1f37, 0x1f38, 0x1f39, 0x1f3a, 0x1f3b, 0x1f3c, 0x1f3d, 0x1f3e, 0x1f3f, 0x1f40, 0x1f41, 0x1f42, 0x1f43, 0x1f44, 0x1f45, 0x1f48, 0x1f49, 0x1f4a, 0x1f4b, 0x1f4c, 0x1f4d, 0x1f50, 0x1f51, 0x1f52, 0x1f53, 0x1f54, 0x1f55, 0x1f56, 0x1f57, 0x1f59, 0x1f5b, 0x1f5d, 0x1f5f, 0x1f60, 0x1f61, 0x1f62, 0x1f63, 0x1f64, 0x1f65, 0x1f66, 0x1f67, 0x1f68, 0x1f69, 0x1f6a, 0x1f6b, 0x1f6c, 0x1f6d, 0x1f6e, 0x1f6f, 0x1f70, 0x1f71, 0x1f72, 0x1f73, 0x1f74, 0x1f75, 0x1f76, 0x1f77, 0x1f78, 0x1f79, 0x1f7a, 0x1f7b, 0x1f7c, 0x1f7d, 0x1f80, 0x1f81, 0x1f82, 0x1f83, 0x1f84, 0x1f85, 0x1f86, 0x1f87, 0x1f88, 0x1f89, 0x1f8a, 0x1f8b, 0x1f8c, 0x1f8d, 0x1f8e, 0x1f8f, 0x1f90, 0x1f91, 0x1f92, 0x1f93, 0x1f94, 0x1f95, 0x1f96, 0x1f97, 0x1f98, 0x1f99, 0x1f9a, 0x1f9b, 0x1f9c, 0x1f9d, 0x1f9e, 0x1f9f, 0x1fa0, 0x1fa1, 0x1fa2, 0x1fa3, 0x1fa4, 0x1fa5, 0x1fa6, 0x1fa7, 0x1fa8, 0x1fa9, 0x1faa, 0x1fab, 0x1fac, 0x1fad, 0x1fae, 0x1faf, 0x1fb0, 0x1fb1, 0x1fb2, 0x1fb3, 0x1fb4, 0x1fb6, 0x1fb7, 0x1fb8, 0x1fb9, 0x1fba, 0x1fbb, 0x1fbc, 0x1fbd, 0x1fbe, 0x1fbf, 0x1fc0, 0x1fc1, 0x1fc2, 0x1fc3, 0x1fc4, 0x1fc6, 0x1fc7, 0x1fc8, 0x1fc9, 0x1fca, 0x1fcb, 0x1fcc, 0x1fcd, 0x1fce, 0x1fcf, 0x1fd0, 0x1fd1, 0x1fd2, 0x1fd3, 0x1fd6, 0x1fd7, 0x1fd8, 0x1fd9, 0x1fda, 0x1fdb, 0x1fdd, 0x1fde, 0x1fdf, 0x1fe0, 0x1fe1, 0x1fe2, 0x1fe3, 0x1fe4, 0x1fe5, 0x1fe6, 0x1fe7, 0x1fe8, 0x1fe9, 0x1fea, 0x1feb, 0x1fec, 0x1fed, 0x1fee, 0x1fef, 0x1ff2, 0x1ff3, 0x1ff4, 0x1ff6, 0x1ff7, 0x1ff8, 0x1ff9, 0x1ffa, 0x1ffb, 0x1ffc, 0x1ffd, 0x1ffe, 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200a, 0x200b, 0x200c, 0x200d, 0x200e, 0x200f, 0x2010, 0x2011, 0x2012, 0x2013, 0x2015, 0x2015, 0x2016, 0x2017, 0x2018, 0x2019, 0x201a, 0x201b, 0x201c, 0x201d, 0x201e, 0x201f, 0x2020, 0x2021, 0x2022, 
0x2023, 0x2024, 0x2025, 0x2026, 0x2027, 0x2028, 0x2029, 0x202a, 0x202b, 0x202c, 0x202d, 0x202e, 0x202f, 0x2030, 0x2031, 0x2032, 0x2033, 0x2034, 0x2035, 0x2036, 0x2037, 0x2038, 0x2039, 0x203a, 0x203b, 0x203c, 0x203d, 0x203e, 0x203f, 0x2040, 0x2041, 0x2042, 0x2043, 0x2044, 0x2045, 0x2046, 0x2047, 0x2048, 0x2049, 0x204a, 0x204b, 0x204c, 0x204d, 0x204e, 0x204f, 0x2050, 0x2051, 0x2052, 0x2053, 0x2054, 0x2055, 0x2056, 0x2057, 0x2058, 0x2059, 0x205a, 0x205b, 0x205c, 0x205d, 0x205e, 0x205f, 0x2060, 0x2061, 0x2062, 0x2063, 0x2064, 0x206a, 0x206b, 0x206c, 0x206d, 0x206e, 0x206f, 0x2070, 0x2071, 0x2074, 0x2075, 0x2076, 0x2077, 0x2078, 0x2079, 0x207a, 0x207b, 0x207c, 0x207d, 0x207e, 0x207f, 0x2080, 0x2081, 0x2082, 0x2083, 0x2084, 0x2085, 0x2086, 0x2087, 0x2088, 0x2089, 0x208a, 0x208b, 0x208c, 0x208d, 0x208e, 0x2090, 0x2091, 0x2092, 0x2093, 0x2094, 0x2095, 0x2096, 0x2097, 0x2098, 0x2099, 0x209a, 0x209b, 0x209c, 0x20a0, 0x20a1, 0x20a2, 0x20a3, 0x20a4, 0x20a5, 0x20a6, 0x20a7, 0x20a8, 0x20a9, 0x20aa, 0x20ab, 0x20ac, 0x20ad, 0x20ae, 0x20af, 0x20b0, 0x20b1, 0x20b2, 0x20b3, 0x20b4, 0x20b5, 0x20b8, 0x20b9, 0x20ba, 0x20d0, 0x20d1, 0x20d6, 0x20d7, 0x20db, 0x20dc, 0x20e1, 0x2100, 0x2101, 0x2102, 0x2103, 0x2104, 0x2105, 0x2106, 0x2107, 0x2108, 0x2109, 0x210b, 0x210c, 0x210d, 0x210e, 0x210f, 0x2110, 0x2111, 0x2112, 0x2113, 0x2114, 0x2115, 0x2116, 0x2117, 0x2118, 0x2119, 0x211a, 0x211b, 0x211c, 0x211d, 0x211e, 0x211f, 0x2120, 0x2121, 0x2122, 0x2123, 0x2124, 0x2125, 0x2126, 0x2127, 0x2128, 0x2129, 0x212a, 0x212b, 0x212c, 0x212d, 0x212e, 0x212f, 0x2130, 0x2131, 0x2132, 0x2133, 0x2134, 0x2135, 0x2136, 0x2137, 0x2138, 0x2139, 0x213a, 0x213b, 0x213c, 0x213d, 0x213e, 0x213f, 0x2140, 0x2141, 0x2142, 0x2143, 0x2144, 0x2145, 0x2146, 0x2147, 0x2148, 0x2149, 0x214b, 0x214e, 0x2150, 0x2151, 0x2152, 0x2153, 0x2154, 0x2155, 0x2156, 0x2157, 0x2158, 0x2159, 0x215a, 0x215b, 0x215c, 0x215d, 0x215e, 0x215f, 0x2160, 0x2161, 0x2162, 0x2163, 0x2164, 0x2165, 0x2166, 0x2167, 0x2168, 0x2169, 0x216a, 0x216b, 0x216c, 
0x216d, 0x216e, 0x216f, 0x2170, 0x2171, 0x2172, 0x2173, 0x2174, 0x2175, 0x2176, 0x2177, 0x2178, 0x2179, 0x217a, 0x217b, 0x217c, 0x217d, 0x217e, 0x217f, 0x2180, 0x2181, 0x2182, 0x2183, 0x2184, 0x2185, 0x2189, 0x2190, 0x2191, 0x2192, 0x2193, 0x2194, 0x2195, 0x2196, 0x2197, 0x2198, 0x2199, 0x219a, 0x219b, 0x219c, 0x219d, 0x219e, 0x219f, 0x21a0, 0x21a1, 0x21a2, 0x21a3, 0x21a4, 0x21a5, 0x21a6, 0x21a7, 0x21a8, 0x21a9, 0x21aa, 0x21ab, 0x21ac, 0x21ad, 0x21ae, 0x21af, 0x21b0, 0x21b1, 0x21b2, 0x21b3, 0x21b4, 0x21b5, 0x21b6, 0x21b7, 0x21b8, 0x21b9, 0x21ba, 0x21bb, 0x21bc, 0x21bd, 0x21be, 0x21bf, 0x21c0, 0x21c1, 0x21c2, 0x21c3, 0x21c4, 0x21c5, 0x21c6, 0x21c7, 0x21c8, 0x21c9, 0x21ca, 0x21cb, 0x21cc, 0x21cd, 0x21ce, 0x21cf, 0x21d0, 0x21d1, 0x21d2, 0x21d3, 0x21d4, 0x21d5, 0x21d6, 0x21d7, 0x21d8, 0x21d9, 0x21da, 0x21db, 0x21dc, 0x21dd, 0x21de, 0x21df, 0x21e0, 0x21e1, 0x21e2, 0x21e3, 0x21e4, 0x21e5, 0x21e6, 0x21e7, 0x21e8, 0x21e9, 0x21ea, 0x21eb, 0x21ec, 0x21ed, 0x21ee, 0x21ef, 0x21f0, 0x21f1, 0x21f2, 0x21f3, 0x21f4, 0x21f5, 0x21f6, 0x21f7, 0x21f8, 0x21f9, 0x21fa, 0x21fb, 0x21fc, 0x21fd, 0x21fe, 0x21ff, 0x2200, 0x2201, 0x2202, 0x2203, 0x2204, 0x2205, 0x2206, 0x2207, 0x2208, 0x2209, 0x220a, 0x220b, 0x220c, 0x220d, 0x220e, 0x220f, 0x2210, 0x2211, 0x2212, 0x2213, 0x2214, 0x2215, 0x2216, 0x2217, 0x2218, 0x2219, 0x221a, 0x221b, 0x221c, 0x221d, 0x221e, 0x221f, 0x2220, 0x2221, 0x2222, 0x2223, 0x2224, 0x2225, 0x2226, 0x2227, 0x2228, 0x2229, 0x222a, 0x222b, 0x222c, 0x222d, 0x222e, 0x222f, 0x2230, 0x2231, 0x2232, 0x2233, 0x2234, 0x2235, 0x2236, 0x2237, 0x2238, 0x2239, 0x223a, 0x223b, 0x223c, 0x223d, 0x223e, 0x223f, 0x2240, 0x2241, 0x2242, 0x2243, 0x2244, 0x2245, 0x2246, 0x2247, 0x2248, 0x2249, 0x224a, 0x224b, 0x224c, 0x224d, 0x224e, 0x224f, 0x2250, 0x2251, 0x2252, 0x2253, 0x2254, 0x2255, 0x2256, 0x2257, 0x2258, 0x2259, 0x225a, 0x225b, 0x225c, 0x225d, 0x225e, 0x225f, 0x2260, 0x2261, 0x2262, 0x2263, 0x2264, 0x2265, 0x2266, 0x2267, 0x2268, 0x2269, 0x226a, 0x226b, 0x226c, 0x226d, 0x226e, 0x226f, 
0x2270, 0x2271, 0x2272, 0x2273, 0x2274, 0x2275, 0x2276, 0x2277, 0x2278, 0x2279, 0x227a, 0x227b, 0x227c, 0x227d, 0x227e, 0x227f, 0x2280, 0x2281, 0x2282, 0x2283, 0x2284, 0x2285, 0x2286, 0x2287, 0x2288, 0x2289, 0x228a, 0x228b, 0x228c, 0x228d, 0x228e, 0x228f, 0x2290, 0x2291, 0x2292, 0x2293, 0x2294, 0x2295, 0x2296, 0x2297, 0x2298, 0x2299, 0x229a, 0x229b, 0x229c, 0x229d, 0x229e, 0x229f, 0x22a0, 0x22a1, 0x22a2, 0x22a3, 0x22a4, 0x22a5, 0x22a6, 0x22a7, 0x22a8, 0x22a9, 0x22aa, 0x22ab, 0x22ac, 0x22ad, 0x22ae, 0x22af, 0x22b0, 0x22b1, 0x22b2, 0x22b3, 0x22b4, 0x22b5, 0x22b6, 0x22b7, 0x22b8, 0x22b9, 0x22ba, 0x22bb, 0x22bc, 0x22bd, 0x22be, 0x22bf, 0x22c0, 0x22c1, 0x22c2, 0x22c3, 0x22c4, 0x22c5, 0x22c6, 0x22c7, 0x22c8, 0x22c9, 0x22ca, 0x22cb, 0x22cc, 0x22cd, 0x22ce, 0x22cf, 0x22d0, 0x22d1, 0x22d2, 0x22d3, 0x22d4, 0x22d5, 0x22d6, 0x22d7, 0x22d8, 0x22d9, 0x22da, 0x22db, 0x22dc, 0x22dd, 0x22de, 0x22df, 0x22e0, 0x22e1, 0x22e2, 0x22e3, 0x22e4, 0x22e5, 0x22e6, 0x22e7, 0x22e8, 0x22e9, 0x22ea, 0x22eb, 0x22ec, 0x22ed, 0x22ee, 0x22ef, 0x22f0, 0x22f1, 0x22f2, 0x22f3, 0x22f4, 0x22f5, 0x22f6, 0x22f7, 0x22f8, 0x22f9, 0x22fa, 0x22fb, 0x22fc, 0x22fd, 0x22fe, 0x22ff, 0x2300, 0x2301, 0x2302, 0x2303, 0x2304, 0x2305, 0x2306, 0x2307, 0x2308, 0x2309, 0x230a, 0x230b, 0x230c, 0x230d, 0x230e, 0x230f, 0x2310, 0x2311, 0x2318, 0x2319, 0x231c, 0x231d, 0x231e, 0x231f, 0x2320, 0x2321, 0x2324, 0x2325, 0x2326, 0x2327, 0x2328, 0x232b, 0x232c, 0x2373, 0x2374, 0x2375, 0x237a, 0x237d, 0x2387, 0x2394, 0x239b, 0x239c, 0x239d, 0x239e, 0x239f, 0x23a0, 0x23a1, 0x23a2, 0x23a3, 0x23a4, 0x23a5, 0x23a6, 0x23a7, 0x23a8, 0x23a9, 0x23aa, 0x23ab, 0x23ac, 0x23ad, 0x23ae, 0x23ce, 0x23cf, 0x23e3, 0x23e5, 0x23e8, 0x2422, 0x2423, 0x2460, 0x2461, 0x2462, 0x2463, 0x2464, 0x2465, 0x2466, 0x2467, 0x2468, 0x2469, 0x2500, 0x2501, 0x2502, 0x2503, 0x2504, 0x2505, 0x2506, 0x2507, 0x2508, 0x2509, 0x250a, 0x250b, 0x250c, 0x250d, 0x250e, 0x250f, 0x2510, 0x2511, 0x2512, 0x2513, 0x2514, 0x2515, 0x2516, 0x2517, 0x2518, 0x2519, 0x251a, 0x251b, 0x251c, 
0x251d, 0x251e, 0x251f, 0x2520, 0x2521, 0x2522, 0x2523, 0x2524, 0x2525, 0x2526, 0x2527, 0x2528, 0x2529, 0x252a, 0x252b, 0x252c, 0x252d, 0x252e, 0x252f, 0x2530, 0x2531, 0x2532, 0x2533, 0x2534, 0x2535, 0x2536, 0x2537, 0x2538, 0x2539, 0x253a, 0x253b, 0x253c, 0x253d, 0x253e, 0x253f, 0x2540, 0x2541, 0x2542, 0x2543, 0x2544, 0x2545, 0x2546, 0x2547, 0x2548, 0x2549, 0x254a, 0x254b, 0x254c, 0x254d, 0x254e, 0x254f, 0x2550, 0x2551, 0x2552, 0x2553, 0x2554, 0x2555, 0x2556, 0x2557, 0x2558, 0x2559, 0x255a, 0x255b, 0x255c, 0x255d, 0x255e, 0x255f, 0x2560, 0x2561, 0x2562, 0x2563, 0x2564, 0x2565, 0x2566, 0x2567, 0x2568, 0x2569, 0x256a, 0x256b, 0x256c, 0x256d, 0x256e, 0x256f, 0x2570, 0x2571, 0x2572, 0x2573, 0x2574, 0x2575, 0x2576, 0x2577, 0x2578, 0x2579, 0x257a, 0x257b, 0x257c, 0x257d, 0x257e, 0x257f, 0x2580, 0x2581, 0x2582, 0x2583, 0x2584, 0x2585, 0x2586, 0x2587, 0x2588, 0x2589, 0x258a, 0x258b, 0x258c, 0x258d, 0x258e, 0x258f, 0x2590, 0x2591, 0x2592, 0x2593, 0x2594, 0x2595, 0x2596, 0x2597, 0x2598, 0x2599, 0x259a, 0x259b, 0x259c, 0x259d, 0x259e, 0x259f, 0x25a0, 0x25a1, 0x25a2, 0x25a3, 0x25a4, 0x25a5, 0x25a6, 0x25a7, 0x25a8, 0x25a9, 0x25aa, 0x25ab, 0x25ac, 0x25ad, 0x25ae, 0x25af, 0x25b0, 0x25b1, 0x25b2, 0x25b3, 0x25b4, 0x25b5, 0x25b6, 0x25b7, 0x25b8, 0x25b9, 0x25ba, 0x25bb, 0x25bc, 0x25bd, 0x25be, 0x25bf, 0x25c0, 0x25c1, 0x25c2, 0x25c3, 0x25c4, 0x25c5, 0x25c6, 0x25c7, 0x25c8, 0x25c9, 0x25ca, 0x25cb, 0x25cc, 0x25cd, 0x25ce, 0x25cf, 0x25d0, 0x25d1, 0x25d2, 0x25d3, 0x25d4, 0x25d5, 0x25d6, 0x25d7, 0x25d8, 0x25d9, 0x25da, 0x25db, 0x25dc, 0x25dd, 0x25de, 0x25df, 0x25e0, 0x25e1, 0x25e2, 0x25e3, 0x25e4, 0x25e5, 0x25e6, 0x25e7, 0x25e8, 0x25e9, 0x25ea, 0x25eb, 0x25ec, 0x25ed, 0x25ee, 0x25ef, 0x25f0, 0x25f1, 0x25f2, 0x25f3, 0x25f4, 0x25f5, 0x25f6, 0x25f7, 0x25f8, 0x25f9, 0x25fa, 0x25fb, 0x25fc, 0x25fd, 0x25fe, 0x25ff, 0x2600, 0x2601, 0x2602, 0x2603, 0x2604, 0x2605, 0x2606, 0x2607, 0x2608, 0x2609, 0x260a, 0x260b, 0x260c, 0x260d, 0x260e, 0x260f, 0x2610, 0x2611, 0x2612, 0x2613, 0x2614, 0x2615, 0x2616, 
0x2617, 0x2618, 0x2619, 0x261a, 0x261b, 0x261c, 0x261d, 0x261e, 0x261f, 0x2620, 0x2621, 0x2622, 0x2623, 0x2624, 0x2625, 0x2626, 0x2627, 0x2628, 0x2629, 0x262a, 0x262b, 0x262c, 0x262d, 0x262e, 0x262f, 0x2630, 0x2631, 0x2632, 0x2633, 0x2634, 0x2635, 0x2636, 0x2637, 0x2638, 0x2639, 0x263a, 0x263b, 0x263c, 0x263d, 0x263e, 0x263f, 0x2640, 0x2641, 0x2642, 0x2643, 0x2644, 0x2645, 0x2646, 0x2647, 0x2648, 0x2649, 0x264a, 0x264b, 0x264c, 0x264d, 0x264e, 0x264f, 0x2650, 0x2651, 0x2652, 0x2653, 0x2654, 0x2655, 0x2656, 0x2657, 0x2658, 0x2659, 0x265a, 0x265b, 0x265c, 0x265d, 0x265e, 0x265f, 0x2660, 0x2661, 0x2662, 0x2663, 0x2664, 0x2665, 0x2666, 0x2667, 0x2668, 0x2669, 0x266a, 0x266b, 0x266c, 0x266d, 0x266e, 0x266f, 0x2670, 0x2671, 0x2672, 0x2673, 0x2674, 0x2675, 0x2676, 0x2677, 0x2678, 0x2679, 0x267a, 0x267b, 0x267c, 0x267d, 0x267e, 0x267f, 0x2680, 0x2681, 0x2682, 0x2683, 0x2684, 0x2685, 0x2686, 0x2687, 0x2688, 0x2689, 0x268a, 0x268b, 0x268c, 0x268d, 0x268e, 0x268f, 0x2690, 0x2691, 0x2692, 0x2693, 0x2694, 0x2695, 0x2696, 0x2697, 0x2698, 0x2699, 0x269a, 0x269b, 0x269c, 0x26a0, 0x26a1, 0x26a2, 0x26a3, 0x26a4, 0x26a5, 0x26a6, 0x26a7, 0x26a8, 0x26a9, 0x26aa, 0x26ab, 0x26ac, 0x26ad, 0x26ae, 0x26af, 0x26b0, 0x26b1, 0x26b2, 0x26b3, 0x26b4, 0x26b5, 0x26b6, 0x26b7, 0x26b8, 0x26c0, 0x26c1, 0x26c2, 0x26c3, 0x26e2, 0x2701, 0x2702, 0x2703, 0x2704, 0x2706, 0x2707, 0x2708, 0x2709, 0x270c, 0x270d, 0x270e, 0x270f, 0x2710, 0x2711, 0x2712, 0x2713, 0x2714, 0x2715, 0x2716, 0x2717, 0x2718, 0x2719, 0x271a, 0x271b, 0x271c, 0x271d, 0x271e, 0x271f, 0x2720, 0x2721, 0x2722, 0x2723, 0x2724, 0x2725, 0x2726, 0x2727, 0x2729, 0x272a, 0x272b, 0x272c, 0x272d, 0x272e, 0x272f, 0x2730, 0x2731, 0x2732, 0x2733, 0x2734, 0x2735, 0x2736, 0x2737, 0x2738, 0x2739, 0x273a, 0x273b, 0x273c, 0x273d, 0x273e, 0x273f, 0x2740, 0x2741, 0x2742, 0x2743, 0x2744, 0x2745, 0x2746, 0x2747, 0x2748, 0x2749, 0x274a, 0x274b, 0x274d, 0x274f, 0x2750, 0x2751, 0x2752, 0x2756, 0x2758, 0x2759, 0x275a, 0x275b, 0x275c, 0x275d, 0x275e, 0x2761, 0x2762, 
0x2763, 0x2764, 0x2765, 0x2766, 0x2767, 0x2768, 0x2769, 0x276a, 0x276b, 0x276c, 0x276d, 0x276e, 0x276f, 0x2770, 0x2771, 0x2772, 0x2773, 0x2774, 0x2775, 0x2776, 0x2777, 0x2778, 0x2779, 0x277a, 0x277b, 0x277c, 0x277d, 0x277e, 0x277f, 0x2780, 0x2781, 0x2782, 0x2783, 0x2784, 0x2785, 0x2786, 0x2787, 0x2788, 0x2789, 0x278a, 0x278b, 0x278c, 0x278d, 0x278e, 0x278f, 0x2790, 0x2791, 0x2792, 0x2793, 0x2794, 0x2798, 0x2799, 0x279a, 0x279b, 0x279c, 0x279d, 0x279e, 0x279f, 0x27a0, 0x27a1, 0x27a2, 0x27a3, 0x27a4, 0x27a5, 0x27a6, 0x27a7, 0x27a8, 0x27a9, 0x27aa, 0x27ab, 0x27ac, 0x27ad, 0x27ae, 0x27af, 0x27b1, 0x27b2, 0x27b3, 0x27b4, 0x27b5, 0x27b6, 0x27b7, 0x27b8, 0x27b9, 0x27ba, 0x27bb, 0x27bc, 0x27bd, 0x27be, 0x27c5, 0x27c6, 0x27e0, 0x27e6, 0x27e7, 0x27e8, 0x27e9, 0x27ea, 0x27eb, 0x27f0, 0x27f1, 0x27f2, 0x27f3, 0x27f4, 0x27f5, 0x27f6, 0x27f7, 0x27f8, 0x27f9, 0x27fa, 0x27fb, 0x27fc, 0x27fd, 0x27fe, 0x27ff, 0x2800, 0x2801, 0x2802, 0x2803, 0x2804, 0x2805, 0x2806, 0x2807, 0x2808, 0x2809, 0x280a, 0x280b, 0x280c, 0x280d, 0x280e, 0x280f, 0x2810, 0x2811, 0x2812, 0x2813, 0x2814, 0x2815, 0x2816, 0x2817, 0x2818, 0x2819, 0x281a, 0x281b, 0x281c, 0x281d, 0x281e, 0x281f, 0x2820, 0x2821, 0x2822, 0x2823, 0x2824, 0x2825, 0x2826, 0x2827, 0x2828, 0x2829, 0x282a, 0x282b, 0x282c, 0x282d, 0x282e, 0x282f, 0x2830, 0x2831, 0x2832, 0x2833, 0x2834, 0x2835, 0x2836, 0x2837, 0x2838, 0x2839, 0x283a, 0x283b, 0x283c, 0x283d, 0x283e, 0x283f, 0x2840, 0x2841, 0x2842, 0x2843, 0x2844, 0x2845, 0x2846, 0x2847, 0x2848, 0x2849, 0x284a, 0x284b, 0x284c, 0x284d, 0x284e, 0x284f, 0x2850, 0x2851, 0x2852, 0x2853, 0x2854, 0x2855, 0x2856, 0x2857, 0x2858, 0x2859, 0x285a, 0x285b, 0x285c, 0x285d, 0x285e, 0x285f, 0x2860, 0x2861, 0x2862, 0x2863, 0x2864, 0x2865, 0x2866, 0x2867, 0x2868, 0x2869, 0x286a, 0x286b, 0x286c, 0x286d, 0x286e, 0x286f, 0x2870, 0x2871, 0x2872, 0x2873, 0x2874, 0x2875, 0x2876, 0x2877, 0x2878, 0x2879, 0x287a, 0x287b, 0x287c, 0x287d, 0x287e, 0x287f, 0x2880, 0x2881, 0x2882, 0x2883, 0x2884, 0x2885, 0x2886, 0x2887, 0x2888, 
0x2889, 0x288a, 0x288b, 0x288c, 0x288d, 0x288e, 0x288f, 0x2890, 0x2891, 0x2892, 0x2893, 0x2894, 0x2895, 0x2896, 0x2897, 0x2898, 0x2899, 0x289a, 0x289b, 0x289c, 0x289d, 0x289e, 0x289f, 0x28a0, 0x28a1, 0x28a2, 0x28a3, 0x28a4, 0x28a5, 0x28a6, 0x28a7, 0x28a8, 0x28a9, 0x28aa, 0x28ab, 0x28ac, 0x28ad, 0x28ae, 0x28af, 0x28b0, 0x28b1, 0x28b2, 0x28b3, 0x28b4, 0x28b5, 0x28b6, 0x28b7, 0x28b8, 0x28b9, 0x28ba, 0x28bb, 0x28bc, 0x28bd, 0x28be, 0x28bf, 0x28c0, 0x28c1, 0x28c2, 0x28c3, 0x28c4, 0x28c5, 0x28c6, 0x28c7, 0x28c8, 0x28c9, 0x28ca, 0x28cb, 0x28cc, 0x28cd, 0x28ce, 0x28cf, 0x28d0, 0x28d1, 0x28d2, 0x28d3, 0x28d4, 0x28d5, 0x28d6, 0x28d7, 0x28d8, 0x28d9, 0x28da, 0x28db, 0x28dc, 0x28dd, 0x28de, 0x28df, 0x28e0, 0x28e1, 0x28e2, 0x28e3, 0x28e4, 0x28e5, 0x28e6, 0x28e7, 0x28e8, 0x28e9, 0x28ea, 0x28eb, 0x28ec, 0x28ed, 0x28ee, 0x28ef, 0x28f0, 0x28f1, 0x28f2, 0x28f3, 0x28f4, 0x28f5, 0x28f6, 0x28f7, 0x28f8, 0x28f9, 0x28fa, 0x28fb, 0x28fc, 0x28fd, 0x28fe, 0x28ff, 0x2906, 0x2907, 0x290a, 0x290b, 0x2940, 0x2941, 0x2983, 0x2984, 0x29ce, 0x29cf, 0x29d0, 0x29d1, 0x29d2, 0x29d3, 0x29d4, 0x29d5, 0x29eb, 0x29fa, 0x29fb, 0x2a00, 0x2a01, 0x2a02, 0x2a0c, 0x2a0d, 0x2a0e, 0x2a0f, 0x2a10, 0x2a11, 0x2a12, 0x2a13, 0x2a14, 0x2a15, 0x2a16, 0x2a17, 0x2a18, 0x2a19, 0x2a1a, 0x2a1b, 0x2a1c, 0x2a2f, 0x2a6a, 0x2a6b, 0x2a7d, 0x2a7e, 0x2a7f, 0x2a80, 0x2a81, 0x2a82, 0x2a83, 0x2a84, 0x2a85, 0x2a86, 0x2a87, 0x2a88, 0x2a89, 0x2a8a, 0x2a8b, 0x2a8c, 0x2a8d, 0x2a8e, 0x2a8f, 0x2a90, 0x2a91, 0x2a92, 0x2a93, 0x2a94, 0x2a95, 0x2a96, 0x2a97, 0x2a98, 0x2a99, 0x2a9a, 0x2a9b, 0x2a9c, 0x2a9d, 0x2a9e, 0x2a9f, 0x2aa0, 0x2aae, 0x2aaf, 0x2ab0, 0x2ab1, 0x2ab2, 0x2ab3, 0x2ab4, 0x2ab5, 0x2ab6, 0x2ab7, 0x2ab8, 0x2ab9, 0x2aba, 0x2af9, 0x2afa, 0x2b00, 0x2b01, 0x2b02, 0x2b03, 0x2b04, 0x2b05, 0x2b06, 0x2b07, 0x2b08, 0x2b09, 0x2b0a, 0x2b0b, 0x2b0c, 0x2b0d, 0x2b0e, 0x2b0f, 0x2b10, 0x2b11, 0x2b12, 0x2b13, 0x2b14, 0x2b15, 0x2b16, 0x2b17, 0x2b18, 0x2b19, 0x2b1a, 0x2b1f, 0x2b20, 0x2b21, 0x2b22, 0x2b23, 0x2b24, 0x2b53, 0x2b54, 0x2c60, 0x2c61, 0x2c62, 
0x2c63, 0x2c64, 0x2c65, 0x2c66, 0x2c67, 0x2c68, 0x2c69, 0x2c6a, 0x2c6b, 0x2c6c, 0x2c6d, 0x2c6e, 0x2c6f, 0x2c70, 0x2c71, 0x2c72, 0x2c73, 0x2c74, 0x2c75, 0x2c76, 0x2c77, 0x2c79, 0x2c7a, 0x2c7b, 0x2c7c, 0x2c7d, 0x2c7e, 0x2c7f, 0x2d00, 0x2d01, 0x2d02, 0x2d03, 0x2d04, 0x2d05, 0x2d06, 0x2d07, 0x2d08, 0x2d09, 0x2d0a, 0x2d0b, 0x2d0c, 0x2d0d, 0x2d0e, 0x2d0f, 0x2d10, 0x2d11, 0x2d12, 0x2d13, 0x2d14, 0x2d15, 0x2d16, 0x2d17, 0x2d18, 0x2d19, 0x2d1a, 0x2d1b, 0x2d1c, 0x2d1d, 0x2d1e, 0x2d1f, 0x2d20, 0x2d21, 0x2d22, 0x2d23, 0x2d24, 0x2d25, 0x2d30, 0x2d31, 0x2d32, 0x2d33, 0x2d34, 0x2d35, 0x2d36, 0x2d37, 0x2d38, 0x2d39, 0x2d3a, 0x2d3b, 0x2d3c, 0x2d3d, 0x2d3e, 0x2d3f, 0x2d40, 0x2d41, 0x2d42, 0x2d43, 0x2d44, 0x2d45, 0x2d46, 0x2d47, 0x2d48, 0x2d49, 0x2d4a, 0x2d4b, 0x2d4c, 0x2d4d, 0x2d4e, 0x2d4f, 0x2d50, 0x2d51, 0x2d52, 0x2d53, 0x2d54, 0x2d55, 0x2d56, 0x2d57, 0x2d58, 0x2d59, 0x2d5a, 0x2d5b, 0x2d5c, 0x2d5d, 0x2d5e, 0x2d5f, 0x2d60, 0x2d61, 0x2d62, 0x2d63, 0x2d64, 0x2d65, 0x2d6f, 0x2e18, 0x2e1e, 0x2e22, 0x2e23, 0x2e24, 0x2e25, 0x2e2e, 0x4dc0, 0x4dc1, 0x4dc2, 0x4dc3, 0x4dc4, 0x4dc5, 0x4dc6, 0x4dc7, 0x4dc8, 0x4dc9, 0x4dca, 0x4dcb, 0x4dcc, 0x4dcd, 0x4dce, 0x4dcf, 0x4dd0, 0x4dd1, 0x4dd2, 0x4dd3, 0x4dd4, 0x4dd5, 0x4dd6, 0x4dd7, 0x4dd8, 0x4dd9, 0x4dda, 0x4ddb, 0x4ddc, 0x4ddd, 0x4dde, 0x4ddf, 0x4de0, 0x4de1, 0x4de2, 0x4de3, 0x4de4, 0x4de5, 0x4de6, 0x4de7, 0x4de8, 0x4de9, 0x4dea, 0x4deb, 0x4dec, 0x4ded, 0x4dee, 0x4def, 0x4df0, 0x4df1, 0x4df2, 0x4df3, 0x4df4, 0x4df5, 0x4df6, 0x4df7, 0x4df8, 0x4df9, 0x4dfa, 0x4dfb, 0x4dfc, 0x4dfd, 0x4dfe, 0x4dff, 0xa4d0, 0xa4d1, 0xa4d2, 0xa4d3, 0xa4d4, 0xa4d5, 0xa4d6, 0xa4d7, 0xa4d8, 0xa4d9, 0xa4da, 0xa4db, 0xa4dc, 0xa4dd, 0xa4de, 0xa4df, 0xa4e0, 0xa4e1, 0xa4e2, 0xa4e3, 0xa4e4, 0xa4e5, 0xa4e6, 0xa4e7, 0xa4e8, 0xa4e9, 0xa4ea, 0xa4eb, 0xa4ec, 0xa4ed, 0xa4ee, 0xa4ef, 0xa4f0, 0xa4f1, 0xa4f2, 0xa4f3, 0xa4f4, 0xa4f5, 0xa4f6, 0xa4f7, 0xa4f8, 0xa4f9, 0xa4fa, 0xa4fb, 0xa4fc, 0xa4fd, 0xa4fe, 0xa4ff, 0xa644, 0xa645, 0xa646, 0xa647, 0xa64c, 0xa64d, 0xa650, 0xa651, 0xa654, 0xa655, 
0xa656, 0xa657, 0xa662, 0xa663, 0xa664, 0xa665, 0xa666, 0xa667, 0xa668, 0xa669, 0xa66a, 0xa66b, 0xa66c, 0xa66d, 0xa66e, 0xa68a, 0xa68b, 0xa68c, 0xa68d, 0xa694, 0xa695, 0xa708, 0xa709, 0xa70a, 0xa70b, 0xa70c, 0xa70d, 0xa70e, 0xa70f, 0xa710, 0xa711, 0xa712, 0xa713, 0xa714, 0xa715, 0xa716, 0xa71b, 0xa71c, 0xa71d, 0xa71e, 0xa71f, 0xa722, 0xa723, 0xa724, 0xa725, 0xa726, 0xa727, 0xa728, 0xa729, 0xa72a, 0xa72b, 0xa730, 0xa731, 0xa732, 0xa733, 0xa734, 0xa735, 0xa736, 0xa737, 0xa738, 0xa739, 0xa73a, 0xa73b, 0xa73c, 0xa73d, 0xa73e, 0xa73f, 0xa740, 0xa741, 0xa746, 0xa747, 0xa748, 0xa749, 0xa74a, 0xa74b, 0xa74e, 0xa74f, 0xa750, 0xa751, 0xa752, 0xa753, 0xa756, 0xa757, 0xa764, 0xa765, 0xa766, 0xa767, 0xa780, 0xa781, 0xa782, 0xa783, 0xa789, 0xa78a, 0xa78b, 0xa78c, 0xa78d, 0xa78e, 0xa790, 0xa791, 0xa7a0, 0xa7a1, 0xa7a2, 0xa7a3, 0xa7a4, 0xa7a5, 0xa7a6, 0xa7a7, 0xa7a8, 0xa7a9, 0xa7aa, 0xa7fa, 0xa7fb, 0xa7fc, 0xa7fd, 0xa7fe, 0xa7ff, 0xef00, 0xef01, 0xef02, 0xef03, 0xef04, 0xef05, 0xef06, 0xef07, 0xef08, 0xef09, 0xef0a, 0xef0b, 0xef0c, 0xef0d, 0xef0e, 0xef0f, 0xef10, 0xef11, 0xef12, 0xef13, 0xef14, 0xef15, 0xef16, 0xef17, 0xef18, 0xef19, 0xf000, 0xf001, 0xf002, 0xf003, 0xf400, 0xf401, 0xf402, 0xf403, 0xf404, 0xf405, 0xf406, 0xf407, 0xf408, 0xf409, 0xf40a, 0xf40b, 0xf40c, 0xf40d, 0xf40e, 0xf40f, 0xf410, 0xf411, 0xf412, 0xf413, 0xf414, 0xf415, 0xf416, 0xf417, 0xf418, 0xf419, 0xf41a, 0xf41b, 0xf41c, 0xf41d, 0xf41e, 0xf41f, 0xf420, 0xf421, 0xf422, 0xf423, 0xf424, 0xf425, 0xf426, 0xf428, 0xf429, 0xf42a, 0xf42b, 0xf42c, 0xf42d, 0xf42e, 0xf42f, 0xf430, 0xf431, 0xf432, 0xf433, 0xf434, 0xf435, 0xf436, 0xf437, 0xf438, 0xf439, 0xf43a, 0xf43b, 0xf43c, 0xf43d, 0xf43e, 0xf43f, 0xf440, 0xf441, 0xf6c5, 0xfb00, 0xfb01, 0xfb02, 0xfb03, 0xfb04, 0xfb05, 0xfb06, 0xfb13, 0xfb14, 0xfb15, 0xfb16, 0xfb17, 0xfb1d, 0xfb1e, 0xfb1f, 0xfb20, 0xfb21, 0xfb22, 0xfb23, 0xfb24, 0xfb25, 0xfb26, 0xfb27, 0xfb28, 0xfb29, 0xfb2a, 0xfb2b, 0xfb2c, 0xfb2d, 0xfb2e, 0xfb2f, 0xfb30, 0xfb31, 0xfb32, 0xfb33, 0xfb34, 0xfb35, 0xfb36, 
0xfb38, 0xfb39, 0xfb3a, 0xfb3b, 0xfb3c, 0xfb3e, 0xfb40, 0xfb41, 0xfb43, 0xfb44, 0xfb46, 0xfb47, 0xfb48, 0xfb49, 0xfb4a, 0xfb4b, 0xfb4c, 0xfb4d, 0xfb4e, 0xfb4f, 0xfb52, 0xfb53, 0xfb54, 0xfb55, 0xfb56, 0xfb57, 0xfb58, 0xfb59, 0xfb5a, 0xfb5b, 0xfb5c, 0xfb5d, 0xfb5e, 0xfb5f, 0xfb60, 0xfb61, 0xfb62, 0xfb63, 0xfb64, 0xfb65, 0xfb66, 0xfb67, 0xfb68, 0xfb69, 0xfb6a, 0xfb6b, 0xfb6c, 0xfb6d, 0xfb6e, 0xfb6f, 0xfb70, 0xfb71, 0xfb72, 0xfb73, 0xfb74, 0xfb75, 0xfb76, 0xfb77, 0xfb78, 0xfb79, 0xfb7a, 0xfb7b, 0xfb7c, 0xfb7d, 0xfb7e, 0xfb7f, 0xfb80, 0xfb81, 0xfb82, 0xfb83, 0xfb84, 0xfb85, 0xfb86, 0xfb87, 0xfb88, 0xfb89, 0xfb8a, 0xfb8b, 0xfb8c, 0xfb8d, 0xfb8e, 0xfb8f, 0xfb90, 0xfb91, 0xfb92, 0xfb93, 0xfb94, 0xfb95, 0xfb96, 0xfb97, 0xfb98, 0xfb99, 0xfb9a, 0xfb9b, 0xfb9c, 0xfb9d, 0xfb9e, 0xfb9f, 0xfba0, 0xfba1, 0xfba2, 0xfba3, 0xfbaa, 0xfbab, 0xfbac, 0xfbad, 0xfbd3, 0xfbd4, 0xfbd5, 0xfbd6, 0xfbd9, 0xfbda, 0xfbe8, 0xfbe9, 0xfbfc, 0xfbfd, 0xfbfe, 0xfbff, 0xfe00, 0xfe01, 0xfe02, 0xfe03, 0xfe04, 0xfe05, 0xfe06, 0xfe07, 0xfe08, 0xfe09, 0xfe0a, 0xfe0b, 0xfe0c, 0xfe0d, 0xfe0e, 0xfe0f, 0xfe20, 0xfe21, 0xfe22, 0xfe23, 0xfe70, 0xfe71, 0xfe72, 0xfe73, 0xfe74, 0xfe76, 0xfe77, 0xfe78, 0xfe79, 0xfe7a, 0xfe7b, 0xfe7c, 0xfe7d, 0xfe7e, 0xfe7f, 0xfe80, 0xfe81, 0xfe82, 0xfe83, 0xfe84, 0xfe85, 0xfe86, 0xfe87, 0xfe88, 0xfe89, 0xfe8a, 0xfe8b, 0xfe8c, 0xfe8d, 0xfe8e, 0xfe8f, 0xfe90, 0xfe91, 0xfe92, 0xfe93, 0xfe94, 0xfe95, 0xfe96, 0xfe97, 0xfe98, 0xfe99, 0xfe9a, 0xfe9b, 0xfe9c, 0xfe9d, 0xfe9e, 0xfe9f, 0xfea0, 0xfea1, 0xfea2, 0xfea3, 0xfea4, 0xfea5, 0xfea6, 0xfea7, 0xfea8, 0xfea9, 0xfeaa, 0xfeab, 0xfeac, 0xfead, 0xfeae, 0xfeaf, 0xfeb0, 0xfeb1, 0xfeb2, 0xfeb3, 0xfeb4, 0xfeb5, 0xfeb6, 0xfeb7, 0xfeb8, 0xfeb9, 0xfeba, 0xfebb, 0xfebc, 0xfebd, 0xfebe, 0xfebf, 0xfec0, 0xfec1, 0xfec2, 0xfec3, 0xfec4, 0xfec5, 0xfec6, 0xfec7, 0xfec8, 0xfec9, 0xfeca, 0xfecb, 0xfecc, 0xfecd, 0xfece, 0xfecf, 0xfed0, 0xfed1, 0xfed2, 0xfed3, 0xfed4, 0xfed5, 0xfed6, 0xfed7, 0xfed8, 0xfed9, 0xfeda, 0xfedb, 0xfedc, 0xfedd, 0xfede, 0xfedf, 0xfee0, 
0xfee1, 0xfee2, 0xfee3, 0xfee4, 0xfee5, 0xfee6, 0xfee7, 0xfee8, 0xfee9, 0xfeea, 0xfeeb, 0xfeec, 0xfeed, 0xfeee, 0xfeef, 0xfef0, 0xfef1, 0xfef2, 0xfef3, 0xfef4, 0xfef5, 0xfef6, 0xfef7, 0xfef8, 0xfef9, 0xfefa, 0xfefb, 0xfefc, 0xfeff, 0xfff9, 0xfffa, 0xfffb, 0xfffc, 0xfffd, 0x10300, 0x10301, 0x10302, 0x10303, 0x10304, 0x10305, 0x10306, 0x10307, 0x10308, 0x10309, 0x1030a, 0x1030b, 0x1030c, 0x1030d, 0x1030e, 0x1030f, 0x10310, 0x10311, 0x10312, 0x10313, 0x10314, 0x10315, 0x10316, 0x10317, 0x10318, 0x10319, 0x1031a, 0x1031b, 0x1031c, 0x1031d, 0x1031e, 0x10320, 0x10321, 0x10322, 0x10323, 0x1d300, 0x1d301, 0x1d302, 0x1d303, 0x1d304, 0x1d305, 0x1d306, 0x1d307, 0x1d308, 0x1d309, 0x1d30a, 0x1d30b, 0x1d30c, 0x1d30d, 0x1d30e, 0x1d30f, 0x1d310, 0x1d311, 0x1d312, 0x1d313, 0x1d314, 0x1d315, 0x1d316, 0x1d317, 0x1d318, 0x1d319, 0x1d31a, 0x1d31b, 0x1d31c, 0x1d31d, 0x1d31e, 0x1d31f, 0x1d320, 0x1d321, 0x1d322, 0x1d323, 0x1d324, 0x1d325, 0x1d326, 0x1d327, 0x1d328, 0x1d329, 0x1d32a, 0x1d32b, 0x1d32c, 0x1d32d, 0x1d32e, 0x1d32f, 0x1d330, 0x1d331, 0x1d332, 0x1d333, 0x1d334, 0x1d335, 0x1d336, 0x1d337, 0x1d338, 0x1d339, 0x1d33a, 0x1d33b, 0x1d33c, 0x1d33d, 0x1d33e, 0x1d33f, 0x1d340, 0x1d341, 0x1d342, 0x1d343, 0x1d344, 0x1d345, 0x1d346, 0x1d347, 0x1d348, 0x1d349, 0x1d34a, 0x1d34b, 0x1d34c, 0x1d34d, 0x1d34e, 0x1d34f, 0x1d350, 0x1d351, 0x1d352, 0x1d353, 0x1d354, 0x1d355, 0x1d356, 0x1d538, 0x1d539, 0x1d53b, 0x1d53c, 0x1d53d, 0x1d53e, 0x1d540, 0x1d541, 0x1d542, 0x1d543, 0x1d544, 0x1d546, 0x1d54a, 0x1d54b, 0x1d54c, 0x1d54d, 0x1d54e, 0x1d54f, 0x1d550, 0x1d552, 0x1d553, 0x1d554, 0x1d555, 0x1d556, 0x1d557, 0x1d558, 0x1d559, 0x1d55a, 0x1d55b, 0x1d55c, 0x1d55d, 0x1d55e, 0x1d55f, 0x1d560, 0x1d561, 0x1d562, 0x1d563, 0x1d564, 0x1d565, 0x1d566, 0x1d567, 0x1d568, 0x1d569, 0x1d56a, 0x1d56b, 0x1d5a0, 0x1d5a1, 0x1d5a2, 0x1d5a3, 0x1d5a4, 0x1d5a5, 0x1d5a6, 0x1d5a7, 0x1d5a8, 0x1d5a9, 0x1d5aa, 0x1d5ab, 0x1d5ac, 0x1d5ad, 0x1d5ae, 0x1d5af, 0x1d5b0, 0x1d5b1, 0x1d5b2, 0x1d5b3, 0x1d5b4, 0x1d5b5, 0x1d5b6, 0x1d5b7, 0x1d5b8, 
0x1d5b9, 0x1d5ba, 0x1d5bb, 0x1d5bc, 0x1d5bd, 0x1d5be, 0x1d5bf, 0x1d5c0, 0x1d5c1, 0x1d5c2, 0x1d5c3, 0x1d5c4, 0x1d5c5, 0x1d5c6, 0x1d5c7, 0x1d5c8, 0x1d5c9, 0x1d5ca, 0x1d5cb, 0x1d5cc, 0x1d5cd, 0x1d5ce, 0x1d5cf, 0x1d5d0, 0x1d5d1, 0x1d5d2, 0x1d5d3, 0x1d7d8, 0x1d7d9, 0x1d7da, 0x1d7db, 0x1d7dc, 0x1d7dd, 0x1d7de, 0x1d7df, 0x1d7e0, 0x1d7e1, 0x1d7e2, 0x1d7e3, 0x1d7e4, 0x1d7e5, 0x1d7e6, 0x1d7e7, 0x1d7e8, 0x1d7e9, 0x1d7ea, 0x1d7eb, 0x1f030, 0x1f031, 0x1f032, 0x1f033, 0x1f034, 0x1f035, 0x1f036, 0x1f037, 0x1f038, 0x1f039, 0x1f03a, 0x1f03b, 0x1f03c, 0x1f03d, 0x1f03e, 0x1f03f, 0x1f040, 0x1f041, 0x1f042, 0x1f043, 0x1f044, 0x1f045, 0x1f046, 0x1f047, 0x1f048, 0x1f049, 0x1f04a, 0x1f04b, 0x1f04c, 0x1f04d, 0x1f04e, 0x1f04f, 0x1f050, 0x1f051, 0x1f052, 0x1f053, 0x1f054, 0x1f055, 0x1f056, 0x1f057, 0x1f058, 0x1f059, 0x1f05a, 0x1f05b, 0x1f05c, 0x1f05d, 0x1f05e, 0x1f05f, 0x1f060, 0x1f061, 0x1f062, 0x1f063, 0x1f064, 0x1f065, 0x1f066, 0x1f067, 0x1f068, 0x1f069, 0x1f06a, 0x1f06b, 0x1f06c, 0x1f06d, 0x1f06e, 0x1f06f, 0x1f070, 0x1f071, 0x1f072, 0x1f073, 0x1f074, 0x1f075, 0x1f076, 0x1f077, 0x1f078, 0x1f079, 0x1f07a, 0x1f07b, 0x1f07c, 0x1f07d, 0x1f07e, 0x1f07f, 0x1f080, 0x1f081, 0x1f082, 0x1f083, 0x1f084, 0x1f085, 0x1f086, 0x1f087, 0x1f088, 0x1f089, 0x1f08a, 0x1f08b, 0x1f08c, 0x1f08d, 0x1f08e, 0x1f08f, 0x1f090, 0x1f091, 0x1f092, 0x1f093, 0x1f0a0, 0x1f0a1, 0x1f0a2, 0x1f0a3, 0x1f0a4, 0x1f0a5, 0x1f0a6, 0x1f0a7, 0x1f0a8, 0x1f0a9, 0x1f0aa, 0x1f0ab, 0x1f0ac, 0x1f0ad, 0x1f0ae, 0x1f0b1, 0x1f0b2, 0x1f0b3, 0x1f0b4, 0x1f0b5, 0x1f0b6, 0x1f0b7, 0x1f0b8, 0x1f0b9, 0x1f0ba, 0x1f0bb, 0x1f0bc, 0x1f0bd, 0x1f0be, 0x1f0c1, 0x1f0c2, 0x1f0c3, 0x1f0c4, 0x1f0c5, 0x1f0c6, 0x1f0c7, 0x1f0c8, 0x1f0c9, 0x1f0ca, 0x1f0cb, 0x1f0cc, 0x1f0cd, 0x1f0ce, 0x1f0cf, 0x1f0d1, 0x1f0d2, 0x1f0d3, 0x1f0d4, 0x1f0d5, 0x1f0d6, 0x1f0d7, 0x1f0d8, 0x1f0d9, 0x1f0da, 0x1f0db, 0x1f0dc, 0x1f0dd, 0x1f0de, 0x1f0df, 0x1f42d, 0x1f42e, 0x1f431, 0x1f435, 0x1f600, 0x1f601, 0x1f602, 0x1f603, 0x1f604, 0x1f605, 0x1f606, 0x1f607, 0x1f608, 0x1f609, 0x1f60a, 0x1f60b, 
0x1f60c, 0x1f60d, 0x1f60e, 0x1f60f, 0x1f610, 0x1f611, 0x1f612, 0x1f613, 0x1f614, 0x1f615, 0x1f616, 0x1f617, 0x1f618, 0x1f619, 0x1f61a, 0x1f61b, 0x1f61c, 0x1f61d, 0x1f61e, 0x1f61f, 0x1f620, 0x1f621, 0x1f622, 0x1f623, 0x1f625, 0x1f626, 0x1f627, 0x1f628, 0x1f629, 0x1f62a, 0x1f62b, 0x1f62d, 0x1f62e, 0x1f62f, 0x1f630, 0x1f631, 0x1f632, 0x1f633, 0x1f634, 0x1f635, 0x1f636, 0x1f637, 0x1f638, 0x1f639, 0x1f63a, 0x1f63b, 0x1f63c, 0x1f63d, 0x1f63e, 0x1f63f, 0x1f640 )) # Cache of open fonts FONT_CACHE = {} def is_base(text): ''' Checks whether text should use CJK fonts. ''' return min([ord(char) in BASE_CHARS for char in text]) def get_font(size, bold=False, base_font=True): ''' Returns PIL font object matching parameters. ''' cache_key = '%d-%s-%s' % (size, bold, base_font) if cache_key not in FONT_CACHE: if base_font: if bold: name = 'DejaVuSans-Bold.ttf' else: name = 'DejaVuSans.ttf' else: name = 'DroidSansFallback.ttf' FONT_CACHE[cache_key] = ImageFont.truetype( os.path.join(appsettings.TTF_PATH, name), size ) return FONT_CACHE[cache_key]<|fim▁end|>
0x14d8, 0x14d9, 0x14da, 0x14db, 0x14dc, 0x14dd, 0x14de, 0x14df, 0x14e0, 0x14e1, 0x14e2, 0x14e3, 0x14e4, 0x14e5, 0x14e6, 0x14e7, 0x14e8, 0x14e9,
<|file_name|>passport.js<|end_file_name|><|fim▁begin|>// load all the things we need var LocalStrategy = require('passport-local').Strategy; var FacebookStrategy = require('passport-facebook').Strategy; var TwitterStrategy = require('passport-twitter').Strategy; var GoogleStrategy = require('passport-google-oauth').OAuth2Strategy; // load up the user model var User = require('../app/models/user'); // load the auth variables var configAuth = require('./auth'); // use this one for testing module.exports = function(passport) { // ========================================================================= // passport session setup ================================================== // ========================================================================= // required for persistent login sessions // passport needs ability to serialize and unserialize users out of session // used to serialize the user for the session passport.serializeUser(function(user, done) { done(null, user.id); }); // used to deserialize the user passport.deserializeUser(function(id, done) { User.findById(id, function(err, user) { done(err, user); }); }); // ========================================================================= // LOCAL LOGIN ============================================================= // ========================================================================= passport.use('local-login', new LocalStrategy({ // by default, local strategy uses username and password, we will override with email usernameField : 'email', passwordField : 'password', passReqToCallback : true // allows us to pass in the req from our route (lets us check if a user is logged in or not) }, function(req, email, password, done) { // asynchronous process.nextTick(function() { User.findOne({ 'local.email' : email }, function(err, user) { // if there are any errors, return the error if (err) return done(err); // if no user is found, return the message if (!user) return done(null, false, 
req.flash('loginMessage', 'No user found.')); if (!user.validPassword(password)) return done(null, false, req.flash('loginMessage', 'Oops! Wrong password.')); // all is well, return user else return done(null, user); }); }); })); // ========================================================================= // LOCAL SIGNUP ============================================================ // ========================================================================= passport.use('local-signup', new LocalStrategy({ // by default, local strategy uses username and password, we will override with email usernameField : 'email', passwordField : 'password', passReqToCallback : true // allows us to pass in the req from our route (lets us check if a user is logged in or not) }, function(req, email, password, done) { // asynchronous process.nextTick(function() { // check if the user is already logged ina if (!req.user) { User.findOne({ 'local.email' : email }, function(err, user) { // if there are any errors, return the error if (err) return done(err); // check to see if theres already a user with that email if (user) { return done(null, false, req.flash('signupMessage', 'That email is already taken.')); } else { // create the user var newUser = new User(); newUser.local.email = email; newUser.local.password = newUser.generateHash(password); newUser.save(function(err) { if (err) throw err; return done(null, newUser); }); } }); } else { var user = req.user; user.local.email = email; user.local.password = user.generateHash(password); user.save(function(err) { if (err) throw err; return done(null, user); }); } }); })); // ========================================================================= // FACEBOOK ================================================================ // ========================================================================= passport.use(new FacebookStrategy({ clientID : configAuth.facebookAuth.clientID, clientSecret : configAuth.facebookAuth.clientSecret, 
callbackURL : configAuth.facebookAuth.callbackURL, passReqToCallback : true // allows us to pass in the req from our route (lets us check if a user is logged in or not) }, function(req, token, refreshToken, profile, done) { // asynchronous process.nextTick(function() { // check if the user is already logged in if (!req.user) { User.findOne({ 'facebook.id' : profile.id }, function(err, user) { if (err) return done(err); if (user) { // if there is a user id already but no token (user was linked at one point and then removed) if (!user.facebook.token) { user.facebook.token = token; user.facebook.name = profile.name.givenName + ' ' + profile.name.familyName; user.facebook.email = profile.emails[0].value; user.save(function(err) { if (err) throw err; return done(null, user); }); } return done(null, user); // user found, return that user } else { // if there is no user, create them var newUser = new User(); newUser.facebook.id = profile.id; newUser.facebook.token = token; newUser.facebook.name = profile.name.givenName + ' ' + profile.name.familyName; newUser.facebook.email = profile.emails[0].value; newUser.save(function(err) { if (err) throw err; return done(null, newUser); }); } }); } else { // user already exists and is logged in, we have to link accounts var user = req.user; // pull the user out of the session user.facebook.id = profile.id; user.facebook.token = token; user.facebook.name = profile.name.givenName + ' ' + profile.name.familyName; user.facebook.email = profile.emails[0].value; user.save(function(err) { if (err) throw err; return done(null, user); }); } }); })); // ========================================================================= // TWITTER ================================================================= // ========================================================================= passport.use(new TwitterStrategy({ consumerKey : configAuth.twitterAuth.consumerKey, consumerSecret : configAuth.twitterAuth.consumerSecret, callbackURL : 
configAuth.twitterAuth.callbackURL, passReqToCallback : true // allows us to pass in the req from our route (lets us check if a user is logged in or not) }, function(req, token, tokenSecret, profile, done) { // asynchronous process.nextTick(function() { // check if the user is already logged in if (!req.user) { User.findOne({ 'twitter.id' : profile.id }, function(err, user) { if (err) return done(err); if (user) { // if there is a user id already but no token (user was linked at one point and then removed) if (!user.twitter.token) { user.twitter.token = token; user.twitter.username = profile.username; user.twitter.displayName = profile.displayName; user.save(function(err) { if (err) throw err; return done(null, user); }); } return done(null, user); // user found, return that user } else { // if there is no user, create them var newUser = new User(); newUser.twitter.id = profile.id; newUser.twitter.token = token; newUser.twitter.username = profile.username; newUser.twitter.displayName = profile.displayName; newUser.save(function(err) { if (err) throw err; return done(null, newUser); }); } }); } else { // user already exists and is logged in, we have to link accounts var user = req.user; // pull the user out of the session user.twitter.id = profile.id; user.twitter.token = token; user.twitter.username = profile.username; user.twitter.displayName = profile.displayName; user.save(function(err) { if (err) throw err; return done(null, user); }); } }); })); // ========================================================================= // GOOGLE ================================================================== // ========================================================================= passport.use(new GoogleStrategy({ clientID : configAuth.googleAuth.clientID, clientSecret : configAuth.googleAuth.clientSecret, callbackURL : configAuth.googleAuth.callbackURL, passReqToCallback : true // allows us to pass in the req from our route (lets us check if a user is logged in or 
not) }, function(req, token, refreshToken, profile, done) { // asynchronous process.nextTick(function() { // check if the user is already logged in if (!req.user) { User.findOne({ 'google.id' : profile.id }, function(err, user) { if (err) return done(err); if (user) { // if there is a user id already but no token (user was linked at one point and then removed) if (!user.google.token) { user.google.token = token; user.google.name = profile.displayName; user.google.email = profile.emails[0].value; // pull the first email user.save(function(err) { if (err) throw err; return done(null, user); }); } return done(null, user); } else { var newUser = new User(); newUser.google.id = profile.id; newUser.google.token = token; newUser.google.name = profile.displayName; newUser.google.email = profile.emails[0].value; // pull the first email newUser.save(function(err) { if (err) throw err; return done(null, newUser); }); } }); } else { // user already exists and is logged in, we have to link accounts var user = req.user; // pull the user out of the session user.google.id = profile.id; user.google.token = token; user.google.name = profile.displayName; user.google.email = profile.emails[0].value; // pull the first email user.save(function(err) { if (err) throw err; return done(null, user); }); } }); <|fim▁hole|> })); };<|fim▁end|>
<|file_name|>_test_ns.py<|end_file_name|><|fim▁begin|>import unittest import numpy as np import socket import Pyro4 from nested_sampling import NestedSampling, MonteCarloWalker, Harmonic, Replica class TestNS(unittest.TestCase): """to test distributed computing must start a dispatcher with --server-name test and --port 9090 """ def setUp(self): self.setUp1() def setUp1(self, nproc=1, multiproc=True): self.ndim = 3 self.harmonic = Harmonic(self.ndim) self.nreplicas = 10 self.stepsize = 0.1 self.nproc = nproc self.mc_runner = MonteCarloWalker(self.harmonic, mciter=40)<|fim▁hole|> if multiproc == False: hostname=socket.gethostname() host = Pyro4.socketutil.getIpAddress(hostname, workaround127=True) self.dispatcher_URI = "PYRO:"+"test@"+host+":9090" else: self.dispatcher_URI = None replicas = [] for i in xrange(self.nreplicas): x = self.harmonic.get_random_configuration() replicas.append(Replica(x, self.harmonic.get_energy(x))) self.ns = NestedSampling(replicas, self.mc_runner, stepsize=0.1, nproc=nproc, verbose=False, dispatcher_URI=self.dispatcher_URI) self.Emax0 = self.ns.replicas[-1].energy self.niter = 100 for i in xrange(self.niter): self.ns.one_iteration() self.Emax = self.ns.replicas[-1].energy self.Emin = self.ns.replicas[0].energy def test1(self): print "running TestNS" self.assert_(len(self.ns.replicas) == self.nreplicas) self.assert_(self.Emax < self.Emax0) self.assert_(self.Emin < self.Emax) self.assert_(self.Emin >= 0) self.assert_(self.ns.stepsize != self.stepsize) self.assertEqual(len(self.ns.max_energies), self.niter * self.nproc) class testNSParMultiproc(TestNS): def setUp(self): self.setUp1(nproc=3) class testNSParPyro(TestNS): def setUp(self): self.setUp1(nproc=3,multiproc=False) if __name__ == "__main__": unittest.main()<|fim▁end|>
<|file_name|>table_db.cc<|end_file_name|><|fim▁begin|>// // Copyleft RIME Developers // License: GPLv3 // // 2013-04-18 GONG Chen <[email protected]> // #include <boost/algorithm/string.hpp> #include <boost/lexical_cast.hpp> #include <rime/dict/table_db.h> #include <rime/dict/user_db.h> // Rime table entry format: // phrase <Tab> code [ <Tab> weight ] // for multi-syllable phrase, code is a space-separated list of syllables // weight is a double precision float, defaulting to 0.0 namespace rime { static bool rime_table_entry_parser(const Tsv& row, std::string* key, std::string* value) { if (row.size() < 2 || row[0].empty() || row[1].empty()) { return false; } std::string code(row[1]); boost::algorithm::trim(code); *key = code + " \t" + row[0]; UserDbValue v; if (row.size() >= 3 && !row[2].empty()) { try { v.commits = boost::lexical_cast<int>(row[2]); const double kS = 1e8; v.dee = (v.commits + 1) / kS; } catch (...) { } } *value = v.Pack(); return true; } static bool rime_table_entry_formatter(const std::string& key, const std::string& value, Tsv* tsv) { Tsv& row(*tsv); // key ::= code <space> <Tab> phrase boost::algorithm::split(row, key,<|fim▁hole|> UserDbValue v(value); if (v.commits < 0) // deleted entry return false; boost::algorithm::trim(row[0]); // remove trailing space row[0].swap(row[1]); row.push_back(boost::lexical_cast<std::string>(v.commits)); return true; } const TextFormat TableDb::format = { rime_table_entry_parser, rime_table_entry_formatter, "Rime table", }; TableDb::TableDb(const std::string& name) : TextDb(name + ".txt", "tabledb", TableDb::format) { } // StableDb StableDb::StableDb(const std::string& name) : TableDb(name) {} bool StableDb::Open() { if (loaded()) return false; if (!Exists()) { LOG(INFO) << "stabledb '" << name() << "' does not exist."; return false; } return TableDb::OpenReadOnly(); } } // namespace rime<|fim▁end|>
boost::algorithm::is_any_of("\t")); if (row.size() != 2 || row[0].empty() || row[1].empty()) return false;
<|file_name|>imap_attachment_to_s3_operator.py<|end_file_name|><|fim▁begin|># # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at #<|fim▁hole|># # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. """This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`.""" import warnings # pylint: disable=unused-import from airflow.providers.amazon.aws.transfers.imap_attachment_to_s3 import ImapAttachmentToS3Operator # noqa warnings.warn( "This module is deprecated. Please use `airflow.providers.amazon.aws.transfers.imap_attachment_to_s3`.", DeprecationWarning, stacklevel=2, )<|fim▁end|>
# http://www.apache.org/licenses/LICENSE-2.0
<|file_name|>urls.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.conf import settings from django.conf.urls import include, url from django.conf.urls.static import static from django.contrib import admin from django.views.generic import TemplateView from django.views import defaults as default_views urlpatterns = [ url(r'^$', TemplateView.as_view(template_name='pages/home.html'), name="home"), url(r'^about/$', TemplateView.as_view(template_name='pages/about.html'), name="about"), # Django Admin, use {% url 'admin:index' %} url(r'^' + settings.ADMIN_URL, include(admin.site.urls)), url(r'^admin/doc/', include('django.contrib.admindocs.urls')), # User management url(r'^users/', include("therapyinvoicing.users.urls", namespace="users")), url(r'^accounts/', include('allauth.urls')), # Your stuff: custom urls includes go here url(r'^customers/', include("therapyinvoicing.customers.urls", namespace="customers")), url(r'^customerinvoicing/', include("therapyinvoicing.customerinvoicing.urls", namespace="customerinvoicing")), url(r'^kelainvoicing/', include("therapyinvoicing.kelainvoicing.urls", namespace="kelainvoicing")), url(r'^api/', include("therapyinvoicing.api.urls", namespace="api")), url(r'^reporting/', include("therapyinvoicing.reporting.urls", namespace="reporting")),<|fim▁hole|>] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) if settings.DEBUG: # This allows the error pages to be debugged during development, just visit # these url in browser to see how these error pages look like. urlpatterns += [ url(r'^400/$', default_views.bad_request), url(r'^403/$', default_views.permission_denied), url(r'^404/$', default_views.page_not_found), url(r'^500/$', default_views.server_error), ]<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from collections import defaultdict from datetime import datetime, timedelta from django.contrib.auth.models import User from django.db import models from django.db.models import Q, Count, Sum, Max, Min from django.db.models.signals import pre_save from django.dispatch import receiver from hashlib import sha1 from proso.dict import group_keys_by_value_lists from proso.django.cache import cache_pure from proso.list import flatten from proso_common.models import get_config from proso_models.models import Answer, Item, get_environment, get_mastery_trashold, get_predictive_model, get_time_for_knowledge_overview from time import time as time_lib import json import logging LOGGER = logging.getLogger('django.request') class TagManager(models.Manager): def prepare_related(self): return self.prefetch_related('concepts') class Tag(models.Model): """ Arbitrary tag for concepts. """ type = models.CharField(max_length=50) value = models.CharField(max_length=200) lang = models.CharField(max_length=2) type_name = models.CharField(max_length=100) value_name = models.CharField(max_length=100) objects = TagManager() class Meta: unique_together = ("type", "value", "lang") def to_json(self, nested=False): data = { "id": self.pk, "object_type": "tag", "type": self.type, "value": self.value, "lang": self.lang, "type_name": self.type_name, "value_name": self.value_name, } if not nested: data["concepts"] = [concept.to_json(nested=True) for concept in self.concepts.all()] return data def __str__(self): return "{}: {}".format(self.type, self.value) class ConceptManager(models.Manager): def prepare_related(self): return self.prefetch_related('tags', 'actions') @cache_pure() def get_concept_item_mapping(self, concepts=None, lang=None): """ Get mapping of concepts to items belonging to concept. 
Args: concepts (list of Concept): Defaults to None meaning all concepts lang (str): language of concepts, if None use language of concepts Returns: dict: concept (int) -> list of item ids (int) """ if concepts is None: concepts = self.filter(active=True) if lang is not None: concepts = concepts.filter(lang=lang) if lang is None: languages = set([concept.lang for concept in concepts]) if len(languages) > 1: raise Exception('Concepts has multiple languages') lang = list(languages)[0] item_lists = Item.objects.filter_all_reachable_leaves_many([json.loads(concept.query) for concept in concepts], lang) return dict(zip([c.pk for c in concepts], item_lists)) @cache_pure() def get_item_concept_mapping(self, lang): """ Get mapping of items_ids to concepts containing these items Args: lang (str): language of concepts Returns: dict: item (int) -> set of concepts (int) """ concepts = self.filter(active=True, lang=lang) return group_keys_by_value_lists(Concept.objects.get_concept_item_mapping(concepts, lang)) def get_concepts_to_recalculate(self, users, lang, concepts=None): """ Get concept which have same changes and have to be recalculated Args: users (list of users or user): users whose user stats we are interesting in lang (str): language of used concepts concepts (Optional[list of concepts]): list of primary keys of concepts or concepts Defaults to None meaning all concepts. 
Returns: dict: user -> set of concepts (int) - in case of list of users list of stats (str) - in case of one user """ only_one_user = False if not isinstance(users, list): only_one_user = True users = [users] mapping = self.get_item_concept_mapping(lang) current_user_stats = defaultdict(lambda: {}) user_stats_qs = UserStat.objects.filter(user__in=users, stat="answer_count") # we need only one type if concepts is not None: user_stats_qs = user_stats_qs.filter(concept__in=concepts) for user_stat in user_stats_qs: current_user_stats[user_stat.user_id][user_stat.concept_id] = user_stat concepts_to_recalculate = defaultdict(lambda: set()) for user, item, time in Answer.objects.filter(user__in=users)\ .values_list("user_id", "item").annotate(Max("time")): if item not in mapping: # in reality this should by corner case, so it is efficient to not filter Answers continue # item is not in concept time_expiration_lower_bound = get_config('proso_models', 'knowledge_overview.time_shift_hours', default=4) time_expiration_factor = get_config('proso_models', 'knowledge_overview.time_expiration_factor', default=2) for concept in mapping[item]: if user in current_user_stats and concept in current_user_stats[user] \ and current_user_stats[user][concept].time > time: if not self.has_time_expired(current_user_stats[user][concept].time, time, time_expiration_lower_bound, time_expiration_factor):<|fim▁hole|> if concepts is None or concept in ([c.pk for c in concepts] if type(concepts[0]) == Concept else Concept): concepts_to_recalculate[user].add(concept) if only_one_user: return concepts_to_recalculate[users[0]] return concepts_to_recalculate def has_time_expired(self, cache_time, last_answer_time, lower_bound, expiration_factor): cache_timedelta = cache_time - last_answer_time if cache_timedelta > timedelta(days=365): return False if cache_timedelta < timedelta(hours=lower_bound): return False return cache_timedelta < expiration_factor * (datetime.now() - cache_time) class 
Concept(models.Model): """ Model concepts for open learner model """ identifier = models.CharField(max_length=20, blank=True) query = models.TextField() name = models.CharField(max_length=200) lang = models.CharField(max_length=2) tags = models.ManyToManyField(Tag, related_name="concepts", blank=True) active = models.BooleanField(default=True) objects = ConceptManager() class Meta: unique_together = ("identifier", "lang") def to_json(self, nested=False): data = { "id": self.pk, "object_type": "concept", "identifier": self.identifier, "name": self.name, "query": self.query, "lang": self.lang, } if not nested: data["tags"] = [tag.to_json(nested=True) for tag in self.tags.all()] data["actions"] = [action.to_json(nested=True) for action in self.actions.all()] return data @staticmethod def create_identifier(query): """ Crete identifier of concept Args: query (str): query defining concept Returns: str: identifier of length 20 """ return sha1(query.encode()).hexdigest()[:20] def __str__(self): return self.name def __repr__(self): return "{}-{}".format(self.identifier, self.lang) class ActionManager(models.Manager): def prepare_related(self): return self.select_related('concept') class Action(models.Model): """ Actions which can be done with concept """ concept = models.ForeignKey(Concept, related_name="actions") identifier = models.CharField(max_length=50) name = models.CharField(max_length=200) url = models.CharField(max_length=200) objects = ActionManager() def to_json(self, nested=False): data = { "id": self.pk, "object_type": "action", "identifier": self.identifier, "name": self.name, "url": self.url, } if not nested: data["concept"] = self.concept.to_json(nested=True) return data def __str__(self): return "{} - {}".format(self.concept, self.name) class UserStatManager(models.Manager): def prepare_related(self): return self.select_related('concept') def recalculate_concepts(self, concepts, lang=None): """ Recalculated given concepts for given users Args: concepts 
(dict): user id (int -> set of concepts to recalculate) lang(Optional[str]): language used to get items in all concepts (cached). Defaults to None, in that case are get items only in used concepts """ if len(concepts) == 0: return if lang is None: items = Concept.objects.get_concept_item_mapping(concepts=Concept.objects.filter(pk__in=set(flatten(concepts.values())))) else: items = Concept.objects.get_concept_item_mapping(lang=lang) environment = get_environment() mastery_threshold = get_mastery_trashold() for user, concepts in concepts.items(): all_items = list(set(flatten([items[c] for c in concepts]))) answer_counts = environment.number_of_answers_more_items(all_items, user) correct_answer_counts = environment.number_of_correct_answers_more_items(all_items, user) predictions = dict(list(zip(all_items, get_predictive_model(). predict_more_items(environment, user, all_items, time=get_time_for_knowledge_overview())))) new_user_stats = [] stats_to_delete_condition = Q() for concept in concepts: answer_aggregates = Answer.objects.filter(user=user, item__in=items[concept]).aggregate( time_spent=Sum("response_time"), sessions=Count("session", True), time_first=Min("time"), time_last=Max("time"), ) stats = { "answer_count": sum(answer_counts[i] for i in items[concept]), "correct_answer_count": sum(correct_answer_counts[i] for i in items[concept]), "item_count": len(items[concept]), "practiced_items_count": sum([answer_counts[i] > 0 for i in items[concept]]), "mastered_items_count": sum([predictions[i] >= mastery_threshold for i in items[concept]]), "prediction": sum([predictions[i] for i in items[concept]]) / len(items[concept]), "time_spent": answer_aggregates["time_spent"] / 1000, "session_count": answer_aggregates["sessions"], "time_first": answer_aggregates["time_first"].timestamp(), "time_last": answer_aggregates["time_last"].timestamp(), } stats_to_delete_condition |= Q(user=user, concept=concept) for stat_name, value in stats.items(): 
new_user_stats.append(UserStat(user_id=user, concept_id=concept, stat=stat_name, value=value)) self.filter(stats_to_delete_condition).delete() self.bulk_create(new_user_stats) def get_user_stats(self, users, lang=None, concepts=None, since=None, recalculate=True): """ Finds all UserStats of given concepts and users. Recompute UserStats if necessary Args: users (Optional[list of users] or [user]): list of primary keys of user or users Defaults to None meaning all users. lang (string): use only concepts witch the lang. Defaults to None meaning all languages. concepts (Optional[list of concepts]): list of primary keys of concepts or concepts Defaults to None meaning all concepts. Returns: dict: user_id -> dict (concept_identifier - > (stat_name -> value)) -- for more users dict: concept_identifier - > (stat_name -> value) -- for one user """ only_one_user = False if not isinstance(users, list): users = [users] only_one_user = True if recalculate: if lang is None: raise ValueError('Recalculation without lang is not supported.') time_start = time_lib() concepts_to_recalculate = Concept.objects.get_concepts_to_recalculate(users, lang, concepts) LOGGER.debug("user_stats - getting identifying concepts to recalculate: %ss", (time_lib() - time_start)) time_start = time_lib() self.recalculate_concepts(concepts_to_recalculate, lang) LOGGER.debug("user_stats - recalculating concepts: %ss", (time_lib() - time_start)) qs = self.prepare_related().filter(user__in=users, concept__active=True) if concepts is not None: qs = qs.filter(concept__in=concepts) if lang is not None: qs = qs.filter(concept__lang=lang) if since is not None: qs = qs.filter(time__gte=since) data = defaultdict(lambda: defaultdict(lambda: {})) for user_stat in qs: data[user_stat.user_id][user_stat.concept.identifier][user_stat.stat] = user_stat.value if only_one_user: return data[users[0].pk if type(users[0]) == User else users[0]] return data class UserStat(models.Model): """ Represent arbitrary statistic (float) 
of the user on concept """ concept = models.ForeignKey(Concept) user = models.ForeignKey(User, related_name="stats") stat = models.CharField(max_length=50) time = models.DateTimeField(auto_now=True) value = models.FloatField() objects = UserStatManager() class Meta: unique_together = ("concept", "user", "stat") def __str__(self): return "{} - {}: {}".format(self.stat, self.concept, self.value) @receiver(pre_save, sender=Concept) def generate_identifier(sender, instance, **kwargs): """ Generate and set identifier of concept before saving object to DB Args: sender (class): should be Concept instance (Concept): saving concept """ identifier = Concept.create_identifier(instance.query) qs = Concept.objects.filter(identifier=identifier, lang=instance.lang) if instance.pk: qs = qs.exclude(pk=instance.pk) if qs.count() > 0: raise ValueError("Concept identifier conflict") instance.identifier = identifier<|fim▁end|>
continue # cache is up to date
<|file_name|>text.build.js<|end_file_name|><|fim▁begin|>//A simple build file using the tests directory for requirejs { baseUrl: "../../../requirejs/tests/text", paths: { text: "../../../requirejs/../text/text" }, dir: "builds/text", optimize: "none", optimizeAllPluginResources: true, <|fim▁hole|> } ] }<|fim▁end|>
modules: [ { name: "widget"
<|file_name|>scheduler.go<|end_file_name|><|fim▁begin|>/* Copyright 2014 Google Inc. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.<|fim▁hole|> package main import ( "runtime" "github.com/GoogleCloudPlatform/kubernetes/pkg/healthz" "github.com/GoogleCloudPlatform/kubernetes/pkg/util" "github.com/GoogleCloudPlatform/kubernetes/pkg/version/verflag" "github.com/GoogleCloudPlatform/kubernetes/plugin/cmd/kube-scheduler/app" "github.com/spf13/pflag" ) func init() { healthz.DefaultHealthz() } func main() { runtime.GOMAXPROCS(runtime.NumCPU()) s := app.NewSchedulerServer() s.AddFlags(pflag.CommandLine) util.InitFlags() util.InitLogs() defer util.FlushLogs() verflag.PrintAndExitIfRequested() s.Run(pflag.CommandLine.Args()) }<|fim▁end|>
*/
<|file_name|>ast_builder.py<|end_file_name|><|fim▁begin|>from . import ast from .pystates import symbols as syms from .grammar.sourcefile import SourceFile import token import six import re class ASTError(Exception): pass class ASTMeta(type): def __new__(cls, name, bases, attrs): handlers = {} attrs['handlers'] = handlers newcls = type.__new__(cls, name, bases, attrs) for k, v in attrs.items(): if k.startswith('handle_'): sym = k[len('handle_'):] handlers[syms[sym]] = getattr(newcls, k) return newcls operator_map = { '+': ast.Add, '+=': ast.Add, '-': ast.Sub, '-=': ast.Sub, '*': ast.Mult, '*=': ast.Mult, '/': ast.Div, '/=': ast.Div, '%': ast.Mod, '%=': ast.Mod, '**': ast.Pow, '**=': ast.Pow, '<<': ast.LShift, '<<=': ast.LShift, '>>': ast.RShift, '>>=': ast.RShift, '|': ast.BitOr, '|=': ast.BitOr, '^': ast.BitXor, '^=': ast.BitXor, '&': ast.BitAnd, '&=': ast.BitAnd, '//': ast.FloorDiv, '//=': ast.FloorDiv, } compare_map = { '==': ast.Eq, '!=': ast.NotEq, '<': ast.Lt, '<=': ast.LtE, '>': ast.Gt, '>=': ast.GtE, 'is': ast.Is, 'is not': ast.IsNot, 'in': ast.In, 'not in': ast.NotIn, } xdigits = re.compile(r'^[0-9a-z]{2}$', re.IGNORECASE) @six.add_metaclass(ASTMeta) class ASTBuilder(object): def __init__(self, src): if not isinstance(src, SourceFile): raise Exception('invalid sourcefile') self.src = src self.root = src.parse_tree.root self.ast = self.build() def syntax_error(self, msg, node): return SyntaxError(msg, (self.src.name, node.start[0], node.start[1], self.src.get_line(node.start[0]))) def build(self): n = self.root if n == syms.single_input: # single_input: NEWLINE | simple_stmt | compound_stmt NEWLINE if n[0] == token.NEWLINE: return ast.Interactive([]) return ast.Interactive(self.handle(n[0])) elif n == syms.file_input: # file_input: (NEWLINE | stmt)* ENDMARKER stmts = [] for stmt in n.filter(syms.stmt): stmts.extend(self.handle(stmt[0])) return ast.Module(stmts) elif n == syms.eval_input: # eval_input: testlist NEWLINE* ENDMARKER return 
ast.Expression(self.handle_testlist(n[0])) raise ASTError('invalid root node') def handle(self, node): handler = self.handlers.get(node.type, None) if handler is None: raise ASTError('invalid node: %r', node) return handler(self, node) def handle_stmt(self, stmt): # stmt: simple_stmt | compound_stmt if stmt[0] == syms.simple_stmt: return self.handle_simple_stmt(stmt[0]) return [self.handle(stmt[0][0])] def handle_simple_stmt(self, simple_stmt): # simple_stmt: small_stmt (';' small_stmt)* [';'] NEWLINE # small_stmt: (expr_stmt | del_stmt | pass_stmt | flow_stmt | # import_stmt | global_stmt | nonlocal_stmt | assert_stmt) stmts = [] for small_stmt in simple_stmt.filter(syms.small_stmt): stmts.append(self.handle(small_stmt[0])) return stmts def handle_compound_stmt(self, compound_stmt): # compound_stmt: (if_stmt | while_stmt | for_stmt | # try_stmt | with_stmt | funcdef) return [self.handle(compound_stmt[0])] def handle_testlist(self, testlist): # testlist: test (',' test)* [','] if len(testlist) == 1: return self.handle_test(testlist[0]) exprs = [] for test in testlist.filter(syms.test): exprs.append(self.handle_test(test)) return ast.Tuple(exprs, ast.Load, *testlist.start) def handle_test(self, test): # test: or_test ['if' or_test 'else' test] | lambdef if len(test) == 1: if test[0] == syms.lambdef: return self.handle_lambdef(test[0]) return self.handle_or_test(test[0]) body = self.handle_or_test(test[0]) te = self.handle_or_test(test[2]) orelse = self.handle_test(test[4]) return ast.IfExp(te, body, orelse, *test.start) def handle_or_test(self, or_test): # or_test: and_test ('or' and_test)* if len(or_test) == 1: return self.handle_and_test(or_test[0]) return ast.BoolOp(ast.Or, [self.handle_and_test(x) for x in or_test.filter(syms.and_test)], *or_test.start) def handle_and_test(self, and_test): #and_test: not_test ('and' not_test)* if len(and_test) == 1: return self.handle_not_test(and_test[0]) return ast.BoolOp(ast.And, [self.handle_not_test(x) for x in 
and_test.filter(syms.not_test)], *and_test.start) def handle_not_test(self, node): # not_test: 'not' not_test | comparison if len(node) == 2: return ast.UnaryOp( ast.Not, self.handle_not_test(node[1]), *node.start) # comparison: expr (comp_op expr)* # comp_op: '<'|'>'|'=='|'>='|'<='|'<>'|'!='|'in'|'not' 'in'|'is'|'is' 'not' node = node[0] expr = self.handle_expr(node[0]) if len(node) == 1: return expr operators = [] operands = [] for i in range(1, len(node), 2): if len(node[i]) == 1: op = node[i][0].val else: op = '%s %s' % (node[i][0].val, node[i][1].val) operators.append(compare_map[op]) operands.append(self.handle_expr(node[i + 1])) return ast.Compare(expr, operators, operands, *node.start) def handle_lambdef(self, node): # lambdef: 'lambda' [varargslist] ':' test if len(node) == 3: args = ast.arguments(args=[], vararg=None, varargannotation=None, kwonlyargs=[], kwarg=None, kwargannotation=None, defaults=[], kw_defaults=[]) else: args = self.handle_varargslist(node[1]) return ast.Lambda(args, self.handle_test(node[-1]), *node.start) def handle_varargslist(self, node): # typedargslist: (tfpdef ['=' test] (',' tfpdef ['=' test])* [',' # ['*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef]] # | '*' [tfpdef] (',' tfpdef ['=' test])* [',' '**' tfpdef] | '**' tfpdef) # tfpdef: NAME [':' test] # varargslist: (vfpdef ['=' test] (',' vfpdef ['=' test])* [',' # ['*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef]] # | '*' [vfpdef] (',' vfpdef ['=' test])* [',' '**' vfpdef] | '**' vfpdef) # vfpdef: NAME if node[0].val == '**': kwarg = node[1][0].val kwargannotation = node[1][2].val if len(node[1]) == 3 else None return ast.arguments(args=[], vararg=None, varargannotation=None, kwonlyargs=[], kwarg=kwarg, kwargannotation=kwargannotation, defaults=[], kw_defaults=[]) elif node[0].val == '*': vararg, i = node[1][0].val, 3 varargannotation = node[1][2].val if len(node[1]) == 3 else None kwonlyargs = [] kw_defaults = [] while i < len(node) 
and node[i].val != '**': arg = ast.arg(node[i][0].val, None) if len(node[i]) == 3: arg.annotation = node[i][2].val kwonlyargs.append(arg) if node[i + 1].val == '=': kw_defaults.append(self.handle_test(node[i + 2])) i += 4 else: i += 2 if i < len(node) and node[i].val == '**': kwarg = node[i + 1][0].val kwargannotation = node[i + 1][2] if len(node[i + 1]) == 3 else None else: kwarg, kwargannotation = None, None return ast.arguments(args=[], vararg=vararg, varargannotation=varargannotation, kwonlyargs=kwonlyargs, kwarg=kwarg, kwargannotation=kwargannotation, defaults=[], kw_defaults=kw_defaults) i = 0 args = [] defaults = [] while i < len(node) and node[i] != token.OP: arg = ast.arg(node[i][0].val, None) if len(node[i]) == 3: arg.annotation = node[i][2].val args.append(arg) if i + 1 < len(node) and node[i + 1].val == '=': defaults.append(self.handle_test(node[i + 2])) i += 4 elif len(defaults) > 0: # TODO: get line raise self.syntax_error( 'non-default argument follows default argument', node) else: i += 2 if i < len(node): argument = self.handle_varargslist(node.subs[i:]) argument.args = args argument.defaults = defaults return argument return ast.arguments(args=args, vararg=None, varargannotation=None, kwonlyargs=[], kwarg=None, kwargannotation=None, defaults=defaults, kw_defaults=[]) handle_typedargslist = handle_varargslist def handle_expr(self, node): # expr: xor_expr ('|' xor_expr)* # xor_expr: and_expr ('^' and_expr)* # and_expr: shift_expr ('&' shift_expr)* # shift_expr: arith_expr (('<<'|'>>') arith_expr)* # arith_expr: term (('+'|'-') term)* # term: factor (('*'|'/'|'%'|'//') factor)* if node == syms.factor: return self.handle_factor(node) if len(node) == 1: return self.handle_expr(node[0]) binop = ast.BinOp( self.handle_expr(node[0]), operator_map[node[1].val], self.handle_expr(node[2]), *node.start) for i in range(3, len(node), 2): binop = ast.BinOp(binop, operator_map[node[i].val], self.handle_expr(node[i + 1]), *node.start) return binop def 
handle_factor(self, node): # factor: ('+'|'-'|'~') factor | power if len(node) == 1: return self.handle_power(node[0]) uop = node[0].val if uop == '+': uop = ast.UAdd elif uop == '-': uop = ast.USub else: uop = ast.Invert return ast.UnaryOp(uop, self.handle_factor(node[1]), *node.start) def handle_power(self, node): # power: atom trailer* ['**' factor] atom = self.handle_atom(node[0]) if len(node) == 1: return atom for n in node.subs[1:]: if n != syms.trailer: break atom = self.get_trailer(n, atom) if node[-1] == syms.factor: return ast.BinOp( atom, ast.Pow, self.handle_factor(node[-1]), *node.start) return atom def get_trailer(self, node, atom): # trailer: '(' [arglist] ')' | '[' subscriptlist ']' | '.' NAME v = node[0].val if v == '.': return ast.Attribute(atom, node[1].val, ast.Load, *node.start) elif v == '(': if len(node) == 2: return ast.Call(atom, [], [], None, None, *node.start) args, keywords, starargs, kwargs = self.get_arglist(node[1]) return ast.Call(atom, args, keywords, starargs, kwargs, *node.start) return self.get_subscriptlist(atom, node[1]) def get_subscriptlist(self, left, node): # subscriptlist: subscript (',' subscript)* [','] # subscript: test | [test] ':' [test] [sliceop] # sliceop: ':' [test] if len(node) == 1: sl = self.get_slice(node[0]) return ast.Subscript(left, sl, ast.Load, *node.start) slices = [] for n in node.filter(syms.subscript): slices.append(self.get_slice(n)) extsl = ast.ExtSlice(slices) return ast.Subscript(left, extsl, ast.Load, *node.start) def get_slice(self, node): # subscript: test | [test] ':' [test] [sliceop] # sliceop: ':' [test] if len(node) == 1: if node[0] == syms.test: return ast.Index(self.handle_test(node[0])) return ast.Slice(None, None, None) if node[0] == syms.test: lower = self.handle_test(node[0]) next = 2 else: lower, next = None, 1 if next < len(node): upper, next = self.handle_test(node[next]), next + 1 step = None if next < len(node): sliceop = node[next] if len(sliceop) == 2: step = 
self.handle_test(sliceop[1]) return ast.Slice(lower, upper, step) return ast.Slice(lower, None, None) def get_arglist(self, node): # arglist: (argument ',')* (argument [','] # |'*' test (',' argument)* [',' '**' test] # |'**' test) # return args, keywords, starargs, kwargs args, keywords, starargs, kwargs = [], [], None, None i = 0 while i < len(node) and node[i] == syms.argument: arg = self.handle_argument(node[i]) if isinstance(arg, ast.keyword): keywords.append(arg) elif len(keywords) == 0: args.append(arg) else: raise self.syntax_error('non-keyword arg after keyword arg', node) i += 2 if i >= len(node): pass elif node[i].val == '*': starargs = self.handle_test(node[i + 1]) i += 3 while i < len(node) and node[i] == syms.argument: kw = self.handle_argument(node[i]) if not isinstance(kw, ast.keyword): raise self.syntax_error( 'only named arguments may follow *expression', node) keywords.append(kw) i += 2 if i < len(node): kwargs = self.handle_test(node[i + 1]) else: kwargs = self.handle_test(node[i + 1]) return args, keywords, starargs, kwargs def handle_argument(self, node): # argument: test [comp_for] | test '=' test if len(node) == 1: return self.handle_test(node[0]) elif len(node) == 3: k = self.handle_test(node[0]) if not isinstance(k, ast.Name): raise self.syntax_error( 'keyword must be a NAME', *node[0].start) v = self.handle_test(node[2]) return ast.keyword(k.id, v) return ast.GeneratorExp(self.handle_test(node[0]), self.get_comp_for(node[1]), *node.start) def get_comp_for(self, node): # comp_for: 'for' exprlist 'in' or_test [comp_iter] # comp_iter: comp_for | comp_if target = self.handle_exprlist(node[1]) if not isinstance(target, ast.AssignTypes): raise self.syntax_error( 'invalid assign to %s' % type(target).__name__, node[1]) self.loop_mark_ctx(target, ast.Store) compfor = ast.comprehension(target, self.handle_or_test(node[3]), []) if len(node) == 4: return [compfor] if node[-1][0] == syms.comp_if: tails = self.get_comp_if(node[-1][0]) else: tails = 
self.get_comp_for(node[-1][0]) ifs, i = [], 0 while i < len(tails) and isinstance(tails[i], ast.Compare): ifs.append(tails[i]) i += 1 compfor.ifs = ifs return [compfor] + tails[i:] def get_comp_if(self, node): # comp_if: 'if' test_nocond [comp_iter] # comp_iter: comp_for | comp_if comp = self.test_nocond(node[1]) if len(node) == 3: if node[2][0] == syms.comp_if: subs = self.get_comp_if(node[2][0]) else: subs = self.get_comp_for(node[2][0]) return [comp] + subs return [comp] def handle_exprlist(self, node): # exprlist: (expr|star_expr) (',' (expr|star_expr))* [','] exprs = [] for n in node.subs: if n == syms.expr: exprs.append(self.handle_expr(n)) elif n == syms.star_expr: exprs.append(self.handle_star_expr(n)) if len(exprs) == 1 and node[-1] != token.OP: return exprs[0] return ast.Tuple(exprs, ast.Store, *node.start) def handle_test_nocond(self, node): # test_nocond: or_test | lambdef_nocond # lambdef_nocond: 'lambda' [varargslist] ':' test_nocond if node[0] == syms.or_test: return self.handle_or_test(node[0]) node = node[0] if len(node) == 3: args = ast.arguments(args=[], vararg=None, varargannotation=None, kwonlyargs=[], kwarg=None, kwargannotation=None, defaults=[], kw_defaults=[]) else: args = self.handle_varargslist(node[1]) return ast.Lambda(args, self.handle_test_nocond(node[-1]), *node.start) def handle_atom(self, node): # atom: ('(' [yield_expr|testlist_comp] ')' | # '[' [testlist_comp] ']' | # '{' [dictorsetmaker] '}' | # NAME | NUMBER | STRING+ | '...' 
| 'None' | 'True' | 'False') n = node[0] if n == token.NAME: return ast.Name(n.val, ast.Load, *node.start) elif n == token.NUMBER: return ast.Num(eval(n.val), *node.start) elif n.val == '...': return ast.Ellipsis(*node.start) elif n == token.STRING: return self.get_string(node) elif n.val == '(': if len(node) == 2: return ast.Tuple([], ast.Load, *node.start) if node[1] == syms.yield_expr: return self.handle_yield_expr(node[1]) return self.get_testlist_comp('(', node[1]) elif n.val == '[': if len(node) == 2: return ast.List([], ast.Load, *node.start) return self.get_testlist_comp('[', node[1]) else: if len(node) == 2: return ast.Dict([], [], *node.start) return self.handle_dictorsetmaker(node[1]) def get_string(self, node): head = self.parse_string(node[0]) if len(node) == 1: return head t = type(head) strs = [head] for n in node.subs[1:]: s = self.parse_string(n) if not isinstance(s, t): raise self.syntax_error( 'cannot mix bytes and nonbytes literals', node) strs.append(s) if t is ast.Str: return ast.Str(''.join(strs), *node.start) return ast.Bytes(b''.join(strs), *node.start) def parse_string(self, node): is_str, is_re = True, False s = node.val pos = 0 while True: c = s[pos] if c == 'b': is_str = False elif c == 'r': is_re = True elif c == 'u': pass else: if s[pos] == s[pos + 1]: s = s[pos + 3:-3] else: s = s[pos + 1:-1] break pos += 1 if is_re: if is_str: return ast.Str(s, *node.start) return ast.Bytes(s, *node.start) chars = [] pos = 0 while pos < len(s): c = s[pos] if c == '\\': if pos == len(s) - 1: raise self.syntax_error( 'EOL while scanning string literal', *node.end) pos += 1 next = s[pos] if next == "'": v = "'" if is_str else ord("'") elif next == '"': v = '"' if is_str else ord('"') elif next == 'b': v = '\b' if is_str else ord('\b') elif next == 'f': v = '\f' if is_str else ord('\f') elif next == 't': v = '\t' if is_str else ord('\t') elif next == 'n': v = '\n' if is_str else ord('\n') elif next == 'r': v = '\r' if is_str else ord('\r') elif next == 
'v': v = '\v' if is_str else ord('\v') elif next == 'a': v = '\a' if is_str else ord('\a') elif next == 'x': if pos + 2 >= len(s) - 1: raise self.syntax_error( 'truncated \\xXX escape', node) xs = s[pos:pos + 2] if not xdigits.match(xs): raise self.syntax_error('invalid \\xXX escape', node) pos += 2 n = eval('0x' + xs) v = chr(n) if is_str else n elif next in '01234567': n = int(next) if pos + 1 < len(s) and s[pos + 1] in '01234567': pos += 1 n = n * 8 + int(s[pos]) if pos + 1 < len(s) and s[pos + 1] in '01234567': pos += 1 n = n * 8 + int(s[pos]) v = chr(n) if is_str else n else: v = '\\' if is_str else ord('\\') chars.append(v) else: chars.append(c) pos += 1 <|fim▁hole|> def handle_yield_expr(self, node): # yield_expr: 'yield' [yield_arg] # yield_arg: 'from' test | testlist if len(node) == 1: return ast.Yield(None, *node.start) if len(node[1]) == 1: testlist = self.handle_testlist(node[1][0]) return ast.Yield(testlist, *node.start) test = self.handle_test(node[1][1]) return ast.YieldFrom(test, *node.start) def get_testlist_comp(self, outter, node): # testlist_comp: (test|star_expr) ( comp_for | (',' (test|star_expr))* [','] ) if node[0] == syms.test: expr = self.handle_test(node[0]) else: expr = self.handle_star_expr(node[0]) if len(node) == 1: # (test|star_expr) if outter == '(': return expr return ast.List([expr], ast.Load, *node.start) if node[1] == syms.comp_for: # (test|star_expr) comp_for generators = self.get_comp_for(node[1]) return ast.GeneratorExp(expr, generators, *node.start) # (test|star_expr) (',' (test|star_expr))* [','] i = 2 elts = [expr] while i < len(node): if node[i] == syms.test: elts.append(self.handle_test(node[i])) else: elts.append(self.handle_star_expr(node[i])) i += 2 if outter == '(': return ast.Tuple(elts, ast.Load, *node.start) return ast.List(elts, ast.Load, *node.start) def handle_dictorsetmaker(self, node): # dictorsetmaker: ( (test ':' test (comp_for | (',' test ':' test)* [','])) | # (test (comp_for | (',' test)* [','])) ) if 
len(node) > 1 and node[1] == token.OP: if node[3] == syms.comp_for: # test ':' test comp_for k = self.handle_test(node[0]) v = self.handle_test(node[2]) generators = self.get_comp_for(node[3]) return ast.DictComp(k, v, generators, *node.start) # test ':' test (',' test ':' test)* [','] i = 0 keys, values = [], [] while i < len(node): keys.append(self.handle_test(node[i])) values.append(self.handle_test(node[i + 2])) i += 4 return ast.Dict(keys, values, *node.start) # (test (comp_for | (',' test)* [','])) if len(node) > 1 and node[1] == syms.comp_for: # test comp_for elt = self.handle_test(node[0]) generators = self.get_comp_for(node[1]) return ast.SetComp(elt, generators, *node.start) # test (',' test)* [','] elts = [] i = 0 while i < len(node): elts.append(self.handle_test(node[i])) i += 2 return ast.Set(elts, *node.start) def handle_expr_stmt(self, node): # expr_stmt: testlist_star_expr (augassign (yield_expr|testlist) | # ('=' (yield_expr|testlist_star_expr))*) # augassign: ('+=' | '-=' | '*=' | '/=' | '%=' | '&=' | '|=' | '^=' | # '<<=' | '>>=' | '**=' | '//=') expr = self.handle_testlist_star_expr(node[0]) if len(node) == 1: return ast.Expr(expr, *node.start) if not isinstance(expr, ast.AssignTypes): raise self.syntax_error( 'invalid assign to %s' % type(expr).__name__, node) self.loop_mark_ctx(expr, ast.Store) if node[1] == syms.augassign: op = operator_map[node[1][0].val] if node[2] == syms.yield_expr: return ast.AugAssign( expr, op, self.handle_yield_expr(node[2]), *node.start) return ast.AugAssign( expr, op, self.handle_testlist(node[2]), *node.start) targets, i = [expr], 2 for i in range(2, len(node) - 1, 2): if node[i] == syms.yield_expr: t = self.handle_yield_expr(node[i]) else: t = self.handle_testlist_star_expr(node[i]) if not isinstance(t, ast.AssignTypes): raise self.syntax_error( 'invalid assign to %s' % type(t).__name__, node[i]) self.loop_mark_ctx(t, ast.Store) targets.append(t) i += 2 if node[-1] == syms.yield_expr: value = 
self.handle_yield_expr(node[-1]) else: value = self.handle_testlist_star_expr(node[-1]) self.loop_mark_ctx(value, ast.Load) return ast.Assign(targets, value, *node.start) @classmethod def loop_mark_ctx(cls, obj, ctx): obj.ctx = ctx if isinstance(obj, (ast.List, ast.Tuple)): for elt in obj.elts: cls.loop_mark_ctx(elt, ctx) def handle_testlist_star_expr(self, node): # testlist_star_expr: (test|star_expr) (',' (test|star_expr))* [','] exprs = [] for n in node.subs: if n == syms.test: exprs.append(self.handle_test(n)) elif n == syms.star_expr: exprs.append(self.handle_star_expr(n)) if len(exprs) == 1 and node[-1] != token.OP: return exprs[0] return ast.Tuple(exprs, ast.Store, *node.start) def handle_star_expr(self, node): # star_expr: '*' expr return ast.Starred(self.handle_expr(node[1]), ast.Store, *node.start) def handle_del_stmt(self, node): # del_stmt: 'del' exprlist expr = self.handle_exprlist(node[1]) self.loop_mark_ctx(expr, ast.Del) if isinstance(expr, ast.Tuple): return ast.Delete(expr.elts, *node.start) return ast.Delete([expr], *node.start) def handle_pass_stmt(self, node): # pass_stmt: 'pass' return ast.Pass(*node.start) def handle_flow_stmt(self, node): # flow_stmt: break_stmt | continue_stmt | return_stmt # | raise_stmt | yield_stmt # return_stmt: 'return' [testlist] # break_stmt: 'break' # continue_stmt: 'continue' # yield_stmt: yield_expr # raise_stmt: 'raise' [test ['from' test]] node = node[0] if node == syms.return_stmt: if len(node) == 2: return ast.Return(self.handle_testlist(node[1]), *node.start) return ast.Return(None, *node.start) elif node == syms.break_stmt: return ast.Break(*node.start) elif node == syms.continue_stmt: return ast.Continue(*node.start) elif node == syms.yield_stmt: return ast.Expr(self.handle_yield_expr(node[0]), *node.start) exc = len(node) > 1 and self.handle_test(node[1]) or None cause = len(node) == 4 and self.handle_test(node[3]) or None return ast.Raise(exc, cause, *node.start) def handle_import_stmt(self, node): if 
node[0] == syms.import_name: return self.handle_import_name(node[0]) return self.handle_import_from(node[0]) def handle_import_name(self, node): # import_name: 'import' dotted_as_names # dotted_as_names: dotted_as_name (',' dotted_as_name)* # dotted_as_name: dotted_name ['as' NAME] alias = [] for n in node[1].filter(syms.dotted_as_name): name = self.handle_dotted_name(n[0]) if len(n) == 1: alias.append(ast.alias(name, None)) else: alias.append(ast.alias(name, n[2].val)) return ast.Import(alias, *node.start) def handle_import_from(self, node): # import_from: ('from' (('.' | '...')* dotted_name | ('.' | '...')+) # 'import' ('*' | '(' import_as_names ')' | import_as_names)) level = 0 for i in range(1, len(node)): if node[i] != token.OP: break level += len(node[i].val) if node[i] == syms.dotted_name: module = self.handle_dotted_name(node[i]) i += 1 else: module = None v = node[i + 1].val if v == '*': names = [ast.alias('*', None)] elif v == '(': names = self.handle_import_as_names(node[i + 2]) else: names = self.handle_import_as_names(node[i + 1]) return ast.ImportFrom(module, names, level, *node.start) def handle_dotted_name(self, node): # dotted_name: NAME ('.' 
NAME)* return '.'.join([n.val for n in node.filter(token.NAME)]) def handle_import_as_names(self, node): # import_as_name: NAME ['as' NAME] # import_as_names: import_as_name (',' import_as_name)* [','] names = [] for n in node.filter(syms.import_as_name): if len(n) == 1: names.append(ast.alias(n[0].val, None)) else: names.append(ast.alias(n[0].val, n[2].val)) return names def handle_global_stmt(self, node): # global_stmt: 'global' NAME (',' NAME)* return ast.Global(list(node.filter(token.NAME)), *node.start) def handle_nonlocal_stmt(self, node): # nonlocal_stmt: 'nonlocal' NAME (',' NAME)* return ast.Nonlocal(list(node.filter(token.NAME)), *node.start) def handle_assert_stmt(self, node): # assert_stmt: 'assert' test [',' test] test = self.handle_test(node[1]) if len(node) == 2: msg = None else: msg = self.handle_test(node[3]) return ast.Assert(test, msg, *node.start) def handle_suite(self, node, get_stmts=False): # suite: simple_stmt | NEWLINE INDENT stmt+ DEDENT if len(node) == 1: stmts = self.handle_simple_stmt(node[0]) if get_stmts: return stmts return ast.Suite(stmts) stmts = [] for i in range(2, len(node) - 1): stmts.extend(self.handle_stmt(node[i])) if get_stmts: return stmts return ast.Suite(stmts) def handle_if_stmt(self, node): # if_stmt: 'if' test ':' suite ('elif' test ':' suite)* ['else' ':' suite] test = self.handle_test(node[1]) body = self.handle_suite(node[3], get_stmts=True) ifexpr = ast.If(test, body, [], *node.start) cur, i = ifexpr, 4 while i < len(node) and node[i].val == 'elif': test = self.handle_test(node[i + 1]) body = self.handle_suite(node[i + 3], get_stmts=True) expr = ast.If(test, body, [], *node[i].start) cur.orelse.append(expr) cur = expr i += 4 if i < len(node): cur.orelse = self.handle_suite(node[-1], get_stmts=True) return ifexpr def handle_while_stmt(self, node): # while_stmt: 'while' test ':' suite ['else' ':' suite] test = self.handle_test(node[1]) body = self.handle_suite(node[3], get_stmts=True) if len(node) == 4: orelse = [] 
else: orelse = self.handle_suite(node[6], get_stmts=True) return ast.While(test, body, orelse, *node.start) def handle_for_stmt(self, node): # for_stmt: 'for' exprlist 'in' testlist ':' suite ['else' ':' suite] target = self.handle_exprlist(node[1]) if not isinstance(target, ast.AssignTypes): raise self.syntax_error( 'invalid assign to %s' % type(target).__name__, node[1]) self.loop_mark_ctx(target, ast.Store) iterator = self.handle_testlist(node[3]) body = self.handle_suite(node[5], get_stmts=True) if len(node) == 6: orelse = [] else: orelse = self.handle_suite(node[8], get_stmts=True) return ast.For(target, iterator, body, orelse, *node.start) def handle_try_stmt(self, node): # try_stmt: ('try' ':' suite # ((except_clause ':' suite)+ # ['else' ':' suite] # ['finally' ':' suite] | # 'finally' ':' suite)) # except_clause: 'except' [test ['as' NAME]] body = self.handle_suite(node[2], get_stmts=True) handlers, i = [], 3 while i < len(node) and node[i] == syms.except_clause: expnode = node[i] exptype = len(expnode) > 1 and self.handle_test(expnode[1]) or None expname = len(expnode) > 2 and expnode[3].val or None expbody = self.handle_suite(node[i + 2], get_stmts=True) handlers.append( ast.ExceptHandler(exptype, expname, expbody, *expnode.start)) i += 3 orelse = [] if i < len(node) and node[i].val == 'else': orelse = self.handle_suite(node[i + 2], get_stmts=True) i += 3 finalbody = [] if i < len(node) and node[i].val == 'finally': finalbody = self.handle_suite(node[i + 2], get_stmts=True) return ast.Try(body, handlers, orelse, finalbody, *node.start) def handle_with_stmt(self, node): # with_stmt: 'with' with_item (',' with_item)* ':' suite # with_item: test ['as' expr] items, i = [], 1 while i < len(node) and node[i] == syms.with_item: wnode = node[i] item = ast.withitem(self.handle_test(wnode[0]), None) if len(wnode) == 3: item.optional_vars = self.handle_test(wnode[2]) items.append(item) i += 2 body = self.handle_suite(node[-1], get_stmts=True) return ast.With(items, 
body, *node.start) def handle_funcdef(self, node): # funcdef: 'def' NAME parameters ['->' test] ':' suite # parameters: '(' [typedargslist] ')' name = node[1].val if len(node[2]) == 2: params = ast.arguments(args=[], vararg=None, varargannotation=None, kwonlyargs=[], kwarg=None, kwargannotation=None, defaults=[], kw_defaults=[]) else: params = self.handle_typedargslist(node[2][1]) if node[3].val == ':': returns = None else: returns = self.handle_test(node[4]) body = self.handle_suite(node[-1], get_stmts=True) return ast.FunctionDef(name, params, body, [], returns, *node.start) def handle_classdef(self, node): # classdef: 'class' NAME ['(' [arglist] ')'] ':' suite name = node[1].val if len(node) == 7: bases, keywords, starargs, kwargs = self.get_arglist(node[3]) else: bases, keywords, starargs, kwargs = [], [], None, None body = self.handle_suite(node[-1], get_stmts=True) return ast.ClassDef( name, bases, keywords, starargs, kwargs, body, [], *node.start) def handle_decorated(self, node): # decorated: decorators (classdef | funcdef) # decorators: decorator+ # decorator: '@' dotted_name [ '(' [arglist] ')' ] NEWLINE # dotted_name: NAME ('.' NAME)* if node[1] == syms.funcdef: funccls = self.handle_funcdef(node[1]) else: funccls = self.handle_classdef(node[1]) decs, i = [], 0 for n in node[0]: name = self.get_attribute(n[1]) if n[2].val == '(': if len(n) == 6: args, keywords, starargs, kwargs = self.get_arglist(n[3]) else: args, keywords, starargs, kwargs = [], [], None, None decs.append(ast.Call( name, args, keywords, starargs, kwargs, *n[1].start)) else: decs.append(name) funccls.decorator_list = decs return funccls def get_attribute(self, node): # dotted_name: NAME ('.' NAME)* first = ast.Name(node[0].val, ast.Load, *node.start) if len(node) == 1: return first attr = ast.Attribute(first, node[2].val, ast.Load, *node.start) i = 4 while i < len(node): attr = ast.Attribute(attr, node[i].val, ast.Load, *node.start) i += 2 return attr<|fim▁end|>
if is_str: return ast.Str(''.join(chars), *node.start) return ast.Bytes(bytes(chars), *node.start)
<|file_name|>jsoncli.py<|end_file_name|><|fim▁begin|>""" JsonCli: Library for CLI based on JSON -------------------------------------- +------------------------+-------------+ | This is the JsonCli common library. | +------------------------+-------------+ """<|fim▁hole|> type_map = { 'structure': str, 'map': str, 'timestamp': str, 'list': str, 'string': str, 'float': float, 'integer': int, 'long': int, 'boolean': bool, 'double': float, 'blob': str} class OrderNamespace(argparse.Namespace): """ Namespace with Order: from argparse.Namespace """ __order__ = OrderedDict() def __init__(self, **kwargs): super(OrderNamespace, self).__init__(**kwargs) def __setattr__(self, attr, value): if value is not None: self.__order__[attr] = value super(OrderNamespace, self).__setattr__(attr, value) def add_arguments(group, args): """ Add Arguments to CLI """ for kkk, vvv in args.iteritems(): if 'type' in vvv and vvv['type'] in type_map: vvv['type'] = type_map[vvv['type']] if 'help' in vvv and not vvv['help']: vvv['help'] = argparse.SUPPRESS changed = xform_name(kkk, "-") if kkk != changed: kkk = "-".join(["", changed]) group.add_argument(kkk, **vvv) return group def recursive_parser(parser, args): """ Recursive CLI Parser """ subparser = parser.add_subparsers(help=args.get( '__help__', ''), dest=args.get('__dest__', '')) for k, v in args.iteritems(): if k == '__help__' or k == '__dest__': continue group = subparser.add_parser(k, help=v.get('help', '')) for kk, vv in v.iteritems(): if kk == 'Subparsers': group = recursive_parser(group, vv) elif kk == 'Arguments': group = add_arguments(group, vv) return parser def parse_args(args): """ Create the Command Line Interface :type args: dict :param args: describes the command structure for the CLI """ parser = argparse.ArgumentParser(description=args.get('Description', '')) for k, v in args.iteritems(): if k == 'Subparsers': parser = recursive_parser(parser, v) elif k == 'Arguments': parser = add_arguments(parser, v) autocomplete(parser) 
return parser.parse_args(None, OrderNamespace()) # vim: tabstop=4 shiftwidth=4 softtabstop=4<|fim▁end|>
import argparse from collections import OrderedDict from argcomplete import autocomplete from botocore import xform_name
<|file_name|>backfill_opportunity_ids.py<|end_file_name|><|fim▁begin|>""" Backfill opportunity ids for Enterprise Coupons, Enterprise Offers and Manual Order Offers. """ import csv import logging from collections import Counter, defaultdict from time import sleep from uuid import UUID from django.core.management import BaseCommand from ecommerce.core.constants import COUPON_PRODUCT_CLASS_NAME from ecommerce.extensions.offer.models import OFFER_PRIORITY_ENTERPRISE, OFFER_PRIORITY_MANUAL_ORDER from ecommerce.programs.custom import get_model ConditionalOffer = get_model('offer', 'ConditionalOffer') Product = get_model('catalogue', 'Product') logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) class Command(BaseCommand): """ Backfill opportunity ids for Enterprise Coupons, Enterprise Offers and Manual Order Offers. """ def add_arguments(self, parser): parser.add_argument( '--data-csv', action='store', dest='data_csv', default=None, help='Path of csv to read enterprise uuids and opportunity ids.', type=str, ) parser.add_argument( '--contract-type', action='store', dest='contract_type', default='single', choices=['single', 'multi'], help='Specify type of backfilling', type=str, ) parser.add_argument( '--batch-limit', action='store', dest='batch_limit', default=100, help='Number of records to be fetched in each batch of backfilling.', type=int, ) parser.add_argument( '--batch-offset', action='store', dest='batch_offset', default=0, help='Which index to start batching from.',<|fim▁hole|> '--batch-sleep', action='store', dest='batch_sleep', default=10, help='How long to sleep between batches.', type=int, ) def read_csv(self, csv_path): data = {} with open(csv_path) as csv_file: reader = csv.DictReader(csv_file) for row in reader: data[UUID(row['enterprise_customer_uuid'])] = row['opportunity_id'] return data def read_multi_contracts_csv(self, csv_path): data = { 'coupons': defaultdict(list), 'offers': defaultdict(list), 'ec_uuids': defaultdict(list), } with 
open(csv_path) as csv_file: reader = csv.DictReader(csv_file) for row in reader: if row['ORDER_LINE_OFFER_TYPE'] == 'Voucher': data['coupons'][row['ORDER_LINE_COUPON_ID']].append(row['OPP_ID']) elif row['ORDER_LINE_OFFER_TYPE'] in ('Site', 'User'): data['offers'][row['ORDER_LINE_OFFER_ID']].append(row['OPP_ID']) else: data['ec_uuids'][UUID(row['ENTERPRISE_CUSTOMER_UUID'])].append(row['OPP_ID']) # condition the data so that at the end we have only one opportunity id for each coupon/offer for __, category_data in data.items(): for category_object_id, opportunity_ids in category_data.items(): if len(opportunity_ids) > 1: most_common_opportunity_id, __ = Counter(opportunity_ids).most_common(1)[0] category_data[category_object_id] = most_common_opportunity_id else: category_data[category_object_id] = opportunity_ids[0] return data def get_enterprise_coupons_batch(self, coupon_filter, start, end): logger.info('Fetching new batch of enterprise coupons from indexes: %s to %s', start, end) return Product.objects.filter(**coupon_filter)[start:end] def get_enterprise_offers_batch(self, offer_filter, start, end): return ConditionalOffer.objects.filter(**offer_filter)[start:end] def _backfill_enterprise_coupons(self, data, options, coupon_filter): batch_limit = options['batch_limit'] batch_sleep = options['batch_sleep'] batch_offset = options['batch_offset'] current_batch_index = batch_offset logger.info('Started Backfilling Enterprise Coupons...') coupons = self.get_enterprise_coupons_batch(coupon_filter, batch_offset, batch_offset + batch_limit) while coupons: for coupon in coupons: opportunity_id = data.get(str(coupon.id)) or data.get(UUID(coupon.attr.enterprise_customer_uuid)) if getattr(coupon.attr, 'sales_force_id', None) is None and opportunity_id: logger.info( 'Enterprise Coupon updated. 
CouponID: [%s], OpportunityID: [%s]', coupon.id, opportunity_id ) coupon.attr.sales_force_id = opportunity_id coupon.save() sleep(batch_sleep) current_batch_index += len(coupons) coupons = self.get_enterprise_coupons_batch( coupon_filter, current_batch_index, current_batch_index + batch_limit ) logger.info('Backfilling for Enterprise Coupons finished.') def _backfill_offers(self, data, options, offer_filter, log_prefix): logger.info('[%s] Started Backfilling Offers...', log_prefix) batch_limit = options['batch_limit'] batch_sleep = options['batch_sleep'] batch_offset = options['batch_offset'] current_batch_index = batch_offset ent_offers = self.get_enterprise_offers_batch(offer_filter, batch_offset, batch_offset + batch_limit) while ent_offers: for ent_offer in ent_offers: opportunity_id = data.get(str(ent_offer.id)) or data.get(ent_offer.condition.enterprise_customer_uuid) if bool(ent_offer.sales_force_id) is False and opportunity_id: logger.info( '[%s] Offer updated. OfferID: [%s], OpportunityID: [%s]', log_prefix, ent_offer.id, opportunity_id, ) ent_offer.sales_force_id = opportunity_id ent_offer.save() sleep(batch_sleep) current_batch_index += len(ent_offers) ent_offers = self.get_enterprise_offers_batch( offer_filter, current_batch_index, current_batch_index + batch_limit ) logger.info('[%s] Backfilling for Offers finished.', log_prefix) def handle(self, *args, **options): if options['contract_type'] == 'single': logger.info('Backfilling for single contracts.') self.backfill_single_contracts(options) elif options['contract_type'] == 'multi': logger.info('Backfilling for multi contracts.') self.backfill_multi_contracts(options) def backfill_single_contracts(self, options): data = self.read_csv(options['data_csv']) self._backfill_enterprise_coupons(data, options, { 'product_class__name': COUPON_PRODUCT_CLASS_NAME, 'attributes__code': 'enterprise_customer_uuid', 'attribute_values__value_text__in': data.keys() }) self._backfill_offers(data, options, { 
'offer_type': ConditionalOffer.SITE, 'priority': OFFER_PRIORITY_ENTERPRISE, 'condition__enterprise_customer_uuid__in': data.keys(), }, 'ENTERPRISE OFFER') self._backfill_offers(data, options, { 'offer_type': ConditionalOffer.USER, 'priority': OFFER_PRIORITY_MANUAL_ORDER, 'condition__enterprise_customer_uuid__in': data.keys(), }, 'ENTERPRISE MANUAL ORDER OFFER') def backfill_multi_contracts(self, options): data = self.read_multi_contracts_csv(options['data_csv']) coupons_data = data['coupons'] self._backfill_enterprise_coupons(coupons_data, options, { 'product_class__name': COUPON_PRODUCT_CLASS_NAME, 'id__in': coupons_data.keys() }) offers_data = data['offers'] self._backfill_offers(offers_data, options, { 'offer_type__in': (ConditionalOffer.SITE, ConditionalOffer.USER), 'priority__in': (OFFER_PRIORITY_ENTERPRISE, OFFER_PRIORITY_MANUAL_ORDER), 'id__in': offers_data.keys(), }, 'ALL ENTERPRISE OFFERS') # backfill coupons and offers missing both coupon id and offer id ec_uuids = data['ec_uuids'] self._backfill_enterprise_coupons(ec_uuids, options, { 'product_class__name': COUPON_PRODUCT_CLASS_NAME, 'attributes__code': 'enterprise_customer_uuid', 'attribute_values__value_text__in': ec_uuids.keys() }) self._backfill_offers(ec_uuids, options, { 'offer_type': ConditionalOffer.SITE, 'priority': OFFER_PRIORITY_ENTERPRISE, 'condition__enterprise_customer_uuid__in': ec_uuids.keys(), }, 'ENTERPRISE OFFER') self._backfill_offers(ec_uuids, options, { 'offer_type': ConditionalOffer.USER, 'priority': OFFER_PRIORITY_MANUAL_ORDER, 'condition__enterprise_customer_uuid__in': ec_uuids.keys(), }, 'ENTERPRISE MANUAL ORDER OFFER')<|fim▁end|>
type=int, ) parser.add_argument(
<|file_name|>console.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # Console import sys, os, time, subprocess def MCS(): return subprocess.Popen(['python', 'mcs.py']) def color(text, color): if color == 0: color = "\033[0m" if color == 1: color = "\033[94m" if color == 2: color = "\033[92m" if color == 3: color = "\033[91m" if color == 4: color = "\033[93m" return color+text+"\033[0m" def showMenu(): print("\033[H\033[J", end="") print("== Music Control System v0.1 ==") print("= =") print("= {} MCS Time {} =".format(color("[7]",1), color("[8]",3))) print("= {} MCS Auto {} =".format(color("[4]",1), color("[5]",3))) print("= {} MCS EDT {} =".format(color("[1]",1), color("[2]",3))) print("= =") print("========= Informations ========") print("= =") print("= =") print("= =") print("===============================")<|fim▁hole|> def main(): # Old hack (TODO) class main: def poll(): return True while 1: showMenu() command = str(input("=> ")) if command == '4': if main.poll() == None: main.terminate() main.kill() main = MCS() if command == '5': if main.poll() == None: main.terminate() main.kill() if command == 'q': exit(0) if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>test_turtle.py<|end_file_name|><|fim▁begin|>import pickle import unittest from test import test_support as support turtle = support.import_module('turtle') Vec2D = turtle.Vec2D test_config = """\ width = 0.75 height = 0.8 canvwidth = 500 canvheight = 200 leftright = 100 topbottom = 100 mode = world colormode = 255 delay = 100 undobuffersize = 10000 shape = circle pencolor = red fillcolor = blue resizemode = auto visible = None language = english exampleturtle = turtle examplescreen = screen title = Python Turtle Graphics using_IDLE = '' """ test_config_two = """\ # Comments! # Testing comments! pencolor = red fillcolor = blue visible = False language = english # Some more # comments using_IDLE = False """ invalid_test_config = """ pencolor = red fillcolor: blue visible = False """ class TurtleConfigTest(unittest.TestCase): def get_cfg_file(self, cfg_str): self.addCleanup(support.unlink, support.TESTFN) with open(support.TESTFN, 'w') as f: f.write(cfg_str) return support.TESTFN def test_config_dict(self): cfg_name = self.get_cfg_file(test_config) parsed_cfg = turtle.config_dict(cfg_name) expected = { 'width' : 0.75, 'height' : 0.8, 'canvwidth' : 500, 'canvheight': 200, 'leftright': 100, 'topbottom': 100, 'mode': 'world', 'colormode': 255, 'delay': 100, 'undobuffersize': 10000, 'shape': 'circle', 'pencolor' : 'red', 'fillcolor' : 'blue', 'resizemode' : 'auto', 'visible' : None, 'language': 'english', 'exampleturtle': 'turtle', 'examplescreen': 'screen', 'title': 'Python Turtle Graphics', 'using_IDLE': '', } self.assertEqual(parsed_cfg, expected) def test_partial_config_dict_with_commments(self): cfg_name = self.get_cfg_file(test_config_two) parsed_cfg = turtle.config_dict(cfg_name) expected = { 'pencolor': 'red', 'fillcolor': 'blue', 'visible': False, 'language': 'english', 'using_IDLE': False, } self.assertEqual(parsed_cfg, expected) def test_config_dict_invalid(self): cfg_name = self.get_cfg_file(invalid_test_config) with support.captured_stdout() as 
stdout: parsed_cfg = turtle.config_dict(cfg_name) err_msg = stdout.getvalue() self.assertIn('Bad line in config-file ', err_msg) self.assertIn('fillcolor: blue', err_msg) self.assertEqual(parsed_cfg, { 'pencolor': 'red', 'visible': False, }) class VectorComparisonMixin: def assertVectorsAlmostEqual(self, vec1, vec2): if len(vec1) != len(vec2): self.fail("Tuples are not of equal size") for idx, (i, j) in enumerate(zip(vec1, vec2)): self.assertAlmostEqual( i, j, msg='values at index {} do not match'.format(idx)) class TestVec2D(VectorComparisonMixin, unittest.TestCase): def test_constructor(self): vec = Vec2D(0.5, 2) self.assertEqual(vec[0], 0.5) self.assertEqual(vec[1], 2) self.assertIsInstance(vec, Vec2D) self.assertRaises(TypeError, Vec2D) self.assertRaises(TypeError, Vec2D, 0) self.assertRaises(TypeError, Vec2D, (0, 1)) self.assertRaises(TypeError, Vec2D, vec) self.assertRaises(TypeError, Vec2D, 0, 1, 2) def test_repr(self): vec = Vec2D(0.567, 1.234) self.assertEqual(repr(vec), '(0.57,1.23)') def test_equality(self): vec1 = Vec2D(0, 1) vec2 = Vec2D(0.0, 1) vec3 = Vec2D(42, 1) self.assertEqual(vec1, vec2) self.assertEqual(vec1, tuple(vec1)) self.assertEqual(tuple(vec1), vec1) self.assertNotEqual(vec1, vec3) self.assertNotEqual(vec2, vec3) def test_pickling(self): vec = Vec2D(0.5, 2) for proto in range(pickle.HIGHEST_PROTOCOL + 1): pickled = pickle.dumps(vec, protocol=proto) unpickled = pickle.loads(pickled) self.assertEqual(unpickled, vec) self.assertIsInstance(unpickled, Vec2D) def _assert_arithmetic_cases(self, test_cases, lambda_operator): for test_case in test_cases: ((first, second), expected) = test_case op1 = Vec2D(*first) op2 = Vec2D(*second) result = lambda_operator(op1, op2) expected = Vec2D(*expected) self.assertVectorsAlmostEqual(result, expected) def test_vector_addition(self): test_cases = [ (((0, 0), (1, 1)), (1.0, 1.0)), (((-1, 0), (2, 2)), (1, 2)), (((1.5, 0), (1, 1)), (2.5, 1)), ] self._assert_arithmetic_cases(test_cases, lambda x, y: x + y) def 
test_vector_subtraction(self): test_cases = [ (((0, 0), (1, 1)), (-1, -1)), (((10.625, 0.125), (10, 0)), (0.625, 0.125)), ] self._assert_arithmetic_cases(test_cases, lambda x, y: x - y) def test_vector_multiply(self): vec1 = Vec2D(10, 10) vec2 = Vec2D(0.5, 3) answer = vec1 * vec2 expected = 35 self.assertAlmostEqual(answer, expected) vec = Vec2D(0.5, 3) answer = vec * 10 expected = Vec2D(5, 30) self.assertVectorsAlmostEqual(answer, expected) def test_vector_negative(self): vec = Vec2D(10, -10) expected = (-10, 10) self.assertVectorsAlmostEqual(-vec, expected) def test_distance(self): vec = Vec2D(6, 8) expected = 10 self.assertEqual(abs(vec), expected) vec = Vec2D(0, 0) expected = 0 self.assertEqual(abs(vec), expected) vec = Vec2D(2.5, 6) expected = 6.5 self.assertEqual(abs(vec), expected) <|fim▁hole|> cases = [ (((0, 0), 0), (0, 0)), (((0, 1), 90), (-1, 0)), (((0, 1), -90), (1, 0)), (((1, 0), 180), (-1, 0)), (((1, 0), 360), (1, 0)), ] for case in cases: (vec, rot), expected = case vec = Vec2D(*vec) got = vec.rotate(rot) self.assertVectorsAlmostEqual(got, expected) class TestTNavigator(VectorComparisonMixin, unittest.TestCase): def setUp(self): self.nav = turtle.TNavigator() def test_goto(self): self.nav.goto(100, -100) self.assertAlmostEqual(self.nav.xcor(), 100) self.assertAlmostEqual(self.nav.ycor(), -100) def test_pos(self): self.assertEqual(self.nav.pos(), self.nav._position) self.nav.goto(100, -100) self.assertEqual(self.nav.pos(), self.nav._position) def test_left(self): self.assertEqual(self.nav._orient, (1.0, 0)) self.nav.left(90) self.assertVectorsAlmostEqual(self.nav._orient, (0.0, 1.0)) def test_right(self): self.assertEqual(self.nav._orient, (1.0, 0)) self.nav.right(90) self.assertVectorsAlmostEqual(self.nav._orient, (0, -1.0)) def test_reset(self): self.nav.goto(100, -100) self.assertAlmostEqual(self.nav.xcor(), 100) self.assertAlmostEqual(self.nav.ycor(), -100) self.nav.reset() self.assertAlmostEqual(self.nav.xcor(), 0) 
self.assertAlmostEqual(self.nav.ycor(), 0) def test_forward(self): self.nav.forward(150) expected = Vec2D(150, 0) self.assertVectorsAlmostEqual(self.nav.position(), expected) self.nav.reset() self.nav.left(90) self.nav.forward(150) expected = Vec2D(0, 150) self.assertVectorsAlmostEqual(self.nav.position(), expected) self.assertRaises(TypeError, self.nav.forward, 'skldjfldsk') def test_backwards(self): self.nav.back(200) expected = Vec2D(-200, 0) self.assertVectorsAlmostEqual(self.nav.position(), expected) self.nav.reset() self.nav.right(90) self.nav.back(200) expected = Vec2D(0, 200) self.assertVectorsAlmostEqual(self.nav.position(), expected) def test_distance(self): self.nav.forward(100) expected = 100 self.assertAlmostEqual(self.nav.distance(Vec2D(0,0)), expected) def test_radians_and_degrees(self): self.nav.left(90) self.assertAlmostEqual(self.nav.heading(), 90) self.nav.radians() self.assertAlmostEqual(self.nav.heading(), 1.57079633) self.nav.degrees() self.assertAlmostEqual(self.nav.heading(), 90) def test_towards(self): coordinates = [ # coordinates, expected ((100, 0), 0.0), ((100, 100), 45.0), ((0, 100), 90.0), ((-100, 100), 135.0), ((-100, 0), 180.0), ((-100, -100), 225.0), ((0, -100), 270.0), ((100, -100), 315.0), ] for (x, y), expected in coordinates: self.assertEqual(self.nav.towards(x, y), expected) self.assertEqual(self.nav.towards((x, y)), expected) self.assertEqual(self.nav.towards(Vec2D(x, y)), expected) def test_heading(self): self.nav.left(90) self.assertAlmostEqual(self.nav.heading(), 90) self.nav.left(45) self.assertAlmostEqual(self.nav.heading(), 135) self.nav.right(1.6) self.assertAlmostEqual(self.nav.heading(), 133.4) self.assertRaises(TypeError, self.nav.right, 'sdkfjdsf') self.nav.reset() rotations = [10, 20, 170, 300] result = sum(rotations) % 360 for num in rotations: self.nav.left(num) self.assertEqual(self.nav.heading(), result) self.nav.reset() result = (360-sum(rotations)) % 360 for num in rotations: self.nav.right(num) 
self.assertEqual(self.nav.heading(), result) self.nav.reset() rotations = [10, 20, -170, 300, -210, 34.3, -50.2, -10, -29.98, 500] sum_so_far = 0 for num in rotations: if num < 0: self.nav.right(abs(num)) else: self.nav.left(num) sum_so_far += num self.assertAlmostEqual(self.nav.heading(), sum_so_far % 360) def test_setheading(self): self.nav.setheading(102.32) self.assertAlmostEqual(self.nav.heading(), 102.32) self.nav.setheading(-123.23) self.assertAlmostEqual(self.nav.heading(), (-123.23) % 360) self.nav.setheading(-1000.34) self.assertAlmostEqual(self.nav.heading(), (-1000.34) % 360) self.nav.setheading(300000) self.assertAlmostEqual(self.nav.heading(), 300000%360) def test_positions(self): self.nav.forward(100) self.nav.left(90) self.nav.forward(-200) self.assertVectorsAlmostEqual(self.nav.pos(), (100.0, -200.0)) def test_setx_and_sety(self): self.nav.setx(-1023.2334) self.nav.sety(193323.234) self.assertVectorsAlmostEqual(self.nav.pos(), (-1023.2334, 193323.234)) def test_home(self): self.nav.left(30) self.nav.forward(-100000) self.nav.home() self.assertVectorsAlmostEqual(self.nav.pos(), (0,0)) self.assertAlmostEqual(self.nav.heading(), 0) def test_distance_method(self): self.assertAlmostEqual(self.nav.distance(30, 40), 50) vec = Vec2D(0.22, .001) self.assertAlmostEqual(self.nav.distance(vec), 0.22000227271553355) another_turtle = turtle.TNavigator() another_turtle.left(90) another_turtle.forward(10000) self.assertAlmostEqual(self.nav.distance(another_turtle), 10000) class TestTPen(unittest.TestCase): def test_pendown_and_penup(self): tpen = turtle.TPen() self.assertTrue(tpen.isdown()) tpen.penup() self.assertFalse(tpen.isdown()) tpen.pendown() self.assertTrue(tpen.isdown()) def test_showturtle_hideturtle_and_isvisible(self): tpen = turtle.TPen() self.assertTrue(tpen.isvisible()) tpen.hideturtle() self.assertFalse(tpen.isvisible()) tpen.showturtle() self.assertTrue(tpen.isvisible()) def test_main(): support.run_unittest(TurtleConfigTest, TestVec2D, 
TestTNavigator, TestTPen) if __name__ == '__main__': test_main()<|fim▁end|>
def test_rotate(self):
<|file_name|>filterFASTA.py<|end_file_name|><|fim▁begin|>from Bio import SeqIO import sys, string fasta_file = "/Users/saljh8/GitHub/altanalyze/AltDatabase/EnsMart72/Hs/SequenceData/Homo_sapiens.GRCh37.72.cdna.all.fa" # Input fasta file<|fim▁hole|> fasta_sequences = SeqIO.parse(open(fasta_file),'fasta') with open(result_file, "w") as f: for seq in fasta_sequences: chr = string.split(seq.description,':')[3] try: float(chr) SeqIO.write([seq], f, "fasta") except: continue<|fim▁end|>
result_file = "/Users/saljh8/GitHub/altanalyze/AltDatabase/EnsMart72/Hs/SequenceData/Homo_sapiens.GRCh37.72.cdna.all.filtered.fa" # Output fasta file
<|file_name|>multiplecdrc_it_test.go<|end_file_name|><|fim▁begin|>// +build integration /* Real-time Online/Offline Charging System (OCS) for Telecom & ISP environments Copyright (C) ITsysCOM GmbH This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/> */ package general_tests import ( "errors" "flag" "fmt" "io/ioutil" "net/rpc" "net/rpc/jsonrpc" "os" "os/exec" "path" "testing" "time" "github.com/cgrates/cgrates/config" "github.com/cgrates/cgrates/engine" "github.com/cgrates/cgrates/utils" ) var cfgPath string var cfg *config.CGRConfig var rater *rpc.Client var testCalls = flag.Bool("calls", false, "Run test calls simulation, not by default.") var dataDir = flag.String("data_dir", "/usr/share/cgrates", "CGR data dir path here") var storDbType = flag.String("stordb_type", "mysql", "The type of the storDb database <mysql>") var waitRater = flag.Int("wait_rater", 100, "Number of miliseconds to wait for rater to start and cache") func startEngine() error { enginePath, err := exec.LookPath("cgr-engine") if err != nil { return errors.New("Cannot find cgr-engine executable") } stopEngine() engine := exec.Command(enginePath, "-config_dir", cfgPath) if err := engine.Start(); err != nil { return fmt.Errorf("Cannot start cgr-engine: %s", err.Error()) } time.Sleep(time.Duration(*waitRater) * time.Millisecond) // Give time to rater to fire up return nil } func stopEngine() error { exec.Command("pkill", "cgr-engine").Run() // Just to make 
sure another one is not running, bit brutal maybe we can fine tune it time.Sleep(time.Duration(*waitRater) * time.Millisecond) return nil } func TestMCDRCLoadConfig(t *testing.T) { var err error cfgPath = path.Join(*dataDir, "conf", "samples", "multiplecdrc") if cfg, err = config.NewCGRConfigFromFolder(cfgPath); err != nil { t.Error(err) } } // Remove data in both rating and accounting db func TestMCDRCResetDataDb(t *testing.T) { if err := engine.InitDataDb(cfg); err != nil { t.Fatal(err) } } func TestMCDRCEmptyTables(t *testing.T) { if err := engine.InitStorDb(cfg); err != nil { t.Fatal(err) } } func TestMCDRCCreateCdrDirs(t *testing.T) { for _, cdrcProfiles := range cfg.CdrcProfiles { for _, cdrcInst := range cdrcProfiles { for _, dir := range []string{cdrcInst.CdrInDir, cdrcInst.CdrOutDir} { if err := os.RemoveAll(dir); err != nil { t.Fatal("Error removing folder: ", dir, err) } if err := os.MkdirAll(dir, 0755); err != nil { t.Fatal("Error creating folder: ", dir, err) } } } } } // Connect rpc client to rater func TestMCDRCRpcConn(t *testing.T) { startEngine() var err error rater, err = jsonrpc.Dial("tcp", cfg.RPCJSONListen) // We connect over JSON so we can also troubleshoot if needed if err != nil { t.Fatal("Could not connect to rater: ", err.Error()) } } // Test here LoadTariffPlanFromFolder func TestMCDRCApierLoadTariffPlanFromFolder(t *testing.T) { reply := "" // Simple test that command is executed without errors attrs := &utils.AttrLoadTpFromFolder{FolderPath: path.Join(*dataDir, "tariffplans", "testtp")} if err := rater.Call("ApierV1.LoadTariffPlanFromFolder", attrs, &reply); err != nil { t.Error("Got error on ApierV1.LoadTariffPlanFromFolder: ", err.Error()) } else if reply != "OK" { t.Error("Calling ApierV1.LoadTariffPlanFromFolder got reply: ", reply) } time.Sleep(time.Duration(*waitRater) * time.Millisecond) // Give time for scheduler to execute topups } // The default scenario, out of cdrc defined in .cfg file func TestMCDRCHandleCdr1File(t 
*testing.T) { var fileContent1 = `dbafe9c8614c785a65aabd116dd3959c3c56f7f6,default,*voice,dsafdsaf,rated,*out,cgrates.org,call,1001,1001,+4986517174963,2013-11-07 08:42:25 +0000 UTC,2013-11-07 08:42:26 +0000 UTC,10000000000,1.0100,val_extra3,"",val_extra1 dbafe9c8614c785a65aabd116dd3959c3c56f7f7,default,*voice,dsafdsag,rated,*out,cgrates.org,call,1001,1001,+4986517174964,2013-11-07 09:42:25 +0000 UTC,2013-11-07 09:42:26 +0000 UTC,20000000000,1.0100,val_extra3,"",val_extra1 ` fileName := "file1.csv" tmpFilePath := path.Join("/tmp", fileName) if err := ioutil.WriteFile(tmpFilePath, []byte(fileContent1), 0644); err != nil { t.Fatal(err.Error()) } if err := os.Rename(tmpFilePath, path.Join("/tmp/cgrates/cdrc1/in", fileName)); err != nil { t.Fatal("Error moving file to processing directory: ", err)<|fim▁hole|> } } // Scenario out of first .xml config func TestMCDRCHandleCdr2File(t *testing.T) { var fileContent = `616350843,20131022145011,20131022172857,3656,1001,,,data,mo,640113,0.000000,1.222656,1.222660 616199016,20131022154924,20131022154955,3656,1001,086517174963,,voice,mo,31,0.000000,0.000000,0.000000 800873243,20140516063739,20140516063739,9774,1001,+49621621391,,sms,mo,1,0.00000,0.00000,0.00000` fileName := "file2.csv" tmpFilePath := path.Join("/tmp", fileName) if err := ioutil.WriteFile(tmpFilePath, []byte(fileContent), 0644); err != nil { t.Fatal(err.Error()) } if err := os.Rename(tmpFilePath, path.Join("/tmp/cgrates/cdrc2/in", fileName)); err != nil { t.Fatal("Error moving file to processing directory: ", err) } } // Scenario out of second .xml config func TestMCDRCHandleCdr3File(t *testing.T) { var fileContent = `4986517174960;4986517174963;Sample Mobile;08.04.2014 22:14:29;08.04.2014 22:14:29;1;193;Offeak;0,072728833;31619 4986517174960;4986517174964;National;08.04.2014 20:34:55;08.04.2014 20:34:55;1;21;Offeak;0,0079135;311` fileName := "file3.csv" tmpFilePath := path.Join("/tmp", fileName) if err := ioutil.WriteFile(tmpFilePath, []byte(fileContent), 0644); 
err != nil { t.Fatal(err.Error()) } if err := os.Rename(tmpFilePath, path.Join("/tmp/cgrates/cdrc3/in", fileName)); err != nil { t.Fatal("Error moving file to processing directory: ", err) } } func TestMCDRCStopEngine(t *testing.T) { stopEngine() }<|fim▁end|>