prompt
large_stringlengths
70
991k
completion
large_stringlengths
0
1.02k
<|file_name|>print_table.rs<|end_file_name|><|fim▁begin|>use itertools::*; use std::cmp::max; use std::fmt::{self, Debug, Formatter}; pub fn debug_table<A, B, C, D, E, F, G>(name: A, column_names: B, column_alignments: D, rows: E) -> Box<Debug> where A: Into<String>, B: IntoIterator<Item = C>, C: Into<String>, D: IntoIterator<Item = Alignment>, E: IntoIterator<Item = F>, F: IntoIterator<Item = G>,<|fim▁hole|> let name = name.into(); let col_names = column_names.into_iter().map(Into::into).collect_vec(); let col_align = column_alignments.into_iter().collect_vec(); assert_eq!(col_names.len(), col_align.len()); let mut col_widths = col_names.iter().map(String::len).collect_vec(); let rows = rows.into_iter().map(|r| r.into_iter().map(Into::into).collect_vec()).collect_vec(); for row in rows.iter() { assert_eq!(col_widths.len(), row.len()); for (i, x) in row.iter().enumerate() { col_widths[i] = max(col_widths[i], x.len()); } } let header = format!("| {} |", col_names.into_iter() .enumerate() .map(|(i, s)| format!("{:^1$}", s, col_widths[i])) .join(" | ")); let sep = header.chars() .map(|c| { match c { '|' => "+", _ => "-", } }) .join(""); Box::new(TablePrinter { name: name, header: header, sep: sep, col_widths: col_widths, col_align: col_align, rows: rows, }) } pub enum Alignment { Left, Right, Center, } struct TablePrinter { name: String, header: String, sep: String, col_widths: Vec<usize>, col_align: Vec<Alignment>, rows: Vec<Vec<String>>, } impl TablePrinter { fn fmt_row(&self, row: &[String]) -> String { format!("| {} |", row.iter() .enumerate() .map(|(i, s)| { match self.col_align[i] { Alignment::Left => format!("{:<1$}", s, self.col_widths[i]), Alignment::Center => format!("{:^1$}", s, self.col_widths[i]), Alignment::Right => format!("{:>1$}", s, self.col_widths[i]), } }) .join(" | ")) } } impl Debug for TablePrinter { fn fmt(&self, f: &mut Formatter) -> fmt::Result { try!(writeln!(f, "{}:", self.name)); try!(writeln!(f, "{}", self.sep)); try!(writeln!(f, "{}", 
self.header)); try!(writeln!(f, "{}", self.sep)); for row in self.rows.iter() { try!(writeln!(f, "{}", self.fmt_row(&*row))); } writeln!(f, "{}", self.sep) } }<|fim▁end|>
G: Into<String> {
<|file_name|>RegisteredDriverImpl.java<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2011 Prashant Dighe * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * The above copyright notice and this permission notice shall be included * in all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. */ package com.example.demo.model.impl; import java.util.HashMap; import java.util.Map; import com.example.demo.model.RegisteredDriver; /** * * @author Prashant Dighe * */ public class RegisteredDriverImpl implements RegisteredDriver { public RegisteredDriverImpl() {} public RegisteredDriverImpl(Map<String, Object> map) { _name = (String)map.get("name"); _age = (Integer)map.get("age"); } @Override public String getName() { return _name; } @Override public void setName(String name) { _name = name; } @Override public int getAge() { return _age; } @Override public void setAge(int age) { _age = age; } public Map<String, Object> toMap() { Map<String, Object> map = new HashMap<String, Object>(); map.put("name", _name); map.put("age", _age); return map; } <|fim▁hole|> private static final long serialVersionUID = 1L; }<|fim▁end|>
private String _name = null; private int _age = 0;
<|file_name|>format.py<|end_file_name|><|fim▁begin|>import my_data_file d = my_data_file.my_data <|fim▁hole|>print "Hello my name is %s and i am %d years of age and my coolnes is %d " % (d [ 'naam' ], d [ 'age' ], d ['coolheid'])<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>/* > ++ptr; < --ptr; + ++*ptr; - --*ptr; . putchar(*ptr); , *ptr = getchar(); [ while (*ptr) { ] } */ use std::os; extern { fn getchar() -> i8; } fn get_val_of_ptr(val: &Vec<i8>, ptr: uint) -> i8 { if ptr < val.len() { val[ptr] } else { 0 } } fn set_val_of_ptr(val: &mut Vec<i8>, ptr: uint, v: i8){ if ptr >= val.len() { for _ in range(0, ptr + 1 - val.len()) { val.push(0); } } val[ptr] = v; } // inc (if x==1) or dec (if x==-1) fn inc_val_of_ptr(val: &mut Vec<i8>, ptr: uint, x: i8) { let x = get_val_of_ptr(val, ptr) + x; set_val_of_ptr(val, ptr, x); } #[deriving(Show)] struct LoopState { from: uint, to: Option<uint>, } impl Copy for LoopState {} impl LoopState { fn new(from: uint, to: Option<uint>) -> LoopState { LoopState { from: from, to: to } } } fn init_loop_states(states: &mut Vec<LoopState>, code: &str) { let mut stack: Vec<LoopState> = vec![]; for (i,c) in code.chars().enumerate() { match c { '[' => { stack.push(LoopState::new(i,None)); } ']' => { if stack.len() > 0 { let last = stack.len() - 1; stack[last].to = Some(i); states.push(stack.pop().unwrap()); } else { note_char_at(code, i); panic!("no `[` match this `]` at pos {}", i); } } _ => { } }; } for state in stack.iter() { if state.to.is_none() { note_char_at(code, state.from); panic!("no `]` match this `[` at pos {}", state.from); } } /* // for debug, print all loop states for state in states.iter() { println!("{}", state); } */ } fn note_char_at(code: &str, index: uint) { println!("\n{}", code); for _ in range(0, index) { print!(" "); } println!("^"); } fn match_loop_states(states: &mut Vec<LoopState>, index: uint) -> uint {<|fim▁hole|> return state.to.unwrap(); } else if state.to.unwrap() == index { return state.from; } } panic!("no match loop state at pos {}", index); } fn main() { let args = os::args(); if args.len() < 2 || args[1].len() == 0 { println!("Usage: bfc <code>\n"); return; } let mut ptr: uint = 0; let mut val: Vec<i8> = vec![]; let mut 
loop_states: Vec<LoopState> = vec![]; let code = args[1].as_slice(); init_loop_states(&mut loop_states, code); let mut ip = 0u; // Instruction pointer loop { if ip >= code.len() { break; } let op = code.char_at(ip); match op { '>' => ptr += 1, '<' => { if ptr == 0 { note_char_at(code, ip); panic!("can't < any more, pos {}", ip); } ptr -= 1; } '+' => inc_val_of_ptr(&mut val, ptr, 1), '-' => inc_val_of_ptr(&mut val, ptr, -1), '.' => println!("{}", get_val_of_ptr(&val, ptr) as u8 as char), ',' => { unsafe { set_val_of_ptr(&mut val, ptr, getchar() as i8); } } '[' => { if get_val_of_ptr(&val, ptr) == 0 { ip = match_loop_states(&mut loop_states, ip) + 1; continue; } } ']' => { ip = match_loop_states(&mut loop_states, ip); continue; } 'v' => println!("{}", get_val_of_ptr(&val, ptr)), _ => {} } ip += 1; } // println!("{}", val); // for debug, print all ptr values }<|fim▁end|>
for state in states.iter() { if state.from == index {
<|file_name|>dashboard-controller.js<|end_file_name|><|fim▁begin|>function DashboardController($scope, $state, $stateParams, dashboardFactory) { var dc = this; dc.playerStats = {}; dc.itemForAuction = {}; dc.auctionStarted = false; // Called on page load to retrieve player data dashboardFactory.getData(dashboardFactory.getName()).then(function(response) { dc.playerStats = response.data; }); var unbindLogout = $scope.$on('logout', function() { dashboardFactory.logout().then(function(response) { if (response.status === 200) { $state.go('login'); } }); }); var unbindStart = $scope.$on('startAuction', function(evt, data) { dc.auctionStarted = true; dc.itemForAuction = data; $scope.$broadcast('roll it'); }); var unbindClose = $scope.$on('auction closed', function(evt, data) { updateData(dc.playerStats, data); dashboardFactory.processBid(data).then(function(response) { if (response.data === 200) { dashboardFactory.getData(dashboardFactory.getName()).then(function(res) {<|fim▁hole|> }); }); // Clear events $scope.$on('$destroy', function() { unbindLogout(); unbindStart(); unbindClose(); }); /** * @desc function that updates player dashboard in real-time * @param {Object} playerData - logged in player's data * @param {Object} newData - contains player's recent transaction */ function updateData(playerData, newData) { playerData.coins = playerData.coins - newData.value; angular.forEach(playerData.inventoryItems, function(item) { if (item.name === newData.itemName) { item.quantity = item.quantity - newData.qty; } }); } } module.exports = DashboardController;<|fim▁end|>
dc.playerStats = res.data; }); }
<|file_name|>wsgi.py<|end_file_name|><|fim▁begin|>""" WSGI config for auth_example project. This module contains the WSGI application used by Django's development server and any production WSGI deployments. It should expose a module-level variable named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover this application via the ``WSGI_APPLICATION`` setting. Usually you will have the standard Django WSGI application here, but it also might make sense to replace the whole Django WSGI application with a custom one that later delegates to the Django one. For example, you could introduce WSGI middleware here, or combine a Django application with an application of another framework. """ import os # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks # if running multiple sites in the same mod_wsgi process. To fix this, use # mod_wsgi daemon mode with each site in its own daemon process, or use # os.environ["DJANGO_SETTINGS_MODULE"] = "auth_example.settings"<|fim▁hole|># file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. from django.core.wsgi import get_wsgi_application application = get_wsgi_application() # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application)<|fim▁end|>
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "auth_example.settings") # This application object is used by any WSGI server configured to use this
<|file_name|>io.go<|end_file_name|><|fim▁begin|>package file <|fim▁hole|> // CreateAndWrite creates a file called "name" (or overwrites if it already existed), reads all of r into the file, then closes the file. Returns any errors encountered along the way. func CreateAndWrite(name string, r io.Reader) error { fd, err := os.Create(name) if err != nil { return err } defer fd.Close() if _, err := io.Copy(fd, r); err != nil { return err } return nil }<|fim▁end|>
import ( "io" "os" )
<|file_name|>rssso2015schedule.py<|end_file_name|><|fim▁begin|>#!/usr/local/lib/python2.7.10/bin/python # -*- coding: utf-8 -*- """ Created on Sun Aug 09 13:15:13 2015 @author: Vedran Fetching a schedule from Google Docs, extracting information, converting to pdf and sending via e-mail. Copyright (C) 2015 Vedran Vukovic <[email protected]> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ import sys # for fetching command-line arguments for legal notices import os # for working with files and opening the temporary pdf file import pdfkit # for creating the temporary pdf file import urllib2 # for getting the original file from Google Drive from datetime import datetime # for printing today's date and time from textwrap import fill # for wrapping help, warranty & copyright text from getpass import getpass # for passwords import smtplib # for sending email from email.MIMEMultipart import MIMEMultipart # for multipart emails from email.MIMEText import MIMEText # for multipart emails from email.MIMEBase import MIMEBase # for multipart emails from email import Encoders # for multipart emails """ # fetchHTML(docsID) # This function fetches the google doc converted to the html format. # # PARAMETERS: # docsID - Google Docs ID of the file in question. It can be found in all # Google Docs URLs. Read the code for more details. 
# # RETURNS: # string html - html code of the Google Doc whose id is docsID # # NOTE: # Requires Internet connection and access to Google Docs servers """ def fetchHtml(docsID): address = "https://docs.google.com/document/d/" + docsID + \ "/export?format=html" req = urllib2.Request(address) # creates a html request try: response = urllib2.urlopen(req) # opens the url -> requires # Internet connection and access to Google Docs servers html = response.read() except urllib2.URLError: print "\n\nERROR! The URL with the schedule could not have been " \ "accessed. Check your internet connection." html = None return html """ # inputStr(query = "") # This function prints a query, reads an input string from console and # returns it. # # PARAMETERS: # query - The text to be printed before the prompt. If it doesn't end with # a soace, a space will be appended to it. # # RETURNS: # string response - user's console input converted to a string """ def inputStr(query = ""): theQuery = query[:] if query[-1] != " ": theQuery += " " response = str(raw_input(theQuery)) return response """ # inputInt(query = "") # This function prints a query, reads an input string from console, converts # it to an integer and returns it. # # PARAMETERS: # query - The text to be printed before the prompt. If it doesn't end with # a soace, a space will be appended to it. # # RETURNS: # string response - user's console input converted to an integer """ def inputInt(query = ""): try: response = int(raw_input(query)) except ValueError: print "\n\nERROR! The input should be an integer." response = None return response """ # cutHtml(html) # This function is responsible for going through the html string and # separating it into three parts. # # PARAMETERS: # html - The string containing the conent of a html file derived from the # schedule document. 
# # RETURNS: # string head - contains the text from the beginning of the html string # up to the table row containing the string "Day 1", # including the <tr ..> tag # string tail - the end part of the html string, containing the last </tr> in # the same table and everything after that. # list importantTDs - contains locations of all <td ..> tags located just # before the string "Day X", where X is a string # representation of any digit from 0 to 9 # NOTE: # All locations are relative to the beginning of the parameter string html, # using zero-based indexing. """ def cutHtml(html): allDays = [] # the list which will be populated with locations of all # strings that match the case-insensitive regex "day \d" for i in range(len(html)): if html[i:i+4].lower() == "day ": if html[i+4:i+5].lower() in ([str(x) for x in range(10)]): allDays.append(i) allTDs = [] # the list which will be populated with locations of all # strings that match "<td " for i in range(len(html)): if html[i:i+4].lower() == "<td ": allTDs.append(i) importantTDs = [] # the list which will be populated with locations of all # strings that match "<td " and are at most 150 positions before a location # of any "day \d" (which are contained in the list allDays) for i in range(len(allDays)): for j in range(len(allTDs)): if allTDs[j] - allDays[i] > -150 and allTDs[j] - allDays[i] < 0: importantTDs.append(allTDs[j]) head = html[:importantTDs[0]] tail = html[html.rfind("</tr></tbody></table>"):] return head, tail, importantTDs """ # getDataForDate(html, date, importantTDs) # This function selects the part of the html string containing the <|fim▁hole|># schedule document. # # date - The integer containing the date of the month chosen at the beginning # of the script. 
# # importantTDs - The list of integers containing all locations of <td ...> # tags that contain the case-insensitive regex "day \d"; # basically, the text between importantTDs[x] and # importantTDs[x+1] is the schedule for the day x since the # beginning of the course. # # RETURNS: # string middle - contains the part from the html string that renders as the # schedule for the given date """ def getDataForDate(html, date, importantTDs): day = date - 7 start = importantTDs[day] if day < len(importantTDs)-1: end = importantTDs[day+1] else: end = html.rfind("</tr></tbody></table>") middle = html[start:end] return middle """ # createMessage(date) # This function creates a multipart message containing the text from the # messages.txt file, the temporary pdf file with the schedule and other # components of an e-mail message. # # PARAMETERS: # fromAddr - The string containing the address of the sender. # # toAddrs - The list of strings containing the addresses of all receivers. # # fileName - The integer containing the date of the schedule. # # RETURNS: # MIMEMultipart msg - a MIME message object containing the message, # attachments and other necessary fields. """ def createMessage(fromAddr, toAddrs, ccAddrs, subject, messageText, fileName): msg = MIMEMultipart() msg.attach(MIMEText(messageText)) msg['From'] = fromAddr if toAddrs != None: msg['To'] = ', '.join(toAddrs) if ccAddrs != None: msg['Cc'] = ', '.join(ccAddrs) msg['Subject'] = subject part = MIMEBase('application', 'octet-stream') part.set_payload(open(fileName, 'rb').read()) Encoders.encode_base64(part) part.add_header('Content-Disposition', 'attachment; filename="%s"' % os.path.basename \ (fileName)) msg.attach(part) return msg """ # sendEmail(username, password, server, port, fromAddr = None, toAddrs) # This function sends the email using the specified account and SMTP # server data. # # PARAMETERS: # username - The username used to log on to the server. 
# # password - The password used to log on to the server. # # server - The SMTP server that will be used to send the messages. # # port - The number of an open port on the SMTP server. This number, if not # a string, will be converted to a string automatically. # # fromAddr - The officiall e-mail address from which the server will send the # message. If None, the username will be used instead. # # toAddrs - The list of addresses to which the schedule will be delivered. # If the list is empty or None, the message will be sent to the # sender. # # RETURNS: # None """ def sendEmail(msg, username, password, server, port, fromAddr = None, \ toAddrs = None, ccAddrs = None, bccAddrs = None): if type(toAddrs) == "str": toAddrsN = [toAddrs] else: toAddrsN = toAddrs if type(ccAddrs) == "str": ccAddrsN = [ccAddrs] else: ccAddrsN = ccAddrs if type(bccAddrs) == "str": bccAddrsN = [bccAddrs] else: bccAddrsN = bccAddrs if fromAddr == "" or fromAddr == None: fromAddr = username if toAddrsN == None: toAddrsN = [] if ccAddrsN != None: toAddrsN += ccAddrsN if bccAddrsN != None: toAddrsN += bccAddrsN if toAddrsN == []: toAddrsN = [fromAddr] mailer = smtplib.SMTP(server + ":" + str(port)) try: mailer.starttls() # THIS HAS TO BE EXECUTED BEFORE LOGIN mailer.login(username,password) mailer.sendmail(fromAddr, toAddrs, msg.as_string()) except smtplib.SMTPRecipientsRefused: print "\n\nERROR! SMTPRecipientsRefused. All recipients refused to " \ "receive the message." except smtplib.SMTPDataError: print "\n\nERROR! SMTPDataError. The SMTP server refused to accept " \ "message data." except smtplib.SMTPSenderRefused: print "\n\nERROR! SMTPSenderRefused. The SMTP server didn't accept " \ "the sender address (fromAddr)." except smtplib.SMTPHeloError: print "\n\nERROR! SMTPHeloError. The server didn't properly reply " \ "to the HELO greeting." except smtplib.SMTPAuthenticationError: print "\n\nERROR! SMTPAuthenticationError. Authentication " \ "unsuccessful. 
Check your e-mail and password in the main " \ "part of the code." except smtplib.SMTPException: print "\n\nERROR! SMTPException. One potential cause is that TLS " \ "was not started before login. Check the script." finally: mailer.close() return """ # createPdfFromHtml(html, pdfFileName) # This function creates a pdf file from an html string using the WKHTMLTOPDF # tool via pdfkit library wrapper. # # PARAMETERS: # html - The html string on which the pdf file will be based. # # pdfFileName - The string containing the file name to which the pdf output # will be written. # # quietOption - Should the WKHTMLTOPDF print any output? # # RETURNS: # None """ def createPdfFromHtml(html, pdfFileName, quietOption = False): if quietOption == False: options = {} else: options = {'quiet': ''} pdfkit.from_string(html, fileName, options=options) if __name__=="__main__": commandLine = False # use the program from the command line argsSwitch = False # some arguments have been provided quietSwitch = False # no user prompt mailSwitch = False # send mail printSwitch = False # print if len(sys.argv) > 1: if sys.argv[1] == '-w' or sys.argv[1] == '--warranty': print fill('THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT ' \ 'PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN ' \ 'WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE ' \ 'PROGRAM “AS IS” WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED ' \ 'OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED ' \ 'WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR ' \ 'PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF ' \ 'THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, ' \ 'YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR ' \ 'CORRECTION. 
\n') commandLine = True elif sys.argv[1] == '-c' or sys.argv[1] == '--copyright': print fill('All rights granted under this License are granted ' \ 'for the term of copyright on the Program, and are ' \ 'irrevocable provided the stated conditions are met. ' \ 'This License explicitly affirms your unlimited ' \ 'permission to run the unmodified Program. The output ' \ 'from running a covered work is covered by this License ' \ 'only if the output, given its content, constitutes a ' \ 'covered work. This License acknowledges your rights of ' \ 'fair use or other equivalent, as provided by copyright ' \ 'law.\n' \ 'You may make, run and propagate covered works that you ' \ 'do not convey, without conditions so long as your ' \ 'license otherwise remains in force. You may convey ' \ 'covered works to others for the sole purpose of having ' \ 'them make modifications exclusively for you, or ' \ 'provide you with facilities for running those works, ' \ 'provided that you comply with the terms of this ' \ 'License in conveying all material for which you do not' \ 'control copyright. Those thus making or running the' \ 'covered works for you must do so exclusively on your ' \ 'behalf, under your direction and control, on terms ' \ 'that prohibit them from making any copies of your ' \ 'copyrighted material outside their relationship with ' \ 'you.\n' \ 'Conveying under any other circumstances is permitted ' \ 'solely under the conditions stated below. Sublicensing ' \ 'is not allowed; section 10 makes it unnecessary.\n' \ '\n For more information, visit ' \ '<http://www.gnu.org/licenses/>.') commandLine = True elif sys.argv[1] == '-h' or sys.argv[1] == '--help': print fill('This tool can be used as a command-line or an ' \ 'interactive one. 
Before using it, remember to install ' \ 'dependencies.\n\n' \ 'For warranty, copyright or help type' \ 'python and only one of the following:\n\n' \ 'rssso2015schedule.py [-w] [--warranty] [-c] ' \ '[--copyright] [-h] [--help]\n\n' \ 'To run the program from command line, type python ' \ ' and:\n\n' \ 'rssso2015.schedule.py date [-q] [--quiet] [-m]' \ '[--mail] [-p] [--print]\n\n' \ 'The arguments can be presented in any order. If -q or ' \ '--quiet are specified, the temporary pdf file will not ' \ 'be opened after creation and no prompts will be made. ' \ 'Errors will still be output to the standard output.'\ 'If -m or --message is specified, the mail will be sent. ' \ 'If -p or --print is specified, the file will be sent to ' \ 'the printer specified. If neither printing nor mailing ' \ 'is specified, the file will be mailed.\n\n' \ 'For more information on specifying e-mail destinations ' \ 'and printer names, please read the README.md file.\n\n') commandLine = True elif sys.argv[1].isdigit() == True: date = int(sys.argv[1]) argsSwitch = True if len(sys.argv) > 2: if "-q" in sys.argv[2:4] or "--quiet" in sys.argv[2:4]: quietSwitch = True if "-m" in sys.argv[2:4] or "--mail" in sys.argv[2:4]: mailSwitch = True if "-p" in sys.argv[2:4] or "--print" in sys.argv[2:4]: printSwitch = True if mailSwitch == False and printSwitch == False: mailSwitch = True if commandLine == False: if quietSwitch == False: print "RSSSO2015Schedule. 
Copyright (C) 2015 Vedran Vukovic\n" \ "This program comes with ABSOLUTELY NO WARRANTY; for " \ "details type `rssso2015schedule -w'.\n" \ "This is free software, and you are welcome to " \ "redistribute it\n" \ "under certain conditions; type `rssso2015schedule -c' " \ "for details.\n" \ "For details about using this as a command-line tool, " \ "type 'rssso2015schedule -h'\n\n" # ignore a single whitespace before and after the equal sign settingsTxt = open(".schedulerc").read().replace(" =", "=") \ .replace("= ", "=").split("\n") settings = {} for setting in settingsTxt: values = setting.split("=") if len(values) == 2: settings[values[0]] = values[1] docsId = settings["docsId"] fileName = settings["fileName"] fromAddr = settings["fromAddr"] if "fromAddrWithName" in settings: fromAddrWithName = settings["fromAddrWithName"] else: fromAddrWithName = fromAddr if "toAddrs" in settings: toAddrs = settings["toAddrs"].split(", ") else: toAddrs = None if "ccAddrs" in settings: ccAddrs = settings["ccAddrs"].split(", ") else: ccAddrs = None if "bccAddrs" in settings: bccAddrs = settings["bccAddrs"].split(", ") else: bccAddrs = None if "messageFile" in settings: messageFile = settings["messageFile"] else: messageFile = "" if "subject" in settings: subject = settings["subject"] else: subject = "" username = settings["username"] if "password" in settings: password = settings["password"] server = settings["server"] port = settings["port"] if "printerName" in settings: printerName = settings["printerName"] else: printerName = None del settings # no longer needed if quietSwitch == False: print 'Fetching the document from Google Docs...' html = fetchHtml(docsId) if html != None: if quietSwitch == False: print 'Document fetched.\n' # these two printouts are for convenience print "Today's date is: " + str(datetime.now().day) print "The time is: " + str(datetime.now().time()) if argsSwitch == False: # no arguments provided date = inputInt("Date? 
") if date != None: # fileName and subject can have %d, which should be changed to # the selected date fileName = fileName.replace("%d", str(date)) subject = subject.replace("%d", str(date)) # dividing html to head, tail and importantTDs head, tail, importantTDs = cutHtml(html) # using importantTDs to grab only the relevant schedule data middle = getDataForDate(html, date, importantTDs) createPdfFromHtml(head + middle + tail, fileName, quietSwitch) if quietSwitch == False: os.system("xdg-open " + fileName) y = "n" if argsSwitch == True and quietSwitch == False: # QC y = inputStr("[Yy] = proceed?") else: y = "y" if y.lower() == "y": yy = "n" if argsSwitch == False: # should the file be mailed? yy = inputStr("[Yy] = send the e-mail? ") else: if mailSwitch == True: yy = "y" if yy.lower() == "y": try: if messageFile == "": messageText = "" else: messageText = open(messageFile, "r").read() \ .replace("%d", str(date)) msg = createMessage(fromAddrWithName, toAddrs, \ ccAddrs, subject, \ messageText, fileName) if password == "": password = getpass() # server = "smtp.gmail.com" # your server here sendEmail(msg, username, password, server, port, \ fromAddr, toAddrs, ccAddrs, bccAddrs) if quietSwitch == False: print "\nIf there were no error messages " \ "before this one, the e-mail should " \ "have been sent." except IOError: print "\n\nERROR! Message file not found!" else: print "\n\nE-mail sending aborted by user." yyy = "n" if argsSwitch == False: # should the file be printed? yyy = inputStr("[Yy] = print the file? ") else: if printSwitch == True: yyy = "y" if yyy.lower() == "y": os.system("lpr -P" + printerName + " " + fileName) if quietSwitch == False: print "\nIf there were no error messages before " \ "this one, the file should have been " \ "sent for printing." try: os.unlink(fileName) if quietSwitch == False: print "Temporary PDF file with the schedule should " \ "have been deleted." except WindowsError: print "\n\nERROR! 
PDF file with the schedule was " \ "probably opened, and could not have been deleted." if quietSwitch == False: print "\n\nRSSSO2015Schedule script: End of execution."<|fim▁end|>
# schedule for the chosen date. # # PARAMETERS: # html - The string containing the conent of a html file derived from the
<|file_name|>key.py<|end_file_name|><|fim▁begin|># Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/ # Copyright (c) 2011, Nexenta Systems Inc. # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import email.utils import errno import hashlib import mimetypes import os import re import base64 import binascii import math from hashlib import md5 import boto.utils from boto.compat import BytesIO, six, urllib, encodebytes from boto.exception import BotoClientError from boto.exception import StorageDataError from boto.exception import PleaseRetryException from boto.provider import Provider from boto.s3.keyfile import KeyFile from boto.s3.user import User from boto import UserAgent from boto.utils import compute_md5, compute_hash from boto.utils import find_matching_headers from boto.utils import merge_headers_by_name class Key(object): """ Represents a key (object) in an S3 bucket. 
:ivar bucket: The parent :class:`boto.s3.bucket.Bucket`. :ivar name: The name of this Key object. :ivar metadata: A dictionary containing user metadata that you wish to store with the object or that has been retrieved from an existing object. :ivar cache_control: The value of the `Cache-Control` HTTP header. :ivar content_type: The value of the `Content-Type` HTTP header. :ivar content_encoding: The value of the `Content-Encoding` HTTP header. :ivar content_disposition: The value of the `Content-Disposition` HTTP header. :ivar content_language: The value of the `Content-Language` HTTP header. :ivar etag: The `etag` associated with this object. :ivar last_modified: The string timestamp representing the last time this object was modified in S3. :ivar owner: The ID of the owner of this object. :ivar storage_class: The storage class of the object. Currently, one of: STANDARD | REDUCED_REDUNDANCY | GLACIER :ivar md5: The MD5 hash of the contents of the object. :ivar size: The size, in bytes, of the object. :ivar version_id: The version ID of this object, if it is a versioned object. :ivar encrypted: Whether the object is encrypted while at rest on the server. """ DefaultContentType = 'application/octet-stream' RestoreBody = """<?xml version="1.0" encoding="UTF-8"?> <RestoreRequest xmlns="http://s3.amazonaws.com/doc/2006-03-01"> <Days>%s</Days> </RestoreRequest>""" BufferSize = boto.config.getint('Boto', 'key_buffer_size', 8192) # The object metadata fields a user can set, other than custom metadata # fields (i.e., those beginning with a provider-specific prefix like # x-amz-meta). 
base_user_settable_fields = set(["cache-control", "content-disposition", "content-encoding", "content-language", "content-md5", "content-type", "x-robots-tag", "expires"]) _underscore_base_user_settable_fields = set() for f in base_user_settable_fields: _underscore_base_user_settable_fields.add(f.replace('-', '_')) # Metadata fields, whether user-settable or not, other than custom # metadata fields (i.e., those beginning with a provider specific prefix # like x-amz-meta). base_fields = (base_user_settable_fields | set(["last-modified", "content-length", "date", "etag"])) def __init__(self, bucket=None, name=None): self.bucket = bucket self.name = name self.metadata = {} self.cache_control = None self.content_type = self.DefaultContentType self.content_encoding = None self.content_disposition = None self.content_language = None self.filename = None self.etag = None self.is_latest = False self.last_modified = None self.owner = None self._storage_class = None self.path = None self.resp = None self.mode = None self.size = None self.version_id = None self.source_version_id = None self.delete_marker = False self.encrypted = None # If the object is being restored, this attribute will be set to True. # If the object is restored, it will be set to False. Otherwise this # value will be None. If the restore is completed (ongoing_restore = # False), the expiry_date will be populated with the expiry date of the # restored object. 
self.ongoing_restore = None self.expiry_date = None self.local_hashes = {} def __repr__(self): if self.bucket: name = u'<Key: %s,%s>' % (self.bucket.name, self.name) else: name = u'<Key: None,%s>' % self.name # Encode to bytes for Python 2 to prevent display decoding issues if not isinstance(name, str): name = name.encode('utf-8') return name def __iter__(self): return self @property def provider(self): provider = None if self.bucket and self.bucket.connection: provider = self.bucket.connection.provider return provider def _get_key(self): return self.name def _set_key(self, value): self.name = value key = property(_get_key, _set_key); def _get_md5(self): if 'md5' in self.local_hashes and self.local_hashes['md5']: return binascii.b2a_hex(self.local_hashes['md5']) def _set_md5(self, value): if value: self.local_hashes['md5'] = binascii.a2b_hex(value) elif 'md5' in self.local_hashes: self.local_hashes.pop('md5', None) md5 = property(_get_md5, _set_md5); def _get_base64md5(self): if 'md5' in self.local_hashes and self.local_hashes['md5']: md5 = self.local_hashes['md5'] if not isinstance(md5, bytes): md5 = md5.encode('utf-8') return binascii.b2a_base64(md5).decode('utf-8').rstrip('\n') def _set_base64md5(self, value): if value: if not isinstance(value, six.string_types): value = value.decode('utf-8') self.local_hashes['md5'] = binascii.a2b_base64(value) elif 'md5' in self.local_hashes: del self.local_hashes['md5'] base64md5 = property(_get_base64md5, _set_base64md5); def _get_storage_class(self): if self._storage_class is None and self.bucket: # Attempt to fetch storage class list_items = list(self.bucket.list(self.name.encode('utf-8'))) if len(list_items) and getattr(list_items[0], '_storage_class', None): self._storage_class = list_items[0]._storage_class else: # Key is not yet saved? Just use default... 
self._storage_class = 'STANDARD' return self._storage_class def _set_storage_class(self, value): self._storage_class = value storage_class = property(_get_storage_class, _set_storage_class) def get_md5_from_hexdigest(self, md5_hexdigest): """ A utility function to create the 2-tuple (md5hexdigest, base64md5) from just having a precalculated md5_hexdigest. """ digest = binascii.unhexlify(md5_hexdigest) base64md5 = encodebytes(digest) if base64md5[-1] == '\n': base64md5 = base64md5[0:-1] return (md5_hexdigest, base64md5) def handle_encryption_headers(self, resp): provider = self.bucket.connection.provider if provider.server_side_encryption_header: self.encrypted = resp.getheader( provider.server_side_encryption_header, None) else: self.encrypted = None def handle_storage_class_header(self, resp): provider = self.bucket.connection.provider if provider.storage_class_header: self._storage_class = resp.getheader( provider.storage_class_header, None) if (self._storage_class is None and provider.get_provider_name() == 'aws'): # S3 docs for HEAD object requests say S3 will return this # header for all objects except Standard storage class objects. self._storage_class = 'STANDARD' def handle_version_headers(self, resp, force=False): provider = self.bucket.connection.provider # If the Key object already has a version_id attribute value, it # means that it represents an explicit version and the user is # doing a get_contents_*(version_id=<foo>) to retrieve another # version of the Key. In that case, we don't really want to # overwrite the version_id in this Key object. Comprende? 
if self.version_id is None or force: self.version_id = resp.getheader(provider.version_id, None) self.source_version_id = resp.getheader(provider.copy_source_version_id, None) if resp.getheader(provider.delete_marker, 'false') == 'true': self.delete_marker = True else: self.delete_marker = False def handle_restore_headers(self, response): provider = self.bucket.connection.provider header = response.getheader(provider.restore_header) if header is None: return parts = header.split(',', 1) for part in parts: key, val = [i.strip() for i in part.split('=')] val = val.replace('"', '') if key == 'ongoing-request': self.ongoing_restore = True if val.lower() == 'true' else False elif key == 'expiry-date': self.expiry_date = val def handle_addl_headers(self, headers): """ Used by Key subclasses to do additional, provider-specific processing of response headers. No-op for this base class. """ pass def open_read(self, headers=None, query_args='', override_num_retries=None, response_headers=None): """ Open this key for reading :type headers: dict :param headers: Headers to pass in the web request :type query_args: string :param query_args: Arguments to pass in the query string (ie, 'torrent') :type override_num_retries: int :param override_num_retries: If not None will override configured num_retries parameter for underlying GET. :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. 
""" if self.resp is None: self.mode = 'r' provider = self.bucket.connection.provider self.resp = self.bucket.connection.make_request( 'GET', self.bucket.name, self.name, headers, query_args=query_args, override_num_retries=override_num_retries) if self.resp.status < 199 or self.resp.status > 299: body = self.resp.read() raise provider.storage_response_error(self.resp.status, self.resp.reason, body) response_headers = self.resp.msg self.metadata = boto.utils.get_aws_metadata(response_headers, provider) for name, value in response_headers.items(): # To get correct size for Range GETs, use Content-Range # header if one was returned. If not, use Content-Length # header. if (name.lower() == 'content-length' and 'Content-Range' not in response_headers): self.size = int(value) elif name.lower() == 'content-range': end_range = re.sub('.*/(.*)', '\\1', value) self.size = int(end_range) elif name.lower() in Key.base_fields: self.__dict__[name.lower().replace('-', '_')] = value self.handle_version_headers(self.resp) self.handle_encryption_headers(self.resp) self.handle_restore_headers(self.resp) self.handle_addl_headers(self.resp.getheaders()) def open_write(self, headers=None, override_num_retries=None): """ Open this key for writing. Not yet implemented :type headers: dict :param headers: Headers to pass in the write request :type override_num_retries: int :param override_num_retries: If not None will override configured num_retries parameter for underlying PUT. """ raise BotoClientError('Not Implemented') def open(self, mode='r', headers=None, query_args=None, override_num_retries=None): if mode == 'r': self.mode = 'r' self.open_read(headers=headers, query_args=query_args, override_num_retries=override_num_retries) elif mode == 'w': self.mode = 'w' self.open_write(headers=headers, override_num_retries=override_num_retries) else: raise BotoClientError('Invalid mode: %s' % mode) closed = False def close(self, fast=False): """ Close this key. 
:type fast: bool :param fast: True if you want the connection to be closed without first reading the content. This should only be used in cases where subsequent calls don't need to return the content from the open HTTP connection. Note: As explained at http://docs.python.org/2/library/httplib.html#httplib.HTTPConnection.getresponse, callers must read the whole response before sending a new request to the server. Calling Key.close(fast=True) and making a subsequent request to the server will work because boto will get an httplib exception and close/reopen the connection. """ if self.resp and not fast: self.resp.read() self.resp = None self.mode = None self.closed = True def next(self): """ By providing a next method, the key object supports use as an iterator. For example, you can now say: for bytes in key: write bytes to a file or whatever All of the HTTP connection stuff is handled for you. """ self.open_read() data = self.resp.read(self.BufferSize) if not data: self.close() raise StopIteration return data # Python 3 iterator support __next__ = next def read(self, size=0): self.open_read() if size == 0: data = self.resp.read() else: data = self.resp.read(size) if not data: self.close() return data def change_storage_class(self, new_storage_class, dst_bucket=None, validate_dst_bucket=True): """ Change the storage class of an existing key. Depending on whether a different destination bucket is supplied or not, this will either move the item within the bucket, preserving all metadata and ACL info bucket changing the storage class or it will copy the item to the provided destination bucket, also preserving metadata and ACL info. :type new_storage_class: string :param new_storage_class: The new storage class for the Key. Possible values are: * STANDARD * REDUCED_REDUNDANCY :type dst_bucket: string :param dst_bucket: The name of a destination bucket. If not provided the current bucket of the key will be used. 
:type validate_dst_bucket: bool :param validate_dst_bucket: If True, will validate the dst_bucket by using an extra list request. """ bucket_name = dst_bucket or self.bucket.name if new_storage_class == 'STANDARD': return self.copy(bucket_name, self.name, reduced_redundancy=False, preserve_acl=True, validate_dst_bucket=validate_dst_bucket) elif new_storage_class == 'REDUCED_REDUNDANCY': return self.copy(bucket_name, self.name, reduced_redundancy=True, preserve_acl=True, validate_dst_bucket=validate_dst_bucket) else: raise BotoClientError('Invalid storage class: %s' % new_storage_class) def copy(self, dst_bucket, dst_key, metadata=None, reduced_redundancy=False, preserve_acl=False, encrypt_key=False, validate_dst_bucket=True): """ Copy this Key to another bucket. :type dst_bucket: string :param dst_bucket: The name of the destination bucket :type dst_key: string :param dst_key: The name of the destination key :type metadata: dict :param metadata: Metadata to be associated with new key. If metadata is supplied, it will replace the metadata of the source key being copied. If no metadata is supplied, the source key's metadata will be copied to the new key. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will force the storage class of the new Key to be REDUCED_REDUNDANCY regardless of the storage class of the key being copied. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type preserve_acl: bool :param preserve_acl: If True, the ACL from the source key will be copied to the destination key. If False, the destination key will have the default ACL. Note that preserving the ACL in the new key object will require two additional API calls to S3, one to retrieve the current ACL and one to set that ACL on the new object. If you don't care about the ACL, a value of False will be significantly more efficient. 
:type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. :type validate_dst_bucket: bool :param validate_dst_bucket: If True, will validate the dst_bucket by using an extra list request. :rtype: :class:`boto.s3.key.Key` or subclass :returns: An instance of the newly created key object """ dst_bucket = self.bucket.connection.lookup(dst_bucket, validate_dst_bucket) if reduced_redundancy: storage_class = 'REDUCED_REDUNDANCY' else: storage_class = self.storage_class return dst_bucket.copy_key(dst_key, self.bucket.name, self.name, metadata, storage_class=storage_class, preserve_acl=preserve_acl, encrypt_key=encrypt_key, src_version_id=self.version_id) def startElement(self, name, attrs, connection): if name == 'Owner': self.owner = User(self) return self.owner else: return None def endElement(self, name, value, connection): if name == 'Key': self.name = value elif name == 'ETag': self.etag = value elif name == 'IsLatest': if value == 'true': self.is_latest = True else: self.is_latest = False elif name == 'LastModified': self.last_modified = value elif name == 'Size': self.size = int(value) elif name == 'StorageClass': self.storage_class = value elif name == 'Owner': pass elif name == 'VersionId': self.version_id = value else: setattr(self, name, value) def exists(self, headers=None): """ Returns True if the key exists :rtype: bool :return: Whether the key exists on S3 """ return bool(self.bucket.lookup(self.name, headers=headers)) def delete(self, headers=None): """ Delete this key from S3 """ return self.bucket.delete_key(self.name, version_id=self.version_id, headers=headers) def get_metadata(self, name): return self.metadata.get(name) def set_metadata(self, name, value): # Ensure that metadata that is vital to signing is in the correct # case. Applies to ``Content-Type`` & ``Content-MD5``. 
        if name.lower() == 'content-type':
            self.metadata['Content-Type'] = value
        elif name.lower() == 'content-md5':
            self.metadata['Content-MD5'] = value
        else:
            self.metadata[name] = value

        # Mirror user-settable headers onto the matching instance attribute
        # (e.g. 'cache-control' -> self.cache_control).
        if name.lower() in Key.base_user_settable_fields:
            self.__dict__[name.lower().replace('-', '_')] = value

    def update_metadata(self, d):
        # Merge the given dict into this key's user metadata.
        self.metadata.update(d)

    # convenience methods for setting/getting ACL
    def set_acl(self, acl_str, headers=None):
        # Delegates to the bucket; silently does nothing when this key is
        # not associated with a bucket.
        if self.bucket is not None:
            self.bucket.set_acl(acl_str, self.name, headers=headers)

    def get_acl(self, headers=None):
        # Returns the ACL policy for this key, or None when bucket-less.
        if self.bucket is not None:
            return self.bucket.get_acl(self.name, headers=headers)

    def get_xml_acl(self, headers=None):
        # Returns the raw XML ACL document, or None when bucket-less.
        if self.bucket is not None:
            return self.bucket.get_xml_acl(self.name, headers=headers)

    def set_xml_acl(self, acl_str, headers=None):
        # Applies a raw XML ACL document; no-op (returns None) when
        # bucket-less.
        if self.bucket is not None:
            return self.bucket.set_xml_acl(acl_str, self.name,
                                           headers=headers)

    def set_canned_acl(self, acl_str, headers=None):
        # NOTE(review): unlike the other ACL helpers this does not guard
        # against self.bucket being None — confirm callers always have a
        # bucket here.
        return self.bucket.set_canned_acl(acl_str, self.name, headers)

    def get_redirect(self):
        """Return the redirect location configured for this key.

        If no redirect is configured (via set_redirect), then None
        will be returned.

        """
        response = self.bucket.connection.make_request(
            'HEAD', self.bucket.name, self.name)
        if response.status == 200:
            return response.getheader('x-amz-website-redirect-location')
        else:
            raise self.provider.storage_response_error(
                response.status, response.reason, response.read())

    def set_redirect(self, redirect_location, headers=None):
        """Configure this key to redirect to another location.

        When the bucket associated with this key is accessed from the website
        endpoint, a 301 redirect will be issued to the specified
        `redirect_location`.

        :type redirect_location: string
        :param redirect_location: The location to redirect.

""" if headers is None: headers = {} else: headers = headers.copy() headers['x-amz-website-redirect-location'] = redirect_location response = self.bucket.connection.make_request('PUT', self.bucket.name, self.name, headers) if response.status == 200: return True else: raise self.provider.storage_response_error( response.status, response.reason, response.read()) def make_public(self, headers=None): return self.bucket.set_canned_acl('public-read', self.name, headers) def generate_url(self, expires_in, method='GET', headers=None, query_auth=True, force_http=False, response_headers=None, expires_in_absolute=False, version_id=None, policy=None, reduced_redundancy=False, encrypt_key=False): """ Generate a URL to access this key. :type expires_in: int :param expires_in: How long the url is valid for, in seconds. :type method: string :param method: The method to use for retrieving the file (default is GET). :type headers: dict :param headers: Any headers to pass along in the request. :type query_auth: bool :param query_auth: If True, signs the request in the URL. :type force_http: bool :param force_http: If True, http will be used instead of https. :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. :type expires_in_absolute: bool :param expires_in_absolute: :type version_id: string :param version_id: The version_id of the object to GET. If specified this overrides any value in the key. :type policy: :class:`boto.s3.acl.CannedACLStrings` :param policy: A canned ACL policy that will be applied to the new key in S3. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will set the storage class of the new Key to be REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. 
:type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. :rtype: string :return: The URL to access the key """ provider = self.bucket.connection.provider version_id = version_id or self.version_id if headers is None: headers = {} else: headers = headers.copy() # add headers accordingly (usually PUT case) if policy: headers[provider.acl_header] = policy if reduced_redundancy: self.storage_class = 'REDUCED_REDUNDANCY' if provider.storage_class_header: headers[provider.storage_class_header] = self.storage_class if encrypt_key: headers[provider.server_side_encryption_header] = 'AES256' headers = boto.utils.merge_meta(headers, self.metadata, provider) return self.bucket.connection.generate_url(expires_in, method, self.bucket.name, self.name, headers, query_auth, force_http, response_headers, expires_in_absolute, version_id) def send_file(self, fp, headers=None, cb=None, num_cb=10, query_args=None, chunked_transfer=False, size=None): """ Upload a file to a key into a bucket on S3. :type fp: file :param fp: The file pointer to upload. The file pointer must point at the offset from which you wish to upload. ie. if uploading the full file, it should point at the start of the file. Normally when a file is opened for reading, the fp will point at the first byte. See the bytes parameter below for more info. :type headers: dict :param headers: The headers to pass along with the PUT request :type num_cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. Providing a negative integer will cause your callback to be called with each buffer read. :type query_args: string :param query_args: (optional) Arguments to pass in the query string. 
:type chunked_transfer: boolean :param chunked_transfer: (optional) If true, we use chunked Transfer-Encoding. :type size: int :param size: (optional) The Maximum number of bytes to read from the file pointer (fp). This is useful when uploading a file in multiple parts where you are splitting the file up into different ranges to be uploaded. If not specified, the default behaviour is to read all bytes from the file pointer. Less bytes may be available. """ self._send_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb, query_args=query_args, chunked_transfer=chunked_transfer, size=size) def _send_file_internal(self, fp, headers=None, cb=None, num_cb=10, query_args=None, chunked_transfer=False, size=None, hash_algs=None): provider = self.bucket.connection.provider try: spos = fp.tell() except IOError: spos = None self.read_from_stream = False # If hash_algs is unset and the MD5 hasn't already been computed, # default to an MD5 hash_alg to hash the data on-the-fly. if hash_algs is None and not self.md5: hash_algs = {'md5': md5} digesters = dict((alg, hash_algs[alg]()) for alg in hash_algs or {}) def sender(http_conn, method, path, data, headers): # This function is called repeatedly for temporary retries # so we must be sure the file pointer is pointing at the # start of the data. if spos is not None and spos != fp.tell(): fp.seek(spos) elif spos is None and self.read_from_stream: # if seek is not supported, and we've read from this # stream already, then we need to abort retries to # avoid setting bad data. raise provider.storage_data_error( 'Cannot retry failed request. fp does not support seeking.') # If the caller explicitly specified host header, tell putrequest # not to add a second host header. Similarly for accept-encoding. 
skips = {} if boto.utils.find_matching_headers('host', headers): skips['skip_host'] = 1 if boto.utils.find_matching_headers('accept-encoding', headers): skips['skip_accept_encoding'] = 1 http_conn.putrequest(method, path, **skips) for key in headers: http_conn.putheader(key, headers[key]) http_conn.endheaders() save_debug = self.bucket.connection.debug self.bucket.connection.debug = 0 # If the debuglevel < 4 we don't want to show connection # payload, so turn off HTTP connection-level debug output (to # be restored below). # Use the getattr approach to allow this to work in AppEngine. if getattr(http_conn, 'debuglevel', 0) < 4: http_conn.set_debuglevel(0) data_len = 0 if cb: if size: cb_size = size elif self.size: cb_size = self.size else: cb_size = 0 if chunked_transfer and cb_size == 0: # For chunked Transfer, we call the cb for every 1MB # of data transferred, except when we know size. cb_count = (1024 * 1024) / self.BufferSize elif num_cb > 1: cb_count = int( math.ceil(cb_size / self.BufferSize / (num_cb - 1.0))) elif num_cb < 0: cb_count = -1 else: cb_count = 0 i = 0 cb(data_len, cb_size) bytes_togo = size if bytes_togo and bytes_togo < self.BufferSize: chunk = fp.read(bytes_togo) else: chunk = fp.read(self.BufferSize) if not isinstance(chunk, bytes): chunk = chunk.encode('utf-8') if spos is None: # read at least something from a non-seekable fp. 
self.read_from_stream = True while chunk: chunk_len = len(chunk) data_len += chunk_len if chunked_transfer: http_conn.send('%x;\r\n' % chunk_len) http_conn.send(chunk) http_conn.send('\r\n') else: http_conn.send(chunk) for alg in digesters: digesters[alg].update(chunk) if bytes_togo: bytes_togo -= chunk_len if bytes_togo <= 0: break if cb: i += 1 if i == cb_count or cb_count == -1: cb(data_len, cb_size) i = 0 if bytes_togo and bytes_togo < self.BufferSize: chunk = fp.read(bytes_togo) else: chunk = fp.read(self.BufferSize) if not isinstance(chunk, bytes): chunk = chunk.encode('utf-8') self.size = data_len for alg in digesters: self.local_hashes[alg] = digesters[alg].digest() if chunked_transfer: http_conn.send('0\r\n') # http_conn.send("Content-MD5: %s\r\n" % self.base64md5) http_conn.send('\r\n') if cb and (cb_count <= 1 or i > 0) and data_len > 0: cb(data_len, cb_size) http_conn.set_debuglevel(save_debug) self.bucket.connection.debug = save_debug response = http_conn.getresponse() body = response.read() if not self.should_retry(response, chunked_transfer): raise provider.storage_response_error( response.status, response.reason, body) return response if not headers: headers = {} else: headers = headers.copy() # Overwrite user-supplied user-agent. 
for header in find_matching_headers('User-Agent', headers): del headers[header] headers['User-Agent'] = UserAgent # If storage_class is None, then a user has not explicitly requested # a storage class, so we can assume STANDARD here if self._storage_class not in [None, 'STANDARD']: headers[provider.storage_class_header] = self.storage_class if find_matching_headers('Content-Encoding', headers): self.content_encoding = merge_headers_by_name( 'Content-Encoding', headers) if find_matching_headers('Content-Language', headers): self.content_language = merge_headers_by_name( 'Content-Language', headers) content_type_headers = find_matching_headers('Content-Type', headers) if content_type_headers: # Some use cases need to suppress sending of the Content-Type # header and depend on the receiving server to set the content # type. This can be achieved by setting headers['Content-Type'] # to None when calling this method. if (len(content_type_headers) == 1 and headers[content_type_headers[0]] is None): # Delete null Content-Type value to skip sending that header. del headers[content_type_headers[0]] else: self.content_type = merge_headers_by_name( 'Content-Type', headers) elif self.path: self.content_type = mimetypes.guess_type(self.path)[0] if self.content_type is None: self.content_type = self.DefaultContentType headers['Content-Type'] = self.content_type else: headers['Content-Type'] = self.content_type if self.base64md5: headers['Content-MD5'] = self.base64md5 if chunked_transfer: headers['Transfer-Encoding'] = 'chunked' #if not self.base64md5: # headers['Trailer'] = "Content-MD5" else: headers['Content-Length'] = str(self.size) # This is terrible. We need a SHA256 of the body for SigV4, but to do # the chunked ``sender`` behavior above, the ``fp`` isn't available to # the auth mechanism (because closures). Detect if it's SigV4 & embelish # while we can before the auth calculations occur. 
if 'hmac-v4-s3' in self.bucket.connection._required_auth_capability(): kwargs = {'fp': fp, 'hash_algorithm': hashlib.sha256} if size is not None: kwargs['size'] = size headers['_sha256'] = compute_hash(**kwargs)[0] headers['Expect'] = '100-Continue' headers = boto.utils.merge_meta(headers, self.metadata, provider) resp = self.bucket.connection.make_request( 'PUT', self.bucket.name, self.name, headers, sender=sender, query_args=query_args ) self.handle_version_headers(resp, force=True) self.handle_addl_headers(resp.getheaders()) def should_retry(self, response, chunked_transfer=False): provider = self.bucket.connection.provider if not chunked_transfer: if response.status in [500, 503]: # 500 & 503 can be plain retries. return True if response.getheader('location'): # If there's a redirect, plain retry. return True if 200 <= response.status <= 299: self.etag = response.getheader('etag') md5 = self.md5 if isinstance(md5, bytes): md5 = md5.decode('utf-8') # If you use customer-provided encryption keys, the ETag value that # Amazon S3 returns in the response will not be the MD5 of the # object. server_side_encryption_customer_algorithm = response.getheader( 'x-amz-server-side-encryption-customer-algorithm', None) if server_side_encryption_customer_algorithm is None: if self.etag != '"%s"' % md5: raise provider.storage_data_error( 'ETag from S3 did not match computed MD5. ' '%s vs. %s' % (self.etag, self.md5)) return True if response.status == 400: # The 400 must be trapped so the retry handler can check to # see if it was a timeout. # If ``RequestTimeout`` is present, we'll retry. Otherwise, bomb # out. body = response.read() err = provider.storage_response_error( response.status, response.reason, body ) if err.error_code in ['RequestTimeout']: raise PleaseRetryException( "Saw %s, retrying" % err.error_code, response=response ) return False def compute_md5(self, fp, size=None): """ :type fp: file :param fp: File pointer to the file to MD5 hash. 
The file pointer will be reset to the same position before the method returns. :type size: int :param size: (optional) The Maximum number of bytes to read from the file pointer (fp). This is useful when uploading a file in multiple parts where the file is being split in place into different parts. Less bytes may be available. """ hex_digest, b64_digest, data_size = compute_md5(fp, size=size) # Returned values are MD5 hash, base64 encoded MD5 hash, and data size. # The internal implementation of compute_md5() needs to return the # data size but we don't want to return that value to the external # caller because it changes the class interface (i.e. it might # break some code) so we consume the third tuple value here and # return the remainder of the tuple to the caller, thereby preserving # the existing interface. self.size = data_size return (hex_digest, b64_digest) def set_contents_from_stream(self, fp, headers=None, replace=True, cb=None, num_cb=10, policy=None, reduced_redundancy=False, query_args=None, size=None): """ Store an object using the name of the Key object as the key in cloud and the contents of the data stream pointed to by 'fp' as the contents. The stream object is not seekable and total size is not known. This has the implication that we can't specify the Content-Size and Content-MD5 in the header. So for huge uploads, the delay in calculating MD5 is avoided but with a penalty of inability to verify the integrity of the uploaded data. :type fp: file :param fp: the file whose contents are to be uploaded :type headers: dict :param headers: additional HTTP headers to be sent with the PUT request. :type replace: bool :param replace: If this parameter is False, the method will first check to see if an object exists in the bucket with the same key. If it does, it won't overwrite it. The default value is True which will overwrite the object. :type cb: function :param cb: a callback function that will be called to report progress on the upload. 
The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to GS and the second representing the total number of bytes that need to be transmitted. :type num_cb: int :param num_cb: (optional) If a callback is specified with the cb parameter, this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type policy: :class:`boto.gs.acl.CannedACLStrings` :param policy: A canned ACL policy that will be applied to the new key in GS. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will set the storage class of the new Key to be REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type size: int :param size: (optional) The Maximum number of bytes to read from the file pointer (fp). This is useful when uploading a file in multiple parts where you are splitting the file up into different ranges to be uploaded. If not specified, the default behaviour is to read all bytes from the file pointer. Less bytes may be available. """ provider = self.bucket.connection.provider if not provider.supports_chunked_transfer(): raise BotoClientError('%s does not support chunked transfer' % provider.get_provider_name()) # Name of the Object should be specified explicitly for Streams. 
if not self.name or self.name == '': raise BotoClientError('Cannot determine the destination ' 'object name for the given stream') if headers is None: headers = {} if policy: headers[provider.acl_header] = policy if reduced_redundancy: self.storage_class = 'REDUCED_REDUNDANCY' if provider.storage_class_header: headers[provider.storage_class_header] = self.storage_class if self.bucket is not None: if not replace: if self.bucket.lookup(self.name): return self.send_file(fp, headers, cb, num_cb, query_args, chunked_transfer=True, size=size) def set_contents_from_file(self, fp, headers=None, replace=True, cb=None, num_cb=10, policy=None, md5=None, reduced_redundancy=False, query_args=None, encrypt_key=False, size=None, rewind=False): """ Store an object in S3 using the name of the Key object as the key in S3 and the contents of the file pointed to by 'fp' as the contents. The data is read from 'fp' from its current position until 'size' bytes have been read or EOF. :type fp: file :param fp: the file whose contents to upload :type headers: dict :param headers: Additional HTTP headers that will be sent with the PUT request. :type replace: bool :param replace: If this parameter is False, the method will first check to see if an object exists in the bucket with the same key. If it does, it won't overwrite it. The default value is True which will overwrite the object. :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type num_cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. 
:type policy: :class:`boto.s3.acl.CannedACLStrings` :param policy: A canned ACL policy that will be applied to the new key in S3. :type md5: A tuple containing the hexdigest version of the MD5 checksum of the file as the first element and the Base64-encoded version of the plain checksum as the second element. This is the same format returned by the compute_md5 method. :param md5: If you need to compute the MD5 for any reason prior to upload, it's silly to have to do it twice so this param, if present, will be used as the MD5 values of the file. Otherwise, the checksum will be computed. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will set the storage class of the new Key to be REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. :type size: int :param size: (optional) The Maximum number of bytes to read from the file pointer (fp). This is useful when uploading a file in multiple parts where you are splitting the file up into different ranges to be uploaded. If not specified, the default behaviour is to read all bytes from the file pointer. Less bytes may be available. :type rewind: bool :param rewind: (optional) If True, the file pointer (fp) will be rewound to the start before any bytes are read from it. The default behaviour is False which reads from the current position of the file pointer (fp). :rtype: int :return: The number of bytes written to the key. """ provider = self.bucket.connection.provider headers = headers or {} if policy: headers[provider.acl_header] = policy if encrypt_key: headers[provider.server_side_encryption_header] = 'AES256' if rewind: # caller requests reading from beginning of fp. 
fp.seek(0, os.SEEK_SET) else: # The following seek/tell/seek logic is intended # to detect applications using the older interface to # set_contents_from_file(), which automatically rewound the # file each time the Key was reused. This changed with commit # 14ee2d03f4665fe20d19a85286f78d39d924237e, to support uploads # split into multiple parts and uploaded in parallel, and at # the time of that commit this check was added because otherwise # older programs would get a success status and upload an empty # object. Unfortuantely, it's very inefficient for fp's implemented # by KeyFile (used, for example, by gsutil when copying between # providers). So, we skip the check for the KeyFile case. # TODO: At some point consider removing this seek/tell/seek # logic, after enough time has passed that it's unlikely any # programs remain that assume the older auto-rewind interface. if not isinstance(fp, KeyFile): spos = fp.tell() fp.seek(0, os.SEEK_END) if fp.tell() == spos: fp.seek(0, os.SEEK_SET) if fp.tell() != spos: # Raise an exception as this is likely a programming # error whereby there is data before the fp but nothing # after it. fp.seek(spos) raise AttributeError('fp is at EOF. Use rewind option ' 'or seek() to data start.') # seek back to the correct position. fp.seek(spos) if reduced_redundancy: self.storage_class = 'REDUCED_REDUNDANCY' if provider.storage_class_header: headers[provider.storage_class_header] = self.storage_class # TODO - What if provider doesn't support reduced reduncancy? # What if different providers provide different classes? if hasattr(fp, 'name'): self.path = fp.name if self.bucket is not None: if not md5 and provider.supports_chunked_transfer(): # defer md5 calculation to on the fly and # we don't know anything about size yet. chunked_transfer = True self.size = None else: chunked_transfer = False if isinstance(fp, KeyFile): # Avoid EOF seek for KeyFile case as it's very inefficient. 
key = fp.getkey() size = key.size - fp.tell() self.size = size # At present both GCS and S3 use MD5 for the etag for # non-multipart-uploaded objects. If the etag is 32 hex # chars use it as an MD5, to avoid having to read the file # twice while transferring. if (re.match('^"[a-fA-F0-9]{32}"$', key.etag)): etag = key.etag.strip('"') md5 = (etag, base64.b64encode(binascii.unhexlify(etag))) if not md5: # compute_md5() and also set self.size to actual # size of the bytes read computing the md5. md5 = self.compute_md5(fp, size) # adjust size if required size = self.size elif size: self.size = size else: # If md5 is provided, still need to size so # calculate based on bytes to end of content spos = fp.tell() fp.seek(0, os.SEEK_END) self.size = fp.tell() - spos fp.seek(spos) size = self.size self.md5 = md5[0] self.base64md5 = md5[1] if self.name is None: self.name = self.md5 if not replace: if self.bucket.lookup(self.name): return self.send_file(fp, headers=headers, cb=cb, num_cb=num_cb, query_args=query_args, chunked_transfer=chunked_transfer, size=size) # return number of bytes written. return self.size def set_contents_from_filename(self, filename, headers=None, replace=True, cb=None, num_cb=10, policy=None, md5=None, reduced_redundancy=False, encrypt_key=False): """ Store an object in S3 using the name of the Key object as the key in S3 and the contents of the file named by 'filename'. See set_contents_from_file method for details about the parameters. :type filename: string :param filename: The name of the file that you want to put onto S3 :type headers: dict :param headers: Additional headers to pass along with the request to AWS. :type replace: bool :param replace: If True, replaces the contents of the file if it already exists. :type cb: function :param cb: a callback function that will be called to report progress on the upload. 
The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type policy: :class:`boto.s3.acl.CannedACLStrings` :param policy: A canned ACL policy that will be applied to the new key in S3. :type md5: A tuple containing the hexdigest version of the MD5 checksum of the file as the first element and the Base64-encoded version of the plain checksum as the second element. This is the same format returned by the compute_md5 method. :param md5: If you need to compute the MD5 for any reason prior to upload, it's silly to have to do it twice so this param, if present, will be used as the MD5 values of the file. Otherwise, the checksum will be computed. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will set the storage class of the new Key to be REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. :rtype: int :return: The number of bytes written to the key. """ with open(filename, 'rb') as fp: return self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy, md5, reduced_redundancy, encrypt_key=encrypt_key) def set_contents_from_string(self, string_data, headers=None, replace=True, cb=None, num_cb=10, policy=None, md5=None, reduced_redundancy=False, encrypt_key=False): """ Store an object in S3 using the name of the Key object as the key in S3 and the string 's' as the contents. 
See set_contents_from_file method for details about the parameters. :type headers: dict :param headers: Additional headers to pass along with the request to AWS. :type replace: bool :param replace: If True, replaces the contents of the file if it already exists. :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type num_cb: int :param num_cb: (optional) If a callback is specified with the num_cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type policy: :class:`boto.s3.acl.CannedACLStrings` :param policy: A canned ACL policy that will be applied to the new key in S3. :type md5: A tuple containing the hexdigest version of the MD5 checksum of the file as the first element and the Base64-encoded version of the plain checksum as the second element. This is the same format returned by the compute_md5 method. :param md5: If you need to compute the MD5 for any reason prior to upload, it's silly to have to do it twice so this param, if present, will be used as the MD5 values of the file. Otherwise, the checksum will be computed. :type reduced_redundancy: bool :param reduced_redundancy: If True, this will set the storage class of the new Key to be REDUCED_REDUNDANCY. The Reduced Redundancy Storage (RRS) feature of S3, provides lower redundancy at lower storage cost. :type encrypt_key: bool :param encrypt_key: If True, the new copy of the object will be encrypted on the server-side by S3 and will be stored in an encrypted form while at rest in S3. 
""" if not isinstance(string_data, bytes): string_data = string_data.encode("utf-8") fp = BytesIO(string_data) r = self.set_contents_from_file(fp, headers, replace, cb, num_cb, policy, md5, reduced_redundancy, encrypt_key=encrypt_key) fp.close() return r def get_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, override_num_retries=None, response_headers=None): """ Retrieves a file from an S3 Key :type fp: file :param fp: File pointer to put the data into :type headers: string :param: headers to send when retrieving the files :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type torrent: bool :param torrent: Flag for whether to get a torrent for the file :type override_num_retries: int :param override_num_retries: If not None will override configured num_retries parameter for underlying GET. :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. :type version_id: str :param version_id: The ID of a particular version of the object. If this parameter is not supplied but the Key object has a ``version_id`` attribute, that value will be used when retrieving the object. You can set the Key object's ``version_id`` attribute to None to always grab the latest version from a version-enabled bucket. 
""" self._get_file_internal(fp, headers=headers, cb=cb, num_cb=num_cb, torrent=torrent, version_id=version_id, override_num_retries=override_num_retries, response_headers=response_headers, hash_algs=None, query_args=None) def _get_file_internal(self, fp, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, override_num_retries=None, response_headers=None, hash_algs=None, query_args=None): if headers is None: headers = {} save_debug = self.bucket.connection.debug if self.bucket.connection.debug == 1: self.bucket.connection.debug = 0 query_args = query_args or [] if torrent: query_args.append('torrent') if hash_algs is None and not torrent: hash_algs = {'md5': md5} digesters = dict((alg, hash_algs[alg]()) for alg in hash_algs or {}) # If a version_id is passed in, use that. If not, check to see # if the Key object has an explicit version_id and, if so, use that. # Otherwise, don't pass a version_id query param. if version_id is None: version_id = self.version_id if version_id: query_args.append('versionId=%s' % version_id) if response_headers: for key in response_headers: query_args.append('%s=%s' % ( key, urllib.parse.quote(response_headers[key]))) query_args = '&'.join(query_args) self.open('r', headers, query_args=query_args, override_num_retries=override_num_retries) data_len = 0 if cb: if self.size is None: cb_size = 0 else: cb_size = self.size if self.size is None and num_cb != -1: # If size is not available due to chunked transfer for example, # we'll call the cb for every 1MB of data transferred. 
cb_count = (1024 * 1024) / self.BufferSize elif num_cb > 1: cb_count = int(math.ceil(cb_size/self.BufferSize/(num_cb-1.0))) elif num_cb < 0: cb_count = -1 else: cb_count = 0 i = 0 cb(data_len, cb_size) try: for bytes in self: fp.write(bytes) data_len += len(bytes) for alg in digesters: digesters[alg].update(bytes) if cb: if cb_size > 0 and data_len >= cb_size: break i += 1 if i == cb_count or cb_count == -1: cb(data_len, cb_size) i = 0 except IOError as e: if e.errno == errno.ENOSPC: raise StorageDataError('Out of space for destination file ' '%s' % fp.name) raise if cb and (cb_count <= 1 or i > 0) and data_len > 0: cb(data_len, cb_size) for alg in digesters: self.local_hashes[alg] = digesters[alg].digest() if self.size is None and not torrent and "Range" not in headers: self.size = data_len self.close() self.bucket.connection.debug = save_debug def get_torrent_file(self, fp, headers=None, cb=None, num_cb=10): """ Get a torrent file (see to get_file) :type fp: file :param fp: The file pointer of where to put the torrent :type headers: dict :param headers: Headers to be passed :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. """ return self.get_file(fp, headers, cb, num_cb, torrent=True) def get_contents_to_file(self, fp, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, res_download_handler=None, response_headers=None): """ Retrieve an object from S3 using the name of the Key object as the key in S3. 
Write the contents of the object to the file pointed to by 'fp'. :type fp: File -like object :param fp: :type headers: dict :param headers: additional HTTP headers that will be sent with the GET request. :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the<|fim▁hole|> :type torrent: bool :param torrent: If True, returns the contents of a torrent file as a string. :type res_upload_handler: ResumableDownloadHandler :param res_download_handler: If provided, this handler will perform the download. :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. :type version_id: str :param version_id: The ID of a particular version of the object. If this parameter is not supplied but the Key object has a ``version_id`` attribute, that value will be used when retrieving the object. You can set the Key object's ``version_id`` attribute to None to always grab the latest version from a version-enabled bucket. 
""" if self.bucket is not None: if res_download_handler: res_download_handler.get_file(self, fp, headers, cb, num_cb, torrent=torrent, version_id=version_id) else: self.get_file(fp, headers, cb, num_cb, torrent=torrent, version_id=version_id, response_headers=response_headers) def get_contents_to_filename(self, filename, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, res_download_handler=None, response_headers=None): """ Retrieve an object from S3 using the name of the Key object as the key in S3. Store contents of the object to a file named by 'filename'. See get_contents_to_file method for details about the parameters. :type filename: string :param filename: The filename of where to put the file contents :type headers: dict :param headers: Any additional headers to send in the request :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type num_cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type torrent: bool :param torrent: If True, returns the contents of a torrent file as a string. :type res_upload_handler: ResumableDownloadHandler :param res_download_handler: If provided, this handler will perform the download. :type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. :type version_id: str :param version_id: The ID of a particular version of the object. 
If this parameter is not supplied but the Key object has a ``version_id`` attribute, that value will be used when retrieving the object. You can set the Key object's ``version_id`` attribute to None to always grab the latest version from a version-enabled bucket. """ try: with open(filename, 'wb') as fp: self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent, version_id=version_id, res_download_handler=res_download_handler, response_headers=response_headers) except Exception: os.remove(filename) raise # if last_modified date was sent from s3, try to set file's timestamp if self.last_modified is not None: try: modified_tuple = email.utils.parsedate_tz(self.last_modified) modified_stamp = int(email.utils.mktime_tz(modified_tuple)) os.utime(fp.name, (modified_stamp, modified_stamp)) except Exception: pass def get_contents_as_string(self, headers=None, cb=None, num_cb=10, torrent=False, version_id=None, response_headers=None, encoding=None): """ Retrieve an object from S3 using the name of the Key object as the key in S3. Return the contents of the object as a string. See get_contents_to_file method for details about the parameters. :type headers: dict :param headers: Any additional headers to send in the request :type cb: function :param cb: a callback function that will be called to report progress on the upload. The callback should accept two integer parameters, the first representing the number of bytes that have been successfully transmitted to S3 and the second representing the size of the to be transmitted object. :type cb: int :param num_cb: (optional) If a callback is specified with the cb parameter this parameter determines the granularity of the callback by defining the maximum number of times the callback will be called during the file transfer. :type torrent: bool :param torrent: If True, returns the contents of a torrent file as a string. 
:type response_headers: dict :param response_headers: A dictionary containing HTTP headers/values that will override any headers associated with the stored object in the response. See http://goo.gl/EWOPb for details. :type version_id: str :param version_id: The ID of a particular version of the object. If this parameter is not supplied but the Key object has a ``version_id`` attribute, that value will be used when retrieving the object. You can set the Key object's ``version_id`` attribute to None to always grab the latest version from a version-enabled bucket. :type encoding: str :param encoding: The text encoding to use, such as ``utf-8`` or ``iso-8859-1``. If set, then a string will be returned. Defaults to ``None`` and returns bytes. :rtype: bytes or str :returns: The contents of the file as bytes or a string """ fp = BytesIO() self.get_contents_to_file(fp, headers, cb, num_cb, torrent=torrent, version_id=version_id, response_headers=response_headers) value = fp.getvalue() if encoding is not None: value = value.decode(encoding) return value def add_email_grant(self, permission, email_address, headers=None): """ Convenience method that provides a quick way to add an email grant to a key. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUT's the new ACL back to S3. :type permission: string :param permission: The permission being granted. Should be one of: (READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL). :type email_address: string :param email_address: The email address associated with the AWS account your are granting the permission to. :type recursive: boolean :param recursive: A boolean value to controls whether the command will apply the grant to all keys within the bucket or not. The default value is False. By passing a True value, the call will iterate through all keys in the bucket and apply the same grant to each key. 
CAUTION: If you have a lot of keys, this could take a long time! """ policy = self.get_acl(headers=headers) policy.acl.add_email_grant(permission, email_address) self.set_acl(policy, headers=headers) def add_user_grant(self, permission, user_id, headers=None, display_name=None): """ Convenience method that provides a quick way to add a canonical user grant to a key. This method retrieves the current ACL, creates a new grant based on the parameters passed in, adds that grant to the ACL and then PUT's the new ACL back to S3. :type permission: string :param permission: The permission being granted. Should be one of: (READ, WRITE, READ_ACP, WRITE_ACP, FULL_CONTROL). :type user_id: string :param user_id: The canonical user id associated with the AWS account your are granting the permission to. :type display_name: string :param display_name: An option string containing the user's Display Name. Only required on Walrus. """ policy = self.get_acl(headers=headers) policy.acl.add_user_grant(permission, user_id, display_name=display_name) self.set_acl(policy, headers=headers) def _normalize_metadata(self, metadata): if type(metadata) == set: norm_metadata = set() for k in metadata: norm_metadata.add(k.lower()) else: norm_metadata = {} for k in metadata: norm_metadata[k.lower()] = metadata[k] return norm_metadata def _get_remote_metadata(self, headers=None): """ Extracts metadata from existing URI into a dict, so we can overwrite/delete from it to form the new set of metadata to apply to a key. """ metadata = {} for underscore_name in self._underscore_base_user_settable_fields: if hasattr(self, underscore_name): value = getattr(self, underscore_name) if value: # Generate HTTP field name corresponding to "_" named field. field_name = underscore_name.replace('_', '-') metadata[field_name.lower()] = value # self.metadata contains custom metadata, which are all user-settable. 
prefix = self.provider.metadata_prefix for underscore_name in self.metadata: field_name = underscore_name.replace('_', '-') metadata['%s%s' % (prefix, field_name.lower())] = ( self.metadata[underscore_name]) return metadata def set_remote_metadata(self, metadata_plus, metadata_minus, preserve_acl, headers=None): metadata_plus = self._normalize_metadata(metadata_plus) metadata_minus = self._normalize_metadata(metadata_minus) metadata = self._get_remote_metadata() metadata.update(metadata_plus) for h in metadata_minus: if h in metadata: del metadata[h] src_bucket = self.bucket # Boto prepends the meta prefix when adding headers, so strip prefix in # metadata before sending back in to copy_key() call. rewritten_metadata = {} for h in metadata: if (h.startswith('x-goog-meta-') or h.startswith('x-amz-meta-')): rewritten_h = (h.replace('x-goog-meta-', '') .replace('x-amz-meta-', '')) else: rewritten_h = h rewritten_metadata[rewritten_h] = metadata[h] metadata = rewritten_metadata src_bucket.copy_key(self.name, self.bucket.name, self.name, metadata=metadata, preserve_acl=preserve_acl, headers=headers) def restore(self, days, headers=None): """Restore an object from an archive. :type days: int :param days: The lifetime of the restored object (must be at least 1 day). If the object is already restored then this parameter can be used to readjust the lifetime of the restored object. In this case, the days param is with respect to the initial time of the request. If the object has not been restored, this param is with respect to the completion time of the request. """ response = self.bucket.connection.make_request( 'POST', self.bucket.name, self.name, data=self.RestoreBody % days, headers=headers, query_args='restore') if response.status not in (200, 202): provider = self.bucket.connection.provider raise provider.storage_response_error(response.status, response.reason, response.read())<|fim▁end|>
callback will be called during the file transfer.
<|file_name|>meg.rs<|end_file_name|><|fim▁begin|>extern crate env_logger; extern crate rustc_serialize; extern crate toml; extern crate turbo; extern crate meg; extern crate term_painter; #[macro_use] extern crate log; use std::collections::BTreeSet; use std::env; use std::fs; use std::io; use std::path::{PathBuf, Path}; use std::process::Command; use turbo::turbo::{execute_main_without_stdin, handle_error, shell}; use turbo::core::MultiShell; use turbo::util::{CliError, CliResult, Config}; use meg::util::{lev_distance}; use self::term_painter::Color::*; use self::term_painter::ToStyle; #[derive(RustcDecodable)] #[derive(RustcEncodable)] struct Flags { flag_list: bool, flag_verbose: bool, arg_command: String, arg_args: Vec<String>, } const USAGE: &'static str = " Megam command line Usage: meg <command> [<args>...] meg [options] Options: -h, --help Display this message version Print version info and exit --list List installed commands -v, --verbose Use verbose output meg commands are: ahoy Ping the status of megam. account Create an account with megam. sshkey Create SSHKey with megam. csar Create apps/services & torpedos See 'meg help <command>' for more information on a specific command. "; fn main() { env_logger::init().unwrap(); execute_main_without_stdin(execute, true, USAGE); } macro_rules! each_subcommand{ ($mac:ident) => ({ $mac!(help); $mac!(ahoy); $mac!(account); $mac!(sshkey); $mac!(csar); $mac!(version); }) } /** The top-level `cargo` command handles configuration and project location because they are fundamental (and intertwined). Other commands can rely on this top-level information. */ fn execute(flags: Flags, config: &Config) -> CliResult<Option<()>> { config.shell().set_verbose(flags.flag_verbose); if flags.flag_list { println!("{}", Green.paint("Installed commands:")); for command in list_commands().into_iter() { println!("{}", command); }; return Ok(None) } let args = match &flags.arg_command[..] 
{ // For the commands `meg` and `meg help`, re-execute ourselves as // `meg -h` so we can go through the normal process of printing the // help message. "" | "help" if flags.arg_args.is_empty() => { config.shell().set_verbose(true); let args = &["meg".to_string(), "-h".to_string()]; let r = turbo::turbo::call_main_without_stdin(execute, config, USAGE, args, false); turbo::turbo::process_executed(r, &mut config.shell()); return Ok(None) } // For `meg help -h` and `meg help --help`, print out the help // message for `meg help` "help" if flags.arg_args[0] == "-h" || flags.arg_args[0] == "--help" => { vec!["meg".to_string(), "help".to_string(), "-h".to_string()] } // For `meg help foo`, print out the usage message for the specified // subcommand by executing the command with the `-h` flag. "help" => { vec!["meg".to_string(), flags.arg_args[0].clone(), "-h".to_string()] } // For all other invocations, we're of the form `meg foo args...`. We // use the exact environment arguments to preserve tokens like `--` for // example. 
"account" if flags.arg_args.is_empty() => { config.shell().set_verbose(true); let args = &["meg".to_string(), "help".to_string(), "account".to_string()]; let r = turbo::turbo::call_main_without_stdin(execute, config, USAGE, args, false); turbo::turbo::process_executed(r, &mut config.shell()); return Ok(None) } "sshkey" if flags.arg_args.is_empty() => { config.shell().set_verbose(true); let args = &["meg".to_string(), "help".to_string(), "sshkey".to_string()]; let r = turbo::turbo::call_main_without_stdin(execute, config, USAGE, args, false); turbo::turbo::process_executed(r, &mut config.shell()); return Ok(None) } "csar" if flags.arg_args.is_empty() => { config.shell().set_verbose(true); let args = &["meg".to_string(), "help".to_string(), "csar".to_string()]; let r = turbo::turbo::call_main_without_stdin(execute, config, USAGE, args, false); turbo::turbo::process_executed(r, &mut config.shell()); return Ok(None) } _ => env::args().collect(), }; macro_rules! cmd{ ($name:ident) => ( if args[1] == stringify!($name).replace("_", "-") { mod $name; config.shell().set_verbose(true); let r = turbo::turbo::call_main_without_stdin($name::execute, config, $name::USAGE, &args, false); turbo::turbo::process_executed(r, &mut config.shell()); return Ok(None) } ) } each_subcommand!(cmd); execute_subcommand(&args[1], &args, &mut config.shell()); Ok(None) } fn find_closest(cmd: &str) -> Option<String> { let cmds = list_commands(); // Only consider candidates with a lev_distance of 3 or less so we don't // suggest out-of-the-blue options. 
let mut filtered = cmds.iter().map(|c| (lev_distance(&c, cmd), c)) .filter(|&(d, _)| d < 4) .collect::<Vec<_>>(); filtered.sort_by(|a, b| a.0.cmp(&b.0)); if filtered.len() == 0 { None } else { Some(filtered[0].1.to_string()) } } fn execute_subcommand(cmd: &str, args: &[String], shell: &mut MultiShell) { let command = match find_command(cmd) { Some(command) => command, None => { let msg = match find_closest(cmd) { Some(closest) => format!("No such subcommand\n\n\t\ Did you mean `{}`?\n", closest), None => "No such subcommand".to_string() }; return handle_error(CliError::new(&msg, 127), shell) } }; match Command::new(&command).args(&args[1..]).status() { Ok(ref status) if status.success() => {} Ok(ref status) => { match status.code() { Some(code) => handle_error(CliError::new("", code), shell), None => { let msg = format!("subcommand failed with: {}", status); handle_error(CliError::new(&msg, 101), shell) } } } Err(ref e) if e.kind() == io::ErrorKind::NotFound => { handle_error(CliError::new("No such subcommand", 127), shell) } Err(err) => { let msg = format!("Subcommand failed to run: {}", err); handle_error(CliError::new(&msg, 127), shell) } } } /// List all runnable commands. find_command should always succeed /// if given one of returned command. fn list_commands() -> BTreeSet<String> { let command_prefix = "meg-"; let mut commands = BTreeSet::new(); for dir in list_command_directory().iter() { let entries = match fs::read_dir(dir) { Ok(entries) => entries, _ => continue }; for entry in entries { let entry = match entry { Ok(e) => e, Err(..) => continue }; let entry = entry.path(); let filename = match entry.file_name().and_then(|s| s.to_str()) { Some(filename) => filename, _ => continue }; if filename.starts_with(command_prefix) && filename.ends_with(env::consts::EXE_SUFFIX) && is_executable(&entry) { let command = &filename[ command_prefix.len().. filename.len() - env::consts::EXE_SUFFIX.len()]; commands.insert(command.to_string()); } } } macro_rules! 
add_cmd{ ($cmd:ident) => ({ commands.insert(stringify!($cmd).replace("_", "-")); }) } each_subcommand!(add_cmd); commands } #[cfg(unix)] fn is_executable(path: &Path) -> bool { //use std::os::unix; //use std::sys::ext; //fs::metadata(path).map(|m| { // m.permissions() == 0o001 // }).unwrap_or(false) return true } #[cfg(windows)] fn is_executable(path: &Path) -> bool { fs::metadata(path).map(|m| m.is_file()).unwrap_or(false) } /// Get `Command` to run given command. fn find_command(cmd: &str) -> Option<PathBuf> { let command_exe = format!("meg-{}{}", cmd, env::consts::EXE_SUFFIX);<|fim▁hole|> /// List candidate locations where subcommands might be installed. fn list_command_directory() -> Vec<PathBuf> { let mut dirs = vec![]; if let Ok(mut path) = env::current_exe() { path.pop(); dirs.push(path.join("../lib/meg")); dirs.push(path); } if let Some(val) = env::var_os("PATH") { dirs.extend(env::split_paths(&val)); } dirs }<|fim▁end|>
let dirs = list_command_directory(); let mut command_paths = dirs.iter().map(|dir| dir.join(&command_exe)); command_paths.find(|path| fs::metadata(&path).is_ok()) }
<|file_name|>forms.py<|end_file_name|><|fim▁begin|># Author: Martin Oehler <[email protected]> 2013 # License: GPL V2 from django.forms import ModelForm from django.forms import Form from django.forms import ModelChoiceField from django.forms.widgets import RadioSelect from django.forms.widgets import CheckboxSelectMultiple from django.forms.widgets import TextInput from django.forms.widgets import Textarea from django.forms.widgets import DateInput from django.contrib.admin import widgets <|fim▁hole|>from linboweb.linboserver.models import client from linboweb.linboserver.models import clientGroup from linboweb.linboserver.models import pxelinuxcfg class partitionForm(ModelForm): class Meta: model = partition class partitionSelectionForm(ModelForm): class Meta: model = partitionSelection class osForm(ModelForm): partitionselection = ModelChoiceField(queryset=partitionSelection.objects.all()) class Meta: model = os class vmForm(ModelForm): class Meta: model = vm class clientForm(ModelForm): pxelinuxconfiguration = ModelChoiceField(queryset=pxelinuxcfg.objects.all()) class Meta: model = client class clientGroupForm(ModelForm): class Meta: model = clientGroup class pxelinuxcfgForm(ModelForm): class Meta: model = pxelinuxcfg widgets = { 'configuration': Textarea(attrs={'cols': 80, 'rows': 40}), }<|fim▁end|>
from linboweb.linboserver.models import partition from linboweb.linboserver.models import partitionSelection from linboweb.linboserver.models import os from linboweb.linboserver.models import vm
<|file_name|>checkpoint.go<|end_file_name|><|fim▁begin|>/* Copyright 2017 The Kubernetes Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package checkpoint import ( "encoding/json" "fmt" "k8s.io/klog" "k8s.io/api/core/v1" "k8s.io/kubernetes/pkg/apis/core" "k8s.io/kubernetes/pkg/kubelet/checkpointmanager" "k8s.io/kubernetes/pkg/kubelet/checkpointmanager/checksum" ) const (<|fim▁hole|> delimiter = "_" podPrefix = "Pod" ) // PodCheckpoint defines the operations to retrieve pod type PodCheckpoint interface { checkpointmanager.Checkpoint GetPod() *v1.Pod } // Data to be stored as checkpoint type Data struct { Pod *v1.Pod Checksum checksum.Checksum } // NewPodCheckpoint returns new pod checkpoint func NewPodCheckpoint(pod *v1.Pod) PodCheckpoint { return &Data{Pod: pod} } // MarshalCheckpoint returns marshalled data func (cp *Data) MarshalCheckpoint() ([]byte, error) { cp.Checksum = checksum.New(*cp.Pod) return json.Marshal(*cp) } // UnmarshalCheckpoint returns unmarshalled data func (cp *Data) UnmarshalCheckpoint(blob []byte) error { return json.Unmarshal(blob, cp) } // VerifyChecksum verifies that passed checksum is same as calculated checksum func (cp *Data) VerifyChecksum() error { return cp.Checksum.Verify(*cp.Pod) } // GetPod retrieves the pod from the checkpoint func (cp *Data) GetPod() *v1.Pod { return cp.Pod } // checkAnnotations will validate the checkpoint annotations exist on the Pod func checkAnnotations(pod *v1.Pod) bool { if podAnnotations := pod.GetAnnotations(); podAnnotations != 
nil { if podAnnotations[core.BootstrapCheckpointAnnotationKey] == "true" { return true } } return false } //getPodKey returns the full qualified path for the pod checkpoint func getPodKey(pod *v1.Pod) string { return fmt.Sprintf("%s%s%v.yaml", podPrefix, delimiter, pod.GetUID()) } // LoadPods Loads All Checkpoints from disk func LoadPods(cpm checkpointmanager.CheckpointManager) ([]*v1.Pod, error) { pods := make([]*v1.Pod, 0) checkpointKeys, err := cpm.ListCheckpoints() if err != nil { klog.Errorf("Failed to list checkpoints: %v", err) } for _, key := range checkpointKeys { checkpoint := NewPodCheckpoint(nil) err := cpm.GetCheckpoint(key, checkpoint) if err != nil { klog.Errorf("Failed to retrieve checkpoint for pod %q: %v", key, err) continue } pods = append(pods, checkpoint.GetPod()) } return pods, nil } // WritePod a checkpoint to a file on disk if annotation is present func WritePod(cpm checkpointmanager.CheckpointManager, pod *v1.Pod) error { var err error if checkAnnotations(pod) { data := NewPodCheckpoint(pod) err = cpm.CreateCheckpoint(getPodKey(pod), data) } else { // This is to handle an edge where a pod update could remove // an annotation and the checkpoint should then be removed. err = cpm.RemoveCheckpoint(getPodKey(pod)) } return err } // DeletePod deletes a checkpoint from disk if present func DeletePod(cpm checkpointmanager.CheckpointManager, pod *v1.Pod) error { return cpm.RemoveCheckpoint(getPodKey(pod)) }<|fim▁end|>
// Delimiter used on checkpoints written to disk
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>//! Entity Component System Library (ECS) //! //! For info about why an ECS may be beneficial, see some of these articles: //! //! - http://gameprogrammingpatterns.com/component.html //! - http://t-machine.org/index.php/2007/09/03/entity-systems-are-the-future-of-mmog-development-part-1/ //! - http://www.gamedev.net/page/resources/_/technical/game-programming/understanding-component-entity-systems-r3013 //! - http://cowboyprogramming.com/2007/01/05/evolve-your-heirachy/ //! //! There is a large variety of ways an ECS may work. This particular one is similar to //! [Artemis](http://gamadu.com/artemis/). //! Although this isn't a port to Rust, most functionality should be similar, and the //! tutorials/manual there should be able to make up for the current lack of documentation [FIXME] //! //! Here's the basic structure: //! //! - An `Entity` is just an identifier. It contains no data or logic whatsoever. //! - A `Component` is a piece of data (eg: Position, Velocity, Colour). While containing logic can //! sometimes be useful, it's best practice to avoid it wherever possible. //! - A `System` runs all the logic. Most of the time, it filters out entities based on their //! components, and only runs it's logic on the entities it's interested in. These filters are //! called `Aspect`s. Some systems ignore entities, and just apply logic to the world itself. //! - An `Aspect` is a simple helper to filter entities based on their components. //! - The `World` organises all the above items together to make sure everything runs as it should. 
#![crate_name = "ecs"] #![crate_type = "lib"] #![cfg_attr(feature="nightly", feature(drain))] #[cfg(feature="serialisation")] #[macro_use] extern crate cereal; extern crate vec_map; pub use aspect::Aspect; pub use component::{Component, ComponentList}; pub use component::{EntityBuilder, EntityModifier}; pub use entity::{Entity, IndexedEntity, EntityIter}; pub use system::{System, Process}; pub use world::{ComponentManager, ServiceManager, SystemManager, DataHelper, World}; use std::ops::Deref; pub mod aspect; pub mod component; pub mod entity; pub mod system; pub mod world; pub struct BuildData<'a, T: ComponentManager>(&'a IndexedEntity<T>); pub struct ModifyData<'a, T: ComponentManager>(&'a IndexedEntity<T>); pub struct EntityData<'a, T: ComponentManager>(&'a IndexedEntity<T>); impl<'a, T: ComponentManager> Deref for EntityData<'a, T> { type Target = IndexedEntity<T>; fn deref(&self) -> &IndexedEntity<T> { &self.0 } } impl<'a, T: ComponentManager> Copy for BuildData<'a, T> {} impl<'a, T: ComponentManager> Copy for ModifyData<'a, T> {} impl<'a, T: ComponentManager> Copy for EntityData<'a, T> {} impl<'a, T: ComponentManager> Clone for BuildData<'a, T> { fn clone(&self) -> BuildData<'a, T> { *self } } impl<'a, T: ComponentManager> Clone for ModifyData<'a, T> { fn clone(&self) -> ModifyData<'a, T> { *self } } impl<'a, T: ComponentManager> Clone for EntityData<'a, T> { fn clone(&self) -> EntityData<'a, T> { *self } } #[doc(hidden)] pub trait EditData<T: ComponentManager> { fn entity(&self) -> &IndexedEntity<T>; } impl<'a, T: ComponentManager> EditData<T> for ModifyData<'a, T> { fn entity(&self) -> &IndexedEntity<T> { &self.0 } } impl<'a, T: ComponentManager> EditData<T> for EntityData<'a, T> { fn entity(&self) -> &IndexedEntity<T> { &self.0 } } // XXX: Eventually make these syntax extensions, once they are stabilised mod macros { #[macro_export] macro_rules! 
process { { $world:expr, $system:ident } => { $crate::Process::process(&mut $world.systems.$system, &mut $world.data) }; { $world:expr, $system:ident . $function:ident ($($args:expr),*) } => { $world.systems.$system.$function($($args,)* &mut $world.data) }; } #[macro_export] macro_rules! components { { $(#[$attr:meta])* struct $Name:ident; } => { $(#[$attr])* pub struct $Name; impl $crate::ComponentManager for $Name { fn __new() -> $Name { $Name } fn __remove_all(&mut self, _: &$crate::IndexedEntity<$Name>) { } } }; { #[builder($Builder:ident)] $(#[$attr:meta])* struct $Name:ident { $(#[$kind:ident] $field_name:ident : $field_ty:ty),+ } } => { components!($(#[$attr])* struct $Name { $(#[$kind] $field_name : $field_ty),+ }); #[derive(Default)] pub struct $Builder { $( pub $field_name : Option<$field_ty>, )+ } impl $crate::EntityBuilder<$Name> for $Builder { fn build(self, e: $crate::BuildData<$Name>, c: &mut $Name) { $( self.$field_name.map(|cmpt| c.$field_name.add(&e, cmpt)) );+; } } }; { $(#[$attr:meta])* struct $Name:ident { $(#[$kind:ident] $field_name:ident : $field_ty:ty),+ } } => { $(#[$attr])* pub struct $Name { $( pub $field_name : $crate::ComponentList<$Name, $field_ty>, )+ } impl $crate::ComponentManager for $Name { fn __new() -> $Name { $Name { $( $field_name : $crate::ComponentList::$kind() ),+ } } fn __remove_all(&mut self, entity: &$crate::IndexedEntity<$Name>) { $( self.$field_name.__clear(entity) );+ } } }; { #[builder($Builder:ident)] $(#[$attr:meta])* struct $Name:ident { $(#[$kind:ident] $field_name:ident : $field_ty:ty),+, } } => { components!( #[builder($Builder)] $(#[$attr])* struct $Name { $(#[$kind] $field_name : $field_ty),+ } ); }; { $(#[$attr:meta])* struct $Name:ident { $(#[$kind:ident] $field_name:ident : $field_ty:ty),+, } } => { components!( $(#[$attr])* struct $Name { $(#[$kind] $field_name : $field_ty),+ } ); }; } #[macro_export] macro_rules! 
systems { { $(#[$attr:meta])* struct $Name:ident<$components:ty, $services:ty>; } => { $(#[$attr])* pub struct $Name; impl $crate::SystemManager for $Name { type Components = $components; type Services = $services; fn __new() -> $Name { $Name } fn __activated(&mut self, _: $crate::EntityData<$components>, _: &$components, _: &mut $services) { } fn __reactivated(&mut self, _: $crate::EntityData<$components>, _: &$components, _: &mut $services) { } fn __deactivated(&mut self, _: $crate::EntityData<$components>, _: &$components, _: &mut $services) { } fn __update(&mut self, _: &mut $crate::DataHelper<$components, $services>) { } } }; { $(#[$attr:meta])* struct $Name:ident<$components:ty, $services:ty> { active: { $($field_name:ident : $field_ty:ty = $field_init:expr,)* }, passive: { $($p_field_name:ident : $p_field_ty:ty = $p_field_init:expr,)* } } } => { $(#[$attr])* pub struct $Name { $(pub $field_name : $field_ty,)* $(pub $p_field_name : $p_field_ty,)* } impl $crate::SystemManager for $Name { type Components = $components; type Services = $services; fn __new() -> $Name { $Name { $( $field_name : $field_init, )* $( $p_field_name : $p_field_init, )* } } fn __activated(&mut self, en: $crate::EntityData<$components>, co: &$components, se: &mut $services) { $( $crate::System::activated(&mut self.$field_name, &en, co, se); )* $( $crate::System::activated(&mut self.$p_field_name, &en, co, se); )* } fn __reactivated(&mut self, en: $crate::EntityData<$components>, co: &$components, se: &mut $services) { $( $crate::System::reactivated(&mut self.$field_name, &en, co, se); )* $( $crate::System::reactivated(&mut self.$p_field_name, &en, co, se); )* } fn __deactivated(&mut self, en: $crate::EntityData<$components>, co: &$components, se: &mut $services) { $( $crate::System::deactivated(&mut self.$field_name, &en, co, se); )* $( $crate::System::deactivated(&mut self.$p_field_name, &en, co, se); )* } fn __update(&mut self, _co: &mut $crate::DataHelper<$components, $services>) { $( 
$crate::Process::process(&mut self.$field_name, _co); )* } } }; } #[macro_export] macro_rules! aspect { { <$components:ty> all: [$($all_field:ident),*] none: [$($none_field:ident),*] } => { $crate::Aspect::__new(Box::new(|_en: &$crate::EntityData<$components>, _co: &$components| { ($(_co.$all_field.has(_en) &&)* true) && !($(_co.$none_field.has(_en) ||)* false) })) }; { <$components:ty> all: [$($field:ident),*] } => { aspect!( <$components> all: [$($field),*]<|fim▁hole|> }; { <$components:ty> none: [$($field:ident),*] } => { aspect!( <$components> all: [] none: [$($field),*] ) }; } }<|fim▁end|>
none: [] )
<|file_name|>ask.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnChanges, Input, Output, EventEmitter } from "ng-metadata/core" @Component({ selector: "ask-cmp", template: require('./askCmp.html') //WEBPACK MAGIC INLINE HTML }) export class AskComponent implements OnChanges { constructor() { this.req = this.req || "Q"; this.txt = this.val || ""; } @Output("onRes") public res = new EventEmitter<string>() //external event handler<|fim▁hole|> private txt: string;//internal text bound @Input("<") public msg: string; //placeholder text @Input("@") public req: string; //label / question @Input("=") public val: any; //external vm.name public undo() { this.txt = this.val || ""; } public okemit(txt) { this.res.emit(txt); //emit use RxJS } ngOnChanges() { //old life-cycle $onChanges() this.txt = this.val || ""; } }<|fim▁end|>
<|file_name|>_ticktextsrc.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators class TicktextsrcValidator(_plotly_utils.basevalidators.SrcValidator): def __init__(self, plotly_name="ticktextsrc", parent_name="layout.xaxis", **kwargs): super(TicktextsrcValidator, self).__init__( plotly_name=plotly_name, parent_name=parent_name, edit_type=kwargs.pop("edit_type", "none"), **kwargs<|fim▁hole|><|fim▁end|>
)
<|file_name|>10926.cpp<|end_file_name|><|fim▁begin|>#include <bits/stdc++.h> using namespace std; vector<int> adj[143]; int n; bool seen[143]; int dfs(int u) { seen[u] = true; int result = 0; for (int v: adj[u]) if (!seen[v]) result += dfs(v) + 1; return result; } int main() { ios_base::sync_with_stdio(0);cin.tie(0); while (cin >> n && n) { for (int i=0, sz; i<n; ++i) { cin >> sz; adj[i].resize(sz); for (int j=0; j<sz; ++j) { cin >> adj[i][j]; --adj[i][j]; } } int mx=-1, mxi; for (int i=0; i<n; ++i) { memset(seen, 0, n); int c = dfs(i); if (mx < c) { mx = c; mxi = i; }<|fim▁hole|> } cout << mxi+1 << "\n"; } }<|fim▁end|>
<|file_name|>liquidwrapper.rs<|end_file_name|><|fim▁begin|>extern crate liquid; use std::fs::File; use std::io::Read; use templatewrapper::TemplateWrapper; pub struct LiquidTemplate { template: String,<|fim▁hole|>} impl TemplateWrapper for LiquidTemplate { fn new(template_file: String) -> LiquidTemplate { let mut template = String::new(); File::open(template_file).unwrap().read_to_string(&mut template); LiquidTemplate { template: template, } } } impl liquid::Renderable for LiquidTemplate { fn render(&self, context: &mut liquid::Context) -> Option<String> { let mut options : liquid::LiquidOptions = Default::default(); let parsed = liquid::parse(&self.template, &mut options).unwrap(); parsed.render(context) } }<|fim▁end|>
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>macro_rules! timeit { ($func:expr) => ({ let t1 = std::time::Instant::now(); println!("{:?}", $func); let t2 = std::time::Instant::now().duration_since(t1); println!("{}", t2.as_secs() as f64 + t2.subsec_nanos() as f64 / 1000000000.00); }) } fn main() { fn is_prime(n: usize) -> bool { let primes: Vec<usize> = vec![1, 2, 3, 5, 7]; match n < 1 || n % 2 == 0 || n % 3 == 0 || primes.iter().filter(|&x| *x == n).collect::<Vec<_>>().len() > 0 { true => return false, false => { let mut i = 5; loop { if i * i > n { break; } else { if n % i == 0 || n % (i + 2) == 0 { return false; } } i = i + 6; } } }<|fim▁hole|> true } fn get_prime(n: usize) -> usize { (12..) .filter(|&x| is_prime(x)) .take(n - 5) .max() .unwrap() } timeit!(get_prime(10001)) }<|fim▁end|>
<|file_name|>R114.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python3 # -*- coding: utf-8 -*- '''Pychemqt, Chemical Engineering Process simulator Copyright (C) 2009-2017, Juan José Gómez Romera <[email protected]> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.''' from lib import unidades from lib.meos import MEoS class R114(MEoS): """Multiparameter equation of state for R114""" name = "1,2-dichloro-1,1,2,2-tetrafluoroethane" CASNumber = "76-14-2" formula = "CClF2CClF2" synonym = "R114" _refPropName = "R114" _coolPropName = "R114" rhoc = unidades.Density(579.969) Tc = unidades.Temperature(418.83) Pc = unidades.Pressure(3257.0, "kPa") M = 170.921 # g/mol Tt = unidades.Temperature(180.63) Tb = unidades.Temperature(276.741) f_acent = 0.25253 momentoDipolar = unidades.DipoleMoment(0.658, "Debye") id = 231 f = 1/8.31451*170.93 CP1 = {"ao": 0.97651380e-1*f, "an": [0.3240861e-2*f, -0.5895364e-5*f, 0.6737929e-8*f, -0.3546364e-11*f], "pow": [1, 2, 3, 4]} platzer = { "__type__": "Helmholtz", "__name__": "Bender equation of state for R-114 of Platzer (1990)", "__doi__": {"autor": "Platzer, B., Polt, A., Maurer, G.", "title": "Thermophysical Properties of Refrigerants", "ref": "Berlin: Springer-Verlag, 1990.", "doi": ""}, "R": 8.31451, "cp": CP1, "ref": "NBP", "Tmin": 273.15, "Tmax": 507.0, "Pmax": 21000.0, "rhomax": 8.942, "nr1": [-0.340776521414, 0.323001398420, -0.424950537596e-1,<|fim▁hole|> 0.108043243088, 
0.460683793064e-1, -0.174821616881, 0.317530854287e-1], "d1": [0, 0, 0, 1, 1, 1, 1, 1, 2, 2, 2, 3, 3, 4, 4, 5], "t1": [3, 4, 5, 0, 1, 2, 3, 4, 0, 1, 2, 0, 1, 0, 1, 1], "nr2": [0.340776521414, -0.323001398420, 0.424950537596e-1, -0.166940100976e1, 0.408693082002e1, -0.241738963889e1], "d2": [0, 0, 0, 2, 2, 2], "t2": [3, 4, 5, 3, 4, 5], "c2": [2]*6, "gamma2": [1.21103865]*6} eq = platzer, _PR = [-0.1804, -16.3839] _surface = {"sigma": [0.05239], "exp": [1.258]} _vapor_Pressure = { "eq": 3, "n": [-0.72195e1, 0.16357e1, -0.14576e1, -0.69580e1, 0.57181e1], "t": [1.0, 1.5, 2.2, 4.8, 6.2]} _liquid_Density = { "eq": 1, "n": [0.43023, 0.22722e2, -0.27118e2, 0.13247e2, -0.90529e1], "t": [0.095, 0.93, 1.1, 2.0, 3.0]} _vapor_Density = { "eq": 2, "n": [-0.46609, -6.8355, -167.15, 1.5805e4, -3.1859e4, 2.1548e4], "t": [0.09, 0.76, 4.0, 6.5, 7.0, 8.0]}<|fim▁end|>
0.107938879710e1, -0.199243619673e1, -0.155135133506, -0.121465790553, -0.165038582393e-1, -0.186915808643, 0.308074612567, 0.115861416115, 0.276358316589e-1,
<|file_name|>tcp.rs<|end_file_name|><|fim▁begin|>// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. use option::{Option, Some, None}; use result::{Ok, Err}; use io::net::ip::SocketAddr; use io::{Reader, Writer, Listener, Acceptor}; use io::{io_error, EndOfFile}; use rt::rtio::{IoFactory, LocalIo, RtioSocket, RtioTcpListener}; use rt::rtio::{RtioTcpAcceptor, RtioTcpStream}; pub struct TcpStream { priv obj: ~RtioTcpStream } impl TcpStream { fn new(s: ~RtioTcpStream) -> TcpStream { TcpStream { obj: s } } pub fn connect(addr: SocketAddr) -> Option<TcpStream> { LocalIo::maybe_raise(|io| { io.tcp_connect(addr).map(TcpStream::new) }) } pub fn peer_name(&mut self) -> Option<SocketAddr> { match self.obj.peer_name() { Ok(pn) => Some(pn), Err(ioerr) => { debug!("failed to get peer name: {:?}", ioerr); io_error::cond.raise(ioerr); None } } } pub fn socket_name(&mut self) -> Option<SocketAddr> { match self.obj.socket_name() { Ok(sn) => Some(sn), Err(ioerr) => { debug!("failed to get socket name: {:?}", ioerr); io_error::cond.raise(ioerr); None } } } } impl Reader for TcpStream { fn read(&mut self, buf: &mut [u8]) -> Option<uint> { match self.obj.read(buf) { Ok(read) => Some(read), Err(ioerr) => { // EOF is indicated by returning None if ioerr.kind != EndOfFile { io_error::cond.raise(ioerr); } return None; } } } } impl Writer for TcpStream { fn write(&mut self, buf: &[u8]) { match self.obj.write(buf) { Ok(_) => (), Err(ioerr) => io_error::cond.raise(ioerr), } } } pub struct TcpListener { priv obj: ~RtioTcpListener } impl TcpListener { pub fn bind(addr: SocketAddr) -> 
Option<TcpListener> { LocalIo::maybe_raise(|io| { io.tcp_bind(addr).map(|l| TcpListener { obj: l }) }) } pub fn socket_name(&mut self) -> Option<SocketAddr> { match self.obj.socket_name() { Ok(sn) => Some(sn), Err(ioerr) => { debug!("failed to get socket name: {:?}", ioerr); io_error::cond.raise(ioerr); None } } } } impl Listener<TcpStream, TcpAcceptor> for TcpListener { fn listen(self) -> Option<TcpAcceptor> { match self.obj.listen() { Ok(acceptor) => Some(TcpAcceptor { obj: acceptor }), Err(ioerr) => { io_error::cond.raise(ioerr); None } } } } pub struct TcpAcceptor { priv obj: ~RtioTcpAcceptor } impl Acceptor<TcpStream> for TcpAcceptor { fn accept(&mut self) -> Option<TcpStream> { match self.obj.accept() { Ok(s) => Some(TcpStream::new(s)), Err(ioerr) => { io_error::cond.raise(ioerr); None } } } } #[cfg(test)] mod test { use super::*; use io::net::ip::SocketAddr; use io::*; use prelude::*; // FIXME #11530 this fails on android because tests are run as root iotest!(fn bind_error() { let mut called = false; io_error::cond.trap(|e| { assert!(e.kind == PermissionDenied); called = true; }).inside(|| { let addr = SocketAddr { ip: Ipv4Addr(0, 0, 0, 0), port: 1 }; let listener = TcpListener::bind(addr); assert!(listener.is_none()); }); assert!(called); } #[ignore(cfg(windows))] #[ignore(cfg(target_os = "android"))]) iotest!(fn connect_error() { let mut called = false; io_error::cond.trap(|e| { assert_eq!(e.kind, ConnectionRefused); called = true; }).inside(|| { let addr = SocketAddr { ip: Ipv4Addr(0, 0, 0, 0), port: 1 }; let stream = TcpStream::connect(addr); assert!(stream.is_none()); }); assert!(called); }) iotest!(fn smoke_test_ip4() { let addr = next_test_ip4(); let (port, chan) = Chan::new(); do spawn { port.recv(); let mut stream = TcpStream::connect(addr); stream.write([99]); } let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); let mut stream = acceptor.accept(); let mut buf = [0]; stream.read(buf); assert!(buf[0] == 99); }) iotest!(fn 
smoke_test_ip6() { let addr = next_test_ip6(); let (port, chan) = Chan::new(); do spawn { port.recv(); let mut stream = TcpStream::connect(addr); stream.write([99]); } let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); let mut stream = acceptor.accept(); let mut buf = [0]; stream.read(buf); assert!(buf[0] == 99); }) iotest!(fn read_eof_ip4() { let addr = next_test_ip4(); let (port, chan) = Chan::new(); do spawn { port.recv(); let _stream = TcpStream::connect(addr); // Close } let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); let mut stream = acceptor.accept(); let mut buf = [0]; let nread = stream.read(buf); assert!(nread.is_none()); }) iotest!(fn read_eof_ip6() { let addr = next_test_ip6(); let (port, chan) = Chan::new(); do spawn { port.recv(); let _stream = TcpStream::connect(addr); // Close } let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); let mut stream = acceptor.accept(); let mut buf = [0]; let nread = stream.read(buf); assert!(nread.is_none()); }) iotest!(fn read_eof_twice_ip4() { let addr = next_test_ip4(); let (port, chan) = Chan::new(); do spawn { port.recv(); let _stream = TcpStream::connect(addr); // Close } let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); let mut stream = acceptor.accept(); let mut buf = [0]; let nread = stream.read(buf); assert!(nread.is_none()); io_error::cond.trap(|e| { if cfg!(windows) { assert_eq!(e.kind, NotConnected); } else { fail!(); } }).inside(|| { let nread = stream.read(buf); assert!(nread.is_none()); }) }) iotest!(fn read_eof_twice_ip6() { let addr = next_test_ip6(); let (port, chan) = Chan::new(); do spawn { port.recv(); let _stream = TcpStream::connect(addr); // Close } let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); let mut stream = acceptor.accept(); let mut buf = [0]; let nread = stream.read(buf); assert!(nread.is_none()); io_error::cond.trap(|e| { if cfg!(windows) { assert_eq!(e.kind, NotConnected); } else { fail!(); } 
}).inside(|| { let nread = stream.read(buf); assert!(nread.is_none()); }) }) iotest!(fn write_close_ip4() { let addr = next_test_ip4(); let (port, chan) = Chan::new(); do spawn { port.recv(); let _stream = TcpStream::connect(addr); // Close } let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); let mut stream = acceptor.accept(); let buf = [0]; loop { let mut stop = false; io_error::cond.trap(|e| { // NB: ECONNRESET on linux, EPIPE on mac, ECONNABORTED // on windows assert!(e.kind == ConnectionReset || e.kind == BrokenPipe || e.kind == ConnectionAborted, "unknown error: {:?}", e); stop = true; }).inside(|| { stream.write(buf); }); if stop { break } } }) iotest!(fn write_close_ip6() { let addr = next_test_ip6(); let (port, chan) = Chan::new(); do spawn { port.recv(); let _stream = TcpStream::connect(addr); // Close } let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); let mut stream = acceptor.accept(); let buf = [0]; loop { let mut stop = false; io_error::cond.trap(|e| { // NB: ECONNRESET on linux, EPIPE on mac, ECONNABORTED // on windows assert!(e.kind == ConnectionReset || e.kind == BrokenPipe || e.kind == ConnectionAborted, "unknown error: {:?}", e); stop = true; }).inside(|| { stream.write(buf); }); if stop { break } } }) iotest!(fn multiple_connect_serial_ip4() { let addr = next_test_ip4(); let max = 10; let (port, chan) = Chan::new(); do spawn { port.recv(); max.times(|| { let mut stream = TcpStream::connect(addr); stream.write([99]); }); } let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); for ref mut stream in acceptor.incoming().take(max) { let mut buf = [0]; stream.read(buf); assert_eq!(buf[0], 99); } }) iotest!(fn multiple_connect_serial_ip6() { let addr = next_test_ip6(); let max = 10; let (port, chan) = Chan::new(); do spawn { port.recv(); max.times(|| { let mut stream = TcpStream::connect(addr); stream.write([99]); }); } let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); for ref mut 
stream in acceptor.incoming().take(max) { let mut buf = [0]; stream.read(buf); assert_eq!(buf[0], 99); } }) iotest!(fn multiple_connect_interleaved_greedy_schedule_ip4() { let addr = next_test_ip4(); static MAX: int = 10; let (port, chan) = Chan::new(); do spawn { let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); for (i, stream) in acceptor.incoming().enumerate().take(MAX as uint) { // Start another task to handle the connection do spawn { let mut stream = stream; let mut buf = [0]; stream.read(buf); assert!(buf[0] == i as u8); debug!("read"); } } } port.recv(); connect(0, addr); fn connect(i: int, addr: SocketAddr) { if i == MAX { return } do spawn { debug!("connecting"); let mut stream = TcpStream::connect(addr); // Connect again before writing connect(i + 1, addr); debug!("writing"); stream.write([i as u8]); } } }) iotest!(fn multiple_connect_interleaved_greedy_schedule_ip6() { let addr = next_test_ip6(); static MAX: int = 10; let (port, chan) = Chan::<()>::new(); do spawn { let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); for (i, stream) in acceptor.incoming().enumerate().take(MAX as uint) { // Start another task to handle the connection do spawn { let mut stream = stream; let mut buf = [0]; stream.read(buf); assert!(buf[0] == i as u8); debug!("read"); } } } port.recv(); connect(0, addr); fn connect(i: int, addr: SocketAddr) { if i == MAX { return } do spawn { debug!("connecting"); let mut stream = TcpStream::connect(addr); // Connect again before writing connect(i + 1, addr); debug!("writing"); stream.write([i as u8]); } } }) iotest!(fn multiple_connect_interleaved_lazy_schedule_ip4() { let addr = next_test_ip4(); static MAX: int = 10; let (port, chan) = Chan::new(); do spawn { let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); for stream in acceptor.incoming().take(MAX as uint) { // Start another task to handle the connection do spawn { let mut stream = stream; let mut buf = [0]; stream.read(buf); 
assert!(buf[0] == 99); debug!("read"); } } } port.recv(); connect(0, addr); fn connect(i: int, addr: SocketAddr) { if i == MAX { return } do spawn { debug!("connecting"); let mut stream = TcpStream::connect(addr); // Connect again before writing connect(i + 1, addr); debug!("writing"); stream.write([99]); } } }) iotest!(fn multiple_connect_interleaved_lazy_schedule_ip6() { let addr = next_test_ip6(); static MAX: int = 10; let (port, chan) = Chan::new(); do spawn { let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); for stream in acceptor.incoming().take(MAX as uint) { // Start another task to handle the connection do spawn { let mut stream = stream; let mut buf = [0]; stream.read(buf); assert!(buf[0] == 99); debug!("read"); } } } port.recv(); connect(0, addr); fn connect(i: int, addr: SocketAddr) { if i == MAX { return } do spawn { debug!("connecting"); let mut stream = TcpStream::connect(addr); // Connect again before writing connect(i + 1, addr); debug!("writing"); stream.write([99]); } } }) pub fn socket_name(addr: SocketAddr) { let mut listener = TcpListener::bind(addr).unwrap(); // Make sure socket_name gives // us the socket we binded to. let so_name = listener.socket_name(); assert!(so_name.is_some()); assert_eq!(addr, so_name.unwrap()); } pub fn peer_name(addr: SocketAddr) {<|fim▁hole|> let mut acceptor = TcpListener::bind(addr).listen(); chan.send(()); acceptor.accept(); } port.recv(); let stream = TcpStream::connect(addr); assert!(stream.is_some()); let mut stream = stream.unwrap(); // Make sure peer_name gives us the // address/port of the peer we've // connected to. 
let peer_name = stream.peer_name(); assert!(peer_name.is_some()); assert_eq!(addr, peer_name.unwrap()); } iotest!(fn socket_and_peer_name_ip4() { peer_name(next_test_ip4()); socket_name(next_test_ip4()); }) iotest!(fn socket_and_peer_name_ip6() { // XXX: peer name is not consistent //peer_name(next_test_ip6()); socket_name(next_test_ip6()); }) iotest!(fn partial_read() { let addr = next_test_ip4(); let (p, c) = Chan::new(); do spawn { let mut srv = TcpListener::bind(addr).listen(); c.send(()); let mut cl = srv.accept().unwrap(); cl.write([10]); let mut b = [0]; cl.read(b); c.send(()); } p.recv(); let mut c = TcpStream::connect(addr).unwrap(); let mut b = [0, ..10]; assert_eq!(c.read(b), Some(1)); c.write([1]); p.recv(); }) }<|fim▁end|>
let (port, chan) = Chan::new(); do spawn {
<|file_name|>AndToOr.java<|end_file_name|><|fim▁begin|>package fmautorepair.mutationoperators.features; import org.apache.log4j.Logger; import de.ovgu.featureide.fm.core.Feature; import de.ovgu.featureide.fm.core.FeatureModel; import fmautorepair.mutationoperators.FMMutator; /** transform And to or */ public class AndToOr extends FeatureMutator { private static Logger logger = Logger.getLogger(AndToOr.class.getName()); public static FMMutator instance = new AndToOr(); @Override String mutate(FeatureModel fm, Feature tobemutated) { // if has more than one child or one child but optional tobemutated.changeToOr(); logger.info("mutating feature " + tobemutated.getName() + " from AND TO OR"); return (tobemutated.getName() + " from AND TO OR"); }<|fim▁hole|> @Override boolean isMutable(FeatureModel fm, Feature tobemutated) { int size = tobemutated.getChildren().size(); return (tobemutated.isAnd() && size >0); } }<|fim▁end|>
<|file_name|>IParser.d.ts<|end_file_name|><|fim▁begin|>/** * Namespace of the jquery-scope-watch. * @namespace */<|fim▁hole|> /** * Parser instance * @interface */ interface IParser { /** * Return result that parse expression. * @param {Scope} scope * @return {*} parse result */ (scope: Scope): any; /** * Set value to expression result. * @param {Scope} scope * @param {*} value */ assign(scope: Scope, value: any); } }<|fim▁end|>
declare module scope {
<|file_name|>ja.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- { "A location that specifies the geographic area for this region. This can be a location from the location hierarchy, or a 'group location', or a location that has a boundary for the area.": 'この地域を地理的に指定するロケーション。これはロケーションの階層構造のうちの一つか、ロケーショングループの一つか、この地域の境界に面するロケーションです。', "Acronym of the organization's name, eg. IFRC.": '団体の略称 (IFRCなど)', "Authenticate system's Twitter account": '認証システムの Twitter アカウント', "Can't import tweepy": 'tweepyをインポートできません', "Click on 'Pledge' button in the left-hand column to make a Pledge to match a request for aid.": "救援要請と寄付項目を関連付けるには、項目左の'寄付'ボタンを押してください。", "Couldn't import tweepy library": 'tweepy libraryをインポートできません', "Detailed address of the site for informational/logistics purpose. Please note that you can add GIS/Mapping data about this site in the 'Location' field mentioned below.": 'サイトの所在地住所を詳細に記述します。情報伝達と物品搬送に使用します。このサイトに関する情報を、以下の「ロケーション」項目にGIS/地図データを挿入できることに注意してください。', "If this configuration represents a region for the Regions menu, give it a name to use in the menu. The name for a personal map configuration will be set to the user's name.": 'もしこの設定が地域メニューにある地域を指しているのであれば、メニューで使う名前を設定してください。個人用の地図設定の名前では、ユーザの名前で設定されます。', "If this field is populated then a user who specifies this Organization when signing up will be assigned as a Staff of this Organization unless their domain doesn't match the domain field.": 'この項目が設定されている場合、ユーザーは、登録の際、この団体のスタッフとしてアサインされるように指定することができます。ただし、ユーザーのドメインと団体のドメイン項目に差異がない場合のみ有効です。', "If this is ticked, then this will become the user's Base Location & hence where the user is shown on the Map": 'この項目の内容はユーザーの基本所在地となり、ユーザーが地図上に表示されるようになります。', "If this setting is enabled then all deleted records are just flagged as deleted instead of being really deleted. 
They will appear in the raw database access but won't be visible to normal users.": 'この設定が有効の場合、削除されたレコードには削除済みフラグが付与されるだけで、実際のデータは消去されません。一般のユーザが閲覧することはできませんが、データベースを直接参照することでデータを確認できます。', "If you cannot find the record of the person you want to report missing, you can add it by clicking 'Add Person' below:": '行方不明者の登録が存在しない場合、「人物情報を追加」ボタンを押して、新規登録を行ってください。', "If you don't see the Hospital in the list, you can add a new one by clicking link 'Create Hospital'.": 'リストに病院が表示されない場合、「病院情報を追加」することで新規に登録が可能です。', "If you don't see the Office in the list, you can add a new one by clicking link 'Create Office'.": 'オフィスが一覧にない場合は、「オフィスを追加」をクリックすることで新規のオフィスを追加できます。', "If you don't see the Organization in the list, you can add a new one by clicking link 'Create Organization'.": "もしあなたの団体の登録がない場合、'団体を追加'リンクをクリックすることで追加が可能です", "Instead of automatically syncing from other peers over the network, you can also sync from files, which is necessary where there's no network. You can use this page to import sync data from files and also export data to sync files. Click the link on the right to go to this page.": 'データを同期する際には、ネットワークを経由してではなく、ファイルから行うことも可能です。ネットワークが存在しない場合に利用されます。ファイルからのデータインポート、およびファイルへのエクスポートはこのページから実行可能です。右部のリンクをクリックしてください。', "Level is higher than parent's": '親情報よりも高いレベルです', "NB SMS requests are filtered to just those which are 'actionable', whilst the Tweet requests are unfiltered, so that is likely to be a good place to start Searching.": "注意: SMS は'アクション可能'のためリクエストがフィルターされます。一方、ツイートのリクエストはフィルターされません。よって、これは検索する手段となります", "Need a 'url' argument!": "'url'引数が必要です。", "Optional. The name of the geometry column. In PostGIS this defaults to 'the_geom'.": "オプション項目。ジオメトリカラムの名称です。PostGISでのデフォルト値は 'the_geom'となります。", "Parent level should be higher than this record's level. 
Parent level is": '親レベルは、このレコードのレベルより上位でなければなりません。親レベルは', "Password fields don't match": 'パスワードが一致しません。', "Phone number to donate to this organization's relief efforts.": 'この団体の救援活動に対して寄付を行う際の連絡先となる電話番号を記載します。', "Please come back after sometime if that doesn't help.": 'この方法で問題が解決しない場合は、しばらく時間を置いて再度アクセスしてください。', "Press the 'Delete Old' button to have all records which reference this one be repointed at the new one & then the old record will be deleted.": "'Delete Old'ボタンを押すことで、データを参照しているレコードは全て参照先を再指定され、古い方のレコードは削除されます。", "Quantity in %s's Inventory": '%s 倉庫にある量', "Search here for a person's record in order to:": '人物情報の検索を行い、以下の機能を実現します:', "Select a person in charge for status 'assigned'": "状況が '割り当て済み' である担当者を選択します", "Select this if all specific locations need a parent at the deepest level of the location hierarchy. For example, if 'district' is the smallest division in the hierarchy, then all specific locations would be required to have a district as a parent.": 'もし全ての特定の場所が住所階層の最下層で親の場所を必要とするなら、これを選択して下さい。例えば、もし「地区」が階層の最小の地域なら、全ての特定の場所は親階層の地区を持っている必要が有るでしょう。', "Select this if all specific locations need a parent location in the location hierarchy. This can assist in setting up a 'region' representing an affected area.": 'もし全ての特定の場所が住所階層での親の場所を必要とするなら、これを選択して下さい。これは被災地の「地域」表示の設定に役立てられます。', "Sorry, things didn't get done on time.": 'すいません、時間通りに行われていません。', "Sorry, we couldn't find that page.": 'すいません、お探しのページは見つかりませんでした。', "System's Twitter account updated": 'システムのTwitterアカウントを変更しました', "The <a href='http://en.wikipedia.org/wiki/Well-known_text' target=_blank>Well-Known Text</a> representation of the Polygon/Line.": "この線、あるいは面の<a href='http://en.wikipedia.org/wiki/Well-known_text' target=_blank>具体的な説明</a>", "The Donor(s) for this project. Multiple values can be selected by holding down the 'Control' key.": 'このプロジェクトの資金提供組織を選択します。複数の項目を選択するには、Ctrlキーを押しながらクリックしてください。', "The Sector(s) this organization works in. 
Multiple values can be selected by holding down the 'Control' key.": 'この団体の活動分野を選択します。複数の項目を選択するには、コントロールキーを押しながらクリックしてください。', "The URL of the image file. If you don't upload an image file, then you must specify its location here.": '画像ファイルのURLです。ファイルのアップロードを行わない場合、ロケーションをURL項目に入力してください。', "The person's manager within this Office/Project.": 'このオフィス/プロジェクトのマネージャ。', "To search for a body, enter the ID label of the body. You may use % as wildcard. Press 'Search' without input to list all bodies.": '遺体の検索を行うには、遺体のID番号を入力してください。検索時のワイルドカード文字として、%を使うことができます。入力せずに「検索」すると、全ての遺体が表示されます。', "To search for a body, enter the ID tag number of the body. You may use % as wildcard. Press 'Search' without input to list all bodies.": 'ID情報を入力することで、遺体を検索します。ワイルドカードとして % が使用できます。何も指定せずに「検索」すると、全ての遺体が表示されます。', "To search for a hospital, enter any of the names or IDs of the hospital, or the organization name or acronym, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all hospitals.": "病院を検索するには、名前、病院のID、団体名、省略名のいずれかをスペース(空白)で区切って入力してください。 % がワイルドカードとして使えます。全病院のリストを表示するにはなにも入力せずに '検索' ボタンを押してください。", "To search for a hospital, enter any of the names or IDs of the hospital, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all hospitals.": '探し出したい病院をテキスト入力し、検索を行うことができます。検索時のワイルドカード文字として、%を使うことができます。何も入力せずに「検索」ボタンを押した場合、全ての病院を表示します。', "To search for a hospital, enter any part of the name or ID. You may use % as wildcard. Press 'Search' without input to list all hospitals.": '病院を検索するには、名称の一部かIDを入力してください。検索時のワイルドカード文字として、%を使うことができます。何も入力せずに「検索」を押した場合、全ての病院を表示します。', "To search for a location, enter the name. You may use % as wildcard. Press 'Search' without input to list all locations.": "ロケーションを検索するには、名前を入力します。%をワイルドカード文字として使用することが出来ます。何も入力しないで '検索' をクリックするとすべてのロケーションが表示されます。", "To search for a person, enter any of the first, middle or last names and/or an ID number of a person, separated by spaces. 
You may use % as wildcard. Press 'Search' without input to list all persons.": '苗字、名前などを半角スペースで区切って入力し、人物検索して下さい。「%」を使うとファジー検索できます。何も入力せずに検索すれば、全ての情報を検索表示します。', "To search for a person, enter any of the first, middle or last names, separated by spaces. You may use % as wildcard. Press 'Search' without input to list all persons.": '人を検索するためには、お名前(苗字、名前または両方)を入力してください。また姓名の間にはスペースをいれてください。ワイルドカードとして % が使えます。すべての人物情報をリストするには、検索ボタンをおしてください。', "To search for a request, enter some of the text that you are looking for. You may use % as wildcard. Press 'Search' without input to list all requests.": '探し出したい支援要請をテキスト入力し、検索を行うことができます。検索時のワイルドカード文字として、%を使うことができます。何も入力せずに「検索」ボタンを押した場合、全ての支援要請を表示します。', "To search for an assessment, enter any portion the ticket number of the assessment. You may use % as wildcard. Press 'Search' without input to list all assessments.": 'アセスメントを検索するには、アセスメントのチケット番号の一部を入力してください。ワイルドカードとして % が使えます。すべてのアセスメントをリストするには、なにも入力せず検索ボタンをおしてください。', "Type the first few characters of one of the Person's names.": '検索したい人物の名前の先頭数文字を入力してください', "Upload an image file here. If you don't upload an image file, then you must specify its location in the URL field.": '画像ファイルのアップロードはここから行ってください。ファイルのアップロードを行わない場合、ロケーションをURL項目に入力してください。', "View and/or update details of the person's record": '人物情報を検索し、詳細の閲覧や更新を行ないます', "View/Edit the Database directly (caution: doesn't respect the framework rules!)": 'データベースの直接閲覧/編集(注意:フレームワークの規則に反します)', "What are the people's normal ways to obtain food in this area?": 'この地域で食料を調達するための手段を記載してください', "What should be done to reduce women and children's vulnerability to violence?": '未成年や女性を暴力から守るために、どのような活動や設備が必要かを記載してください', "When syncing data with others, conflicts happen in cases when two (or more) parties want to sync information which both of them have modified, i.e. conflicting information. Sync module tries to resolve such conflicts automatically but in some cases it can't. 
In those cases, it is up to you to resolve those conflicts manually, click on the link on the right to go to this page.": '他とデータを同期するとき、二つ(以上)の団体がそれぞれ更新した情報を同期するときにコンフリクトが発生することがあります。同期モジュールは、コンフリクトを自動解決しようと試みますが、解決できないことがあります。そのような場合、手作業でコンフリクトを解決するか、クリックして次のページに進んでください。', "You have personalised settings, so changes made here won't be visible to you. To change your personalised settings, click ": 'ユーザ固有の設定を行っている場合、ここで変更を行っても、目に見える変化がない場合があります。ユーザ固有の設定を行うには、以下をクリックしてください。 ', "You have unsaved changes. Click Cancel now, then 'Save' to save them. Click OK now to discard them.": '変更が保存されていません。「キャンセル」をクリックした後、「保存」を押して保存してください。変更を破棄するには、OK をクリックしてください。', "You haven't made any calculations": '計算が実行されていません', "You haven't yet Verified your account - please check your email": '利用者登録はまだ有効ではありません。', "couldn't be parsed so NetworkLinks not followed.": 'パースできなかったため、 NetworkLinksはフォローされません。', "includes a GroundOverlay or ScreenOverlay which aren't supported in OpenLayers yet, so it may not work properly.": 'OpenLayersで未サポートの機能である GroundOverlayやScreenOverlayを含むため、不具合がある可能性があります。', '"update" is an optional expression like "field1=\'newvalue\'". 
You cannot update or delete the results of a JOIN': '"更新" は、"field1=\'newvalue\'" のようなオプションです。"JOIN の結果を更新または削除することはできません。', '# Houses Damaged': '損傷した家屋の数', '# Houses Flooded': '浸水した家屋数', '# People Needing Food': '食料が必要な人の数', '# People at Risk From Vector-Borne Diseases': '生物が媒介する疾病の危険性がある人の数', '# People without Access to Safe Drinking-Water': '安全な飲料水が確保されていない人の数', '# of Houses Damaged': '損壊した家屋数', '# of Houses Destroyed': '全壊した家屋数', '# of International Staff': '国外スタッフ人数', '# of National Staff': '国内スタッフの人数', '# of People Affected': '被災者数', '# of People Deceased': '死亡者数', '# of People Injured': '負傷者数', '# of Vehicles': '車両数', '%s Create a new site or ensure that you have permissions for an existing site.': '%s 新しいサイトを作成するか既存のサイトに対する権限を持っているかどうか確認して下さい', '%s rows deleted': '%s 行を削除しました', '%s rows updated': '%s 行を更新しました', '& then click on the map below to adjust the Lat/Lon fields': 'そして下の地図をクリックして、緯度 / 経度フィールドを調節してください', '* Required Fields': '* は必須項目です', '0-15 minutes': '0-15 分間', '1 Assessment': '1アセスメント', '1 location, shorter time, can contain multiple Tasks': '1つの地域における短期間の活動を表し、1つの支援活動のなかで複数のタスクを実行します。', '1-3 days': '1-3 日間', '1. Fill the necessary fields in BLOCK letters.': '1. 太字の項目は必須項目です.', '15-30 minutes': '15-30 分間', '2 different options are provided here currently:': '現在は、2種類のオプションが提供されています。', '2. Always use one box per letter and leave one box space to seperate words.': '2. 一マス一文字で、単語の間は一マス開けてください。', '2x4 Car': '2x4 車両', '30-60 minutes': '30-60 分間', '4-7 days': '4-7 日間', '4x4 Car': '四輪駆動車', '8-14 days': '8-14 日間', 'A Marker assigned to an individual Location is set if there is a need to override the Marker assigned to the Feature Class.': '機能クラスに設定したマーカーを上書きする必要があれば、個々のロケーションに設定したマーカーを設定します', 'A Reference Document such as a file, URL or contact person to verify this data. 
You can type the 1st few characters of the document name to link to an existing document.': 'このデータ内容を確認できるファイルやURL情報、連絡先担当者などのリファレンスデータを記載します。最初の何文字かを入力することで、既存の類似文書にリンクすることが可能です。', 'A Warehouse is a physical place which contains Relief Items available to be Distributed.': '倉庫とは、救援物資の配布を行うことができる物理的な地点を意味します。', 'A Warehouse/Site is a physical location with an address and GIS data where Items are Stored. It can be a Building, a particular area in a city or anything similar.': '倉庫 / サイトとは、物資の保管場所のことであり、住所とGIS情報が付帯します。特定の建物や、市内の特定地域などがあげられます。', 'A brief description of the group (optional)': 'グループの詳細(オプション)', 'A file downloaded from a GPS containing a series of geographic points in XML format.': 'GPSからダウンロードしたファイルには、その地点に関する様々な情報がXML形式で保存されています。', 'A file in GPX format taken from a GPS whose timestamps can be correlated with the timestamps on the photos to locate them on the map.': 'GPSから取得したGPX形式のファイル。タイムスタンプは画像と関連づけられ、地図上に配置することができます。', 'A library of digital resources, such as photos, documents and reports': '写真や文書、レポートなど、電子化された資料', 'A location group is a set of locations (often, a set of administrative regions representing a combined area). Member locations are added to a location group here. Location groups may be used to filter what is shown on the map and in search results to only entities covered by locations in the group. A location group can be used to define the extent of an affected area, if it does not fall within one administrative region. 
Location groups can be used in the Regions menu.': 'ロケーションを取りまとめた単位はロケーショングループと呼称されます(たいていは、一定範囲内の管理対象地域をさします)。このページから、ロケーションをグループに追加することができます。ロケーショングループ単位で地図上に表示させたり、検索結果として表示させることが可能となります。グループを使用することで、1つの管理地域に縛られない被災地域定義が可能となります。ロケーショングループは、地域メニューから定義できます。', 'A location group must have at least one member.': 'ロケーショングループには、メンバーが最低一人必要です。', 'A place within a Site like a Shelf, room, bin number etc.': 'Site内に存在する施設。例えば棚、部屋、Binの番号など', 'A snapshot of the bin or additional documents that contain supplementary information about it can be uploaded here.': 'binのスナップショットや追加情報の更新は、ここから行えます。', 'A snapshot of the location or additional documents that contain supplementary information about the Site Location can be uploaded here.': 'ロケーションのスナップショットや、Siteに関する追加情報の更新は、ここから行えます。', 'A snapshot of the location or additional documents that contain supplementary information about the Site can be uploaded here.': 'ロケーションのスナップショットや、Siteに関する追加情報の更新は、ここから行えます。', 'A survey series with id %s does not exist. Please go back and create one.': 'ID番号 %sに関するsurvey seriesは存在しません。「戻る」ボタンを押して、新規に作成してください。', 'ABOUT THIS MODULE': 'このモジュールについて', 'ABOUT': '概要', 'ACCESS DATA': 'アクセスデータ', 'ANY': '全て', 'API is documented here': 'APIに関する文書はこちら', 'ATC-20 Rapid Evaluation modified for New Zealand': 'ニュージーランド向けに変更したATC-20(建物の簡易安全性評価プロセス)', 'ATC-20': 'ATC-20(建物の簡易安全性評価プロセス)', 'Abbreviation': '省略', 'Ability to Fill Out Surveys': '調査記入能力', 'Ability to customize the list of details tracked at a Shelter': '避難所で追跡する詳細のリストのカスタマイズ可否', 'Ability to customize the list of human resource tracked at a Shelter': '避難所で追跡する詳細のリストのカスタマイズの可否', 'Ability to customize the list of important facilities needed at a Shelter': '避難所で追跡する人的資源のリストのカスタマイズの可否', 'Ability to track partial fulfillment of the request': '支援要請の部分的な達成度の追跡可否', 'Ability to view Results of Completed and/or partially filled out Surveys': '完了または一部完了した聞き取り調査の結果をみる機能', 'About Sahana Eden': 'Sahana Edenについて', 'About Sahana': 'Sahanaについて', 'About this module': 'モジュールの詳細', 
'About': '情報', 'Access denied': 'アクセスが拒否されました', 'Access to Shelter': '避難所へのアクセス', 'Access to education services': '学校へのアクセス', 'Accessibility of Affected Location': '被災地域へのアクセス方法', 'Account registered, however registration is still pending approval - please wait until confirmation received.': '利用者登録の申請を受け付けました。所属団体またはサイト管理者による承認を待っています。', 'Acronym': '略称/イニシャル', 'Actionable by all targeted recipients': 'すべての対象受信者にとって実用的な', 'Actionable only by designated exercise participants; exercise identifier SHOULD appear in <note>': '指定された参加者のみ実施可能です。<note>の中に行使するためのIDがあることが必要です。', 'Actionable': '対応可能', 'Actioned?': '実施済み?', 'Actions taken as a result of this request.': '要請に対して行われるアクション', 'Actions': 'アクション', 'Active Problems': '対処中の問題', 'Activities Map': '支援活動マップ', 'Activities are blue.': '支援活動(アクティビティ)は青色で表示されます。', 'Activities matching Assessments:': 'アセスメントに適合した支援活動', 'Activities of boys 13-17yrs before disaster': '災害発生前の13-17歳男子の活動状況', 'Activities of boys 13-17yrs now': '現在の13-17歳男子の活動状況', 'Activities of boys <12yrs before disaster': '災害発生前の12歳以下男子の活動状況', 'Activities of boys <12yrs now': '現在の12歳以下男子の活動状況', 'Activities of children': '子供たちの活動', 'Activities of girls 13-17yrs before disaster': '災害発生前の13-17歳女子の活動状況', 'Activities of girls 13-17yrs now': '現在の13-17歳女子の活動状況', 'Activities of girls <12yrs before disaster': '災害発生前の12歳以下女子の活動状況', 'Activities of girls <12yrs now': '現在の12歳以下女子の活動状況', 'Activities': '支援活動', 'Activity Added': '支援活動を追加しました', 'Activity Deleted': '支援活動を削除しました', 'Activity Details': '支援活動の詳細', 'Activity Report': '支援活動レポート', 'Activity Reports': '支援活動レポート', 'Activity Type': '支援活動タイプ', 'Activity Updated': '支援活動を更新しました', 'Activity': '支援活動', 'Add Address': 'アドレスを追加', 'Add Activity Type': '支援活動タイプを追加', 'Add Aid Request': '治療要請を追加', 'Add Alternative Item': '代わりの物資を追加', 'Add Assessment Summary': 'アセスメントの要約を追加', 'Add Assessment': 'アセスメントを追加', 'Add Baseline Type': '基準値タイプの追加', 'Add Baseline': '基準値の追加', 'Add Bin Type': 'Bin Typeを追加', 'Add Bins': 'Binを追加', 'Add Bundle': 
'Bundleを追加', 'Add Catalog.': 'カタログを追加', 'Add Category': 'カテゴリを追加', 'Add Category<>Sub-Category<>Catalog Relation': 'Category<>Sub-Category<>Catalog 関係を追加', 'Add Config': '設定を追加', 'Add Contact': '連絡先を追加', 'Add Contact Information': '連絡先情報を追加', 'Add Credential': '証明書の追加', 'Add Credentials': '証明書の追加', 'Add Detailed Evaluation': '詳細な評価を追加', 'Add Disaster Victims': '被災者情報を追加', 'Add Distribution.': '配給所を追加', 'Add Donor': '資金提供組織を追加', 'Add Flood Report': '洪水レポートを追加', 'Add Group Member': 'グループメンバを追加', 'Add Identity': 'IDを追加', 'Add Image': '画像を追加', 'Add Impact Type': '災害影響のタイプを追加', 'Add Impact': '被災状況の追加', 'Add Inventory Item': '備蓄物資を追加します', 'Add Inventory Store': '物資集積地点を追加', 'Add Item (s)': '物資を追加', 'Add Item Catalog': '物資カタログを追加', 'Add Item Category': '救援物資カタログカテゴリを追加', 'Add Item Sub-Category': '救援物資サブカテゴリを追加', 'Add Item to Request': '要求する支援物資の登録', 'Add Item to Shipment': '輸送に物資を追加する', 'Add Item': '物資を追加', 'Add Key': 'Keyを追加', 'Add Kit': 'Kitを追加', 'Add Level 1 Assessment': 'レベル1アセスメントを追加', 'Add Level 2 Assessment': 'レベル2アセスメントを追加', 'Add Line': '行を追加', 'Add Location Group': 'ロケーショングループを追加', 'Add Locations': 'ロケーションを追加', 'Add Log Entry': 'ログエントリを追加', 'Add Member': 'メンバを追加', 'Add Membership': 'メンバシップを追加', 'Add Message': 'メッセージを追加', 'Add Need Type': '需要タイプを追加', 'Add Need': '要求を追加', 'Add New Aid Request': '援助要請を新規追加', 'Add New Assessment Summary': '新規アセスメントの要約を追加', 'Add New Baseline Type': '基準値タイプの新規追加', 'Add New Baseline': '新しい基準値を追加', 'Add New Bin Type': 'Bin Typeを新規追加', 'Add New Bin': 'Binを新規追加', 'Add New Budget': '予算を新規追加', 'Add New Bundle': 'Bundleを新規追加', 'Add New Cluster Subsector': 'クラスタのサブセクタを新規作成', 'Add New Cluster': 'クラスタを新規追加', 'Add New Commitment Item': '物資コミットメントを新規追加', 'Add New Config': '設定を新規追加', 'Add New Distribution Item': '配給物資を新規追加', 'Add New Distribution': '配給所を新規追加', 'Add New Document': '文書を新規追加', 'Add New Donor': '資金提供組織を新規追加', 'Add New Entry': 'エントリを新規追加', 'Add New Flood Report': '洪水情報を新規追加', 'Add New Image': '画像を新規追加', 'Add New Impact Type': 
'災害影響のタイプを新規追加', 'Add New Impact': '新規影響を追加', 'Add New Inventory Item': '備蓄物資を新規追加', 'Add New Inventory Store': '物資集積場所を新規追加', 'Add New Item Catalog Category': '物資カタログカテゴリを新規追加', 'Add New Item Catalog': '物資カタログを新規追加', 'Add New Item Sub-Category': '物資サブカテゴリを新規追加', 'Add New Item to Kit': 'キットに救援物資を新規追加', 'Add New Key': 'Keyを新規追加', 'Add New Level 1 Assessment': 'レベル1アセスメントを新規追加', 'Add New Level 2 Assessment': 'レベル2アセスメントを新規追加', 'Add New Member': 'メンバを新規追加', 'Add New Membership': 'メンバシップを新規追加', 'Add New Metadata': 'メタデータを新規追加', 'Add New Need Type': '需要タイプを新規追加', 'Add New Need': '新しい要求を登録する', 'Add New Note': '追加情報を新規追加', 'Add New Peer': 'データ同期先を新規追加', 'Add New Position': '場所を新規追加', 'Add New Problem': '問題を新規追加', 'Add New Rapid Assessment': '被災地の現況アセスメントを新規追加', 'Add New Received Item': '受領した物資を新規追加', 'Add New Record': 'レコードを新規追加', 'Add New Request Item': '特定物資の要請を新規追加', 'Add New Request': '支援要請を新規追加', 'Add New Response': '支援要請を新規追加', 'Add New River': '河川情報を新規追加', 'Add New Role to User': 'ユーザに役割を新規割り当て', 'Add New Sent Item': '送った物資の追加', 'Add New Setting': '設定を新規追加', 'Add New Shipment to Send': '発送する輸送物資を新規追加', 'Add New Site': 'Siteを新規追加', 'Add New Solution': '解決案を提示する', 'Add New Staff Type': 'スタッフタイプを新規追加', 'Add New Staff': 'スタッフを新規追加', 'Add New Storage Location': '備蓄場所を新規追加', 'Add New Survey Answer': '新しい調査の回答を追加しました', 'Add New Survey Question': '調査項目を新規追加', 'Add New Survey Section': '新しい調査セクションを追加', 'Add New Survey Series': '新しい一連の調査を追加します', 'Add New Survey Template': 'Survey Templateを新規追加', 'Add New Team': 'チームを新規追加', 'Add New Ticket': 'チケットを新規追加', 'Add New Track': '追跡情報を新規追加', 'Add New Unit': '単位を新規追加', 'Add New User to Role': '新規ユーザに役割を割り当て', 'Add New Warehouse Item': '倉庫物資を新規追加', 'Add New': '新規追加', 'Add Note': 'ノートを追加', 'Add Peer': 'データ同期先を追加', 'Add Performance Evaluation': 'パフォーマンス評価を追加', 'Add Person': '人物情報を追加', 'Add Photo': '写真を追加', 'Add Point': 'ポイントを追加', 'Add Polygon': 'Polygonを追加', 'Add Position': '場所を追加', 'Add Problem': '問題を追加', 'Add Projections': '地図投影法を追加', 
'Add Question': '質問事項を追加', 'Add Rapid Assessment': '被災地の現況アセスメントを追加', 'Add Rapid Evaluation': '迅速評価を追加', 'Add Recipient Site': '受け取りSiteを追加', 'Add Recipient': '受け取り担当者を追加', 'Add Record': 'レコードを追加', 'Add Recovery Report': '遺体回収レポートを追加', 'Add Reference Document': 'リファレンス文書を追加', 'Add Report': 'レポートを追加', 'Add Request Detail': '支援要請の詳細を追加', 'Add Request Item': '物資の要請を追加します', 'Add Request': '支援要請を追加', 'Add Response': '返答を追加', 'Add Section': 'Sectionを追加', 'Add Sender Organization': '送付元団体を追加', 'Add Sender Site': '送付元Siteを追加', 'Add Setting': '設定を追加', 'Add Shipment Transit Log': '輸送履歴を追加', 'Add Shipment/Way Bills': '輸送費/渡航費を追加', 'Add Site': 'サイトを追加', 'Add Skill Types': 'スキルタイプを追加', 'Add Solution': '解決案を追加', 'Add Staff Type': 'スタッフタイプを追加', 'Add Staff': 'スタッフを追加', 'Add Storage Bin ': 'Storage Binを追加 ', 'Add Storage Bin Type': 'Storage Bin Typeを追加', 'Add Storage Location': '備蓄地点を追加', 'Add Sub-Category': 'サブカテゴリを追加', 'Add Subscription': '寄付金情報を追加', 'Add Survey Answer': '調査の回答を追加', 'Add Survey Question': '聞き取り調査項目を追加', 'Add Survey Section': '調査セクションの追加', 'Add Survey Series': '一連の調査を追加', 'Add Survey Template': '調査テンプレートを追加', 'Add Team Member': 'メンバを追加', 'Add Team': 'チームを追加', 'Add Ticket': 'チケットを追加', 'Add Unit': '単位を追加', 'Add Volunteer Registration': 'ボランティア登録を追加', 'Add Warehouse Item': '倉庫物資を追加', 'Add a Reference Document such as a file, URL or contact person to verify this data. 
If you do not enter a Reference Document, your email will be displayed instead.': 'ファイル、URL、あるいは、このデータの確認を行なう連絡先のような参照文書を追加します。参照文書を入力しない場合、代わりにあなたのメールが表示されます。', 'Add a Volunteer': 'ボランティアの追加', 'Add a new Relief Item.': '救援物資を新規追加', 'Add a new Site from where the Item is being sent.': 'この救援物資の送付先を新規サイトとして追加', 'Add a new Site where the Item is being sent to.': 'この物資の送付先サイトを新規追加', 'Add an Photo.': '写真を追加.', 'Add location': 'ロケーションを追加', 'Add main Item Category.': '主要なアイテムカテゴリを追加', 'Add main Item Sub-Category.': '主要な救援物資サブカテゴリを追加', 'Add new Group': 'グループを新規追加', 'Add new Individual': '個人を新規追加', 'Add new position.': '新しいポジションを追加してください。', 'Add new project.': 'プロジェクトを新規追加', 'Add new staff role.': 'スタッフの権限を新規追加', 'Add or Update': '追加、あるいは更新', 'Add the Storage Bin Type.': 'Storage Binタイプを追加します。', 'Add the Storage Location where this bin is located.': 'binが保存されている貯蔵場所を追加します。', 'Add the Storage Location where this this Bin belongs to.': 'このBinがある備蓄地点を追加します。', 'Add the main Warehouse/Site information where this Bin belongs to.': 'その物資の備蓄スペースとなっている倉庫/サイトの情報を追加してください。', 'Add the main Warehouse/Site information where this Item is to be added.': 'この物資が追加されることになっている主要な倉庫 / サイトの情報を追加してください。', 'Add the main Warehouse/Site information where this Storage location is.': 'その物資の備蓄場所となっている倉庫/サイトの情報を追加してください。', 'Add the unit of measure if it doesnt exists already.': '距離単位が未登録の場合、単位を追加します。', 'Add to Bundle': 'Bundleへの登録', 'Add to Catalog': 'カタログへ登録', 'Add to budget': '予算項目へ登録', 'Add': '追加', 'Add/Edit/Remove Layers': 'レイヤを追加/編集/削除', 'Added to Group': 'メンバシップを追加しました', 'Added to Team': 'メンバシップを追加しました', 'Additional Beds / 24hrs': '追加ベッド予測数 / 24h', 'Additional Comments': '追加コメント', 'Additional quantity quantifier – i.e. 
“4x5”.': '数量を表す追記(例 「4x5」)', 'Address Details': '住所情報の詳細', 'Address Type': '住所情報タイプ', 'Address added': '住所情報を追加しました', 'Address deleted': '住所情報を削除しました', 'Address updated': '住所情報を更新しました', 'Address': '住所情報', 'Addresses': '住所', 'Adequate food and water available': '適切な量の食料と水が供給されている', 'Adequate': '適正', 'Adjust Item(s) Quantity': 'アイテム量の修正', 'Adjust Items due to Theft/Loss': 'アイテム量の修正(盗難/紛失のため)', 'Admin Email': '管理者の電子メール', 'Admin Name': '管理者名', 'Admin Tel': '管理者の電話番号', 'Admin': '管理者', 'Administration': '管理', 'Administrator': '管理者', 'Admissions/24hrs': '患者増加数/24h', 'Adolescent (12-20)': '青年(12-20)', 'Adolescent participating in coping activities': '未成年が災害対応に従事', 'Adult (21-50)': '成人(21-50)', 'Adult ICU': '成人 ICU', 'Adult Psychiatric': '精神病の成人', 'Adult female': '成人女性', 'Adult male': '成人男性', 'Adults in prisons': '刑務所で服役中の成人がいる', 'Advanced Bin Search': 'Binの詳細検索', 'Advanced Catalog Search': 'カタログの詳細検索', 'Advanced Category Search': '詳細カテゴリー検索', 'Advanced Item Search': '詳細な物資検索', 'Advanced Location Search': '詳細な位置検索', 'Advanced Site Search': 'Siteの詳細検索', 'Advanced Sub-Category Search': 'サブカテゴリの詳細検索', 'Advanced Unit Search': '高度な単位検索', 'Advanced': '詳細', 'Advanced:': 'もっと正確に:', 'Advisory': '注意喚起', 'After clicking on the button, a set of paired items will be shown one by one. 
Please select the one solution from each pair that you prefer over the other.': 'このボタンをクリックすると、解決法のペアが順に表示されます。各ペアから、最も適する項目を1つずつ選択してください。', 'Age Group': '年齢グループ', 'Age group does not match actual age.': '年齢グループが実際の年齢と一致しません。', 'Age group': '年齢グループ', 'Aggravating factors': '悪化要因', 'Aggregate Items': 'アイテムの集約', 'Agriculture': '農業', 'Aid Request Details': '援助要請の詳細', 'Aid Request added': '援助要請を追加しました', 'Aid Request deleted': '救援要請を追加しました', 'Aid Request updated': '援助要請を更新しました', 'Aid Request': '治療要請', 'Aid Requests': '援助要請', 'Air Transport Service': '物資空輸サービス', 'Aircraft Crash': '飛行機事故', 'Aircraft Hijacking': '航空機ハイジャック', 'Airport Closure': '空港閉鎖', 'Airspace Closure': '離陸地点閉鎖', 'Alcohol': 'アルコール', 'Alert': 'アラート', 'All Inbound & Outbound Messages are stored here': '送受信した全てのメッセージはここに格納されます。', 'All Locations': '全てのロケーション', 'All Records': 'すべてのレコード', 'All Requested Items': '物資要請一覧', 'All Resources': 'すべての資源', 'All data provided by the Sahana Software Foundation from this site is licenced under a Creative Commons Attribution licence. However, not all data originates here. 
Please consult the source field of each entry.': 'このサイトのSahana Software Foundationで提供されるデータのライセンスは、CCA (Creative Commons Attribution licence)となります。しかし、すべてのデータの発生源が、このサイトであるとは限りません。詳細は、各エントリの情報ソースの項目に記載されています。', 'All': '全て', 'Allowed to push': 'プッシュが許可済みである', 'Allows a Budget to be drawn up': '予算の策定を行ないます', 'Allows authorized users to control which layers are available to the situation map.': '認証済みユーザーが「状況地図のどのレイヤが利用できるか」を制御することを許可します。', 'Alternative Item Details': '代わりの品物についての詳細', 'Alternative Item added': '代わりの物資を追加しました', 'Alternative Item deleted': '代わりの品物が削除されました', 'Alternative Item updated': '代わりの物資を更新しました', 'Alternative Item': '代わりの物資', 'Alternative Items': '代わりとなる物資', 'Alternative infant nutrition in use': '利用中の乳児用代替食', 'Alternative places for studying available': '学校以外の場所を学習に利用可能である', 'Alternative places for studying': '授業開設に利用可能な施設', 'Ambulance Service': '救急サービス', 'An Inventory Store is a physical place which contains Relief Items available to be Distributed.': '物資集積場所とは、救援物資の配給能力をもつ、物理的な場所を指します。', 'An intake system, a warehouse management system, commodity tracking, supply chain management, procurement and other asset and resource management capabilities.': '物資の受け入れ、貯蔵設備の管理、必要な物資の記録、サプライチェーン・マネジメント、調達、その他様々な資産やリソースの管理といった機能。', 'An item which can be used in place of another item': '他の物資の代わりに使う物資', 'Analysis of Completed Surveys': '完了したフィードバックの分析', 'Animal Die Off': '動物の死', 'Animal Feed': '動物のエサ', 'Animals': '動物', 'Answer Choices (One Per Line)': '選択肢(一行に一つ)', 'Anthropology': '人類学', 'Antibiotics available': '抗生物質が利用可能', 'Antibiotics needed per 24h': '24時間ごとに必要な抗生物質', 'Any available Metadata in the files will be read automatically, such as Timestamp, Author, Latitude & Longitude.': 'ファイル内の利用可能なすべてのメタデータ(タイムスタンプ、作成者、緯度経度等)を自動的に読み込みます。', 'Any comments about this sync partner.': 'データの同期先に関するコメント', 'Apparent Age': '年齢(外見)', 'Apparent Gender': '性別(外見)', 'Application Permissions': 'アプリケーションに対する権限', 'Application': '申請', 'Applications': 'アプリケーション', 'Appropriate 
clothing available': '適切な衣料が利用可能である', 'Appropriate cooking equipment/materials in HH': '世帯内にて適切な調理器具/食材が利用可能である', 'Approved': '承認されました', 'Approver': '承認者', 'Approx. number of cases/48h': '事象の発生概数/48h', 'Approximately how many children under 5 with diarrhea in the past 48 hours?': '過去48時間以内に発生した、5歳未満小児の下痢症状発生件数を記載してください。概数でかまいません', 'Archive not Delete': 'Archiveを削除しない', 'Arctic Outflow': '北極気団の南下', 'Are basic medical supplies available for health services since the disaster?': '災害発生後、基本的な医療行為を行えるよう、ヘルスサービスに対して供給があったかどうかを記載します', 'Are breast milk substitutes being used here since the disaster?': '災害発生後、母乳代替品が使われているかどうかを記載します', 'Are the areas that children, older people, and people with disabilities live in, play in and walk through on a daily basis physically safe?': '日中時間帯、この地域での生活や遊び、通行によって、未成年や高齢者、障碍者に肉体的な危害が及ぶ可能性があるかを記載します', 'Are the chronically ill receiving sufficient care and assistance?': '慢性病の罹患者に対して、十分なケアと介護が行われているかを記載します', 'Are there adults living in prisons in this area?': 'この地域で刑務所に収容されている成人がいるかどうかを記載してください', 'Are there alternative places for studying?': '学校以外に学習を行える場所があるかどうかを記載してください', 'Are there cases of diarrhea among children under the age of 5?': '5歳未満の幼児に下痢症状が発生しているかどうかを記載してください', 'Are there children living in adult prisons in this area?': 'この地域で、成人用刑務所に収容されている未成年がいるかどうかを記載してください', 'Are there children living in boarding schools in this area?': 'この地域で、寄宿舎に居住している未成年がいるかどうかを記載してください', 'Are there children living in homes for disabled children in this area?': 'この地域で、障がいのある子供の世話をするために家にいる未成年がいるかどうかを記載してください', 'Are there children living in juvenile detention in this area?': 'この地域で、少年院に収容されている未成年がいるかどうかを記載してください', 'Are there children living in orphanages in this area?': 'この地域で、孤児となった子供は居ますか?', 'Are there children with chronical illnesses in your community?': '慢性疾患をもった子どもが共同体の中にいるかどうかを記載してください', 'Are there health services functioning for the community since the disaster?': '災害発生後、共同体で医療サービスが機能しているかどうかを記載してください', 'Are there older people living in care homes 
in this area?': 'この地域で、介護施設に居住している高齢者がいるかどうかを記載してください', 'Are there older people with chronical illnesses in your community?': 'この共同体のなかで、慢性疾患を患っている高齢者がいるかどうかを記載してください', 'Are there people with chronical illnesses in your community?': 'この共同体の中で、慢性疾患を患っている人物がいるかどうかを記載してください', 'Are there separate latrines for women and men available?': 'トイレが男女別になっているかどうかを記載してください', 'Are there staff present and caring for the residents in these institutions?': 'これら施設の居住者に対して、ケアと介護を行えるスタッフが存在するかどうかを記載してください', 'Area': 'エリア', 'Areas inspected': '調査済み地域', 'Assessment Details': 'アセスメントの詳細', 'Assessment Reported': 'アセスメントを報告しました', 'Assessment Summaries': 'アセスメントの要約', 'Assessment Summary Details': 'アセスメント要約の詳細', 'Assessment Summary added': 'アセスメントの要約を追加しました', 'Assessment Summary deleted': 'アセスメントの要約を削除しました', 'Assessment Summary updated': 'アセスメントの要約を更新しました', 'Assessment Type': 'アセスメントタイプ', 'Assessment added': 'アセスメントを追加しました', 'Assessment admin level': 'アセスメントの管理レベル', 'Assessment and Activities Gap Analysis Map': 'アセスメントと活動のギャップについての解析マップ', 'Assessment and Activities Gap Analysis Report': 'アセスメントと支援活動のギャップ解析レポート', 'Assessment deleted': 'アセスメントを削除しました', 'Assessment timeline': 'アセスメントタイムライン', 'Assessment updated': 'アセスメントを更新しました', 'Assessment': 'アセスメント', 'Assessments Needs vs. 
Activities': '需要アセスメントと支援活動のギャップ', 'Assessments and Activities': 'アセスメントと支援活動', 'Assessments are shown as green, yellow, orange, red.': 'アセスメントは、緑・黄・オレンジ・赤のいずれかの色で表されます。', 'Assessments are structured reports done by Professional Organizations - data includes WFP Assessments': 'アセスメントとは、専門団体によって作成された調査文書のことを指します。データには、WFP(国連世界食糧計画)アセスメントも含まれます', 'Assessments are structured reports done by Professional Organizations': 'アセスメントとは、専門団体によって作成された調査文書のことを指します。', 'Assessments': 'アセスメント', 'Assessments:': 'アセスメント:', 'Assessor': '査定実施者', 'Asset Assigned': '資産割り当て', 'Asset Assignment Details': '資産割り当ての詳細', 'Asset Assignments deleted': '資産の割り当てを削除しました', 'Asset Assignments updated': '物資割り当てを更新しました', 'Asset Assignments': '資産割り当て', 'Asset Details': '資産の詳細', 'Asset Management': '資産管理', 'Asset Number': '資産番号', 'Asset added': '資産を追加しました', 'Asset deleted': '資産を削除しました', 'Asset updated': '資産を更新しました', 'Asset': '資産', 'Assets': '資産', 'Assign Asset': '資産割り当て', 'Assign Storage Location': '蓄積地点の割り当て', 'Assign to Org.': '組織に割り当て', 'Assigned To': '担当者', 'Assigned to': '担当者', 'Assigned': '割り当てられた', 'Assignments': '割り当て', 'Assistance for immediate repair/reconstruction of houses': '緊急の修理/家屋復旧の手伝い', 'Assistant': 'アシスタント', 'At/Visited Location (not virtual)': '実際に訪問した/訪問中のロケーション', 'Attend to information sources as described in <instruction>': '<instruction>に記載されている情報ソースへの参加', 'Attribution': '属性', 'Audit Read': '監査報告書の読み込み', 'Audit Write': '監査報告書の書き込み', 'Author': '作者', 'Automotive': '車両', 'Availability': 'ボランティア期間', 'Available Alternative Inventory Items': '利用可能な他の物資', 'Available Beds': '利用可能なベッド数', 'Available Inventory Items': '利用可能な倉庫内の物資', 'Available Messages': '利用可能なメッセージ', 'Available Records': '利用可能なレコード', 'Available databases and tables': '利用可能なデータベースおよびテーブル', 'Available for Location': '活動可能な地域', 'Available from': 'ボランティア開始日', 'Available in Viewer?': 'ビューワ内で利用可能?', 'Available until': 'ボランティア終了日', 'Availablity': '活動期間', 'Avalanche': '雪崩', 'Avoid the subject event as per the <instruction>': 
'<instruction>に従って対象の事象を避ける', 'Babies who are not being breastfed, what are they being fed on?': '乳児に対して母乳が与えられない場合、どうやって乳幼児の食事を確保しますか?', 'Baby And Child Care': '乳幼児へのケア', 'Background Color for Text blocks': 'テキストブロックの背景色', 'Background Color': '背景色', 'Bahai': 'バハイ', 'Baldness': '禿部', 'Balochi': 'バロチ語', 'Banana': 'バナナ', 'Bank/micro finance': '銀行/マイクロファイナンス', 'Barricades are needed': 'バリケードが必要', 'Base Layer?': '基本レイヤ?', 'Base Layers': '基本レイヤ', 'Base Location': '基本となるロケーション', 'Base Unit': '基本単位', 'Baseline Number of Beds': '平常時のベッド設置数', 'Baseline Type Details': '基準値タイプの詳細', 'Baseline Type added': '基準値タイプを追加しました', 'Baseline Type deleted': '基準値のタイプを削除しました', 'Baseline Type updated': '基準値タイプを更新しました', 'Baseline Type': '基準値タイプ', 'Baseline Types': '基準値の種類', 'Baseline added': '基準値を追加しました', 'Baseline deleted': '基準値を削除しました', 'Baseline number of beds of that type in this unit.': 'この施設における、通常状態のベッド収容数です。', 'Baseline updated': '基準値を更新しました', 'Baselines Details': '基準値の詳細', 'Baselines': '基準値', 'Basic Assess.': '基本アセスメント', 'Basic Assessment Reported': 'ベーシック・アセスメントを報告しました', 'Basic Assessment': '基本アセスメント', 'Basic Details': '基本情報', 'Basic information on the requests and donations, such as category, the units, contact details and the status.': '支援要請と寄付に関する基本情報です。カテゴリ、単位、連絡先詳細および状態等が記載されています。', 'Basic medical supplies available prior to disaster': '災害発生以前 基本的な医療行為の提供', 'Basic medical supplies available since disaster': '災害発生後  基本的な医療行為の提供', 'Basic reports on the Shelter and drill-down by region': '避難所の基本レポートと、地域による絞り込み', 'Basic': '基本', 'Baud rate to use for your modem - The default is safe for most cases': 'モデムを使用するためのボーレートです。大抵の場合はデフォルトが安全です。', 'Baud': 'ボー値', 'Beam': '梁', 'Bed Capacity per Unit': '施設ごとのベッド最大収容数', 'Bed Capacity': 'ベッド最大収容数', 'Bed Type': 'ベッド種別', 'Bed type already registered': 'ベッドのタイプは既に登録済みです。', 'Bedding materials available': '寝具が利用可能である', 'Below ground level': '地下', 'Beneficiary Type': '受益者タイプ', 'Biological Hazard': '生物災害', 'Biscuits': 'ビスケット', 'Blizzard': '吹雪', 'Blood 
Type (AB0)': '血液型 (AB0式)', 'Blowing Snow': '地吹雪', 'Boat': 'ボート', 'Bodies found': '未回収の遺体', 'Bodies recovered': '回収済みの遺体', 'Body Recovery Reports': '遺体回収レポート', 'Body Recovery Request': '遺体回収の要請', 'Body Recovery Requests': '遺体回収の要請', 'Body': '本文', 'Bomb Explosion': '爆発が発生', 'Bomb Threat': '爆発の危険性', 'Bomb': '爆発物', 'Border Color for Text blocks': 'テキストブロックの枠色', 'Bounding Box Insets': '領域を指定した枠組みへ差し込む', 'Bounding Box Size': '領域を指定した枠組みのサイズ', 'Boys 13-18 yrs in affected area': '影響地域内の13-18歳の男子数', 'Boys 13-18 yrs not attending school': '学校に来ていなかった13-18歳の男子数', 'Boys 6-12 yrs in affected area': '影響地域内の6-12歳の男子数', 'Boys 6-12 yrs not attending school': '学校に来ていなかった6-12歳の男子数', 'Brand Details': '銘柄の詳細', 'Brand added': '銘柄を追加しました', 'Brand deleted': '銘柄が削除されました', 'Brand updated': '銘柄が更新されました', 'Brand': '銘柄', 'Brands': '銘柄', 'Breast milk substitutes in use since disaster': '災害発生後から母乳代替品を使用している', 'Breast milk substitutes used prior to disaster': '災害前から母乳代替品を使用していた', 'Bricks': 'レンガ', 'Bridge Closed': '橋梁(通行止め)', 'Bucket': 'バケツ', 'Buddhist': '仏教徒', 'Budget Details': '予算の詳細', 'Budget Updated': '予算を更新しました', 'Budget added': '予算を追加しました', 'Budget deleted': '予算を削除しました', 'Budget updated': '予算を更新しました', 'Budget': '予算', 'Budgeting Module': '予算編成モジュール', 'Budgets': '予算編成', 'Buffer': 'バッファ', 'Bug': 'バグ', 'Building Aide': '建設援助', 'Building Assessment': '建物のアセスメント', 'Building Assessments': '建築物アセスメント', 'Building Collapsed': '崩壊した建物', 'Building Name': '建物名', 'Building Safety Assessments': '建物の安全アセスメント', 'Building Short Name/Business Name': '建物の名前 / 会社名', 'Building or storey leaning': '建物または階層が傾いている', 'Built using the Template agreed by a group of NGOs working together as the': '例えばECB等、多くのNGOによって利用されている形式を使っての記録が可能です。', 'Bulk Uploader': 'まとめてアップロード', 'Bundle Contents': '小包の内容', 'Bundle Details': 'Bundleの詳細', 'Bundle Updated': 'バンドルを更新しました', 'Bundle added': 'バンドルを追加しました', 'Bundle deleted': 'バンドルを削除しました', 'Bundle updated': 'バンドル・セットを更新しました', 'Bundle': 'バンドル', 'Bundles': 'バンドル', 'Burn ICU': '熱傷 ICU', 
'Burn': '火傷(やけど)', 'Burned/charred': '火傷/炭化', 'Business damaged': 'ビジネスへの損害が発生している', 'By Inventory': '物資の送付元', 'By Person': '人物ごと', 'By Site': 'サイト別', 'By Warehouse': '送付元倉庫', 'CBA Women': 'CBA 女性', 'CN': '貨物運送状', 'CSS file %s not writable - unable to apply theme!': 'CSS ファイル %s が書き込み不可になっているため、テーマを適用することができません。', 'Calculate': '計算', 'Camp Coordination/Management': '仮泊施設間の調整 / 管理', 'Camp': '仮泊施設', 'Can only disable 1 record at a time!': '一度に1つしか無効にできません!', 'Can users register themselves for authenticated login access?': '新規ユーザが、他者の承認なしに自分を新規ユーザとして登録できるか?', 'Cancel Add': '追加を取り消す', 'Cancel Shipment': '輸送をキャンセルする', 'Cancel': 'キャンセル', 'Canceled': 'キャンセル', 'Candidate Matches for Body %s': 'Bodyに適合した候補者は %s', 'Canned Fish': '魚の缶詰', 'Cannot be empty': '必ず入力してください。', 'Cannot delete whilst there are linked records. Please delete linked records first.': 'リンクされたレコードがあるので削除できません。このレコードよりも先に、リンク先のレコードを削除してください。', 'Cannot disable your own account!': '自分自身のアカウントを無効にする事はできません', 'Capacity (Max Persons)': '収容可能数 (最大人数)', 'Capacity (W x D X H)': '収容可能面積 (W x D X H)', 'Capture Information on Disaster Victim groups (Tourists, Passengers, Families, etc.)': '被災者の個々のグループについて、情報を取得する (ツアー旅行者、滞在者、家族、など)', 'Capture Information on each disaster victim': '被災者情報を個別に把握する', 'Capturing organizational information of a relief organization and all the projects they have in the region': '個々の支援団体と、地域内で実行中の全てのプロジェクトを取得します', 'Capturing the essential services each Volunteer is providing and where': '各ボランティアの居場所と、提供している主要なサービスを取得する', 'Capturing the projects each organization is providing and where': '各団体の所在地と、提供している主要なサービスを取得します', 'Cardiology': '心臓病学', 'Cash available to restart business': '事業再開に必要な資金調達が可能', 'Cassava': 'キャッサバ', 'Casual Labor': '一般労働', 'Casualties': '犠牲者', 'Catalog Item added': '救援物資カタログにアイテムを追加しました', 'Catalog Item deleted': 'カタログアイテムを削除しました', 'Catalog Item updated': '救援物資カタログを更新しました', 'Catalog Item': '救援物資カタログ', 'Catalog Items': '物資カタログ', 'Catalog Name': 'カタログ名', 'Catalog': 'カタログ', 
'Category': 'カテゴリ', 'Category<>Sub-Category<>Catalog Relation added': 'Category<>Sub-Category<>Catalog 間の関係を追加しました', 'Category<>Sub-Category<>Catalog Relation deleted': 'Category<>Sub-Category<>Catalog 関係を削除しました', 'Category<>Sub-Category<>Catalog Relation updated': 'Category<>Sub-Category<>Catalog 間の関係を更新しました', 'Category<>Sub-Category<>Catalog Relation': 'Category<>Sub-Category<>Catalog 間の関係', 'Ceilings, light fixtures': '天井、照明あり', 'Central point to record details on People': '被災者や支援者など、関係者情報の集積を行ないます', 'Certificate Status': '認証状態', 'Certification': '有資格者', 'Change Password': 'パスワードの変更', 'Check for errors in the URL, maybe the address was mistyped.': '入力したURLに間違いがないか確認してください。', 'Check if the URL is pointing to a directory instead of a webpage.': 'URLがウェブページではなくディレクトリを指定しているか、確認してください。', 'Check outbox for the message status': '送信箱を調べてメッセージステータスを確認する', 'Check to delete': '削除項目にチェック', 'Check to delete:': '削除項目にチェック:', 'Check': '確認', 'Check-In': 'チェックイン', 'Check-Out': 'チェックアウト', 'Check-in': 'チェックイン', 'Check-out': 'チェックアウト', 'Checklist created': 'チェックリストを作成しました', 'Checklist deleted': 'チェックリストを削除しました', 'Checklist of Operations': '作業項目チェックリスト', 'Checklist updated': 'チェックリストを更新しました', 'Checklist': 'チェックリスト', 'Chemical Hazard': '化学災害', 'Chemical, Biological, Radiological, Nuclear or High-Yield Explosive threat or attack': '兵器による攻撃、脅威(化学兵器、生物兵器、放射能汚染、核兵器、高威力の爆発)', 'Chicken': 'ニワトリ', 'Child (2-11)': '子供 (2-11歳)', 'Child (< 18 yrs)': '子供 (18歳未満)', 'Child Abduction Emergency': '未成年誘拐警報', 'Child headed households (<18 yrs)': '代表者が未成年 (18歳以下)の世帯数', 'Child': '子供', 'Children (2-5 years)': '子供たち (2-5歳)', 'Children (5-15 years)': '子供たち(5-15歳)', 'Children (< 2 years)': '子供たち (2歳未満)', 'Children in adult prisons': '成人用刑務所に未成年がいる', 'Children in boarding schools': '寄宿制学校の児童がいる', 'Children in homes for disabled children': '障がい児施設にいる子ども', 'Children in juvenile detention': '少年院収容者がいる', 'Children in orphanages': '身寄りの無い人がいる', 'Children living on their own (without adults)': '未成年のみで自活(成人無し)', 
'Children not enrolled in new school': '新しい学校に入学していない子供', 'Children orphaned by the disaster': '被災のため孤児になった子供たち', 'Children separated from their parents/caregivers': '親(または親相当の後見人)とはぐれた子供の数', 'Children that have been sent to safe places': '安全な地域へ疎開済みの子供数', 'Children who have disappeared since the disaster': '災害発生後に行方不明の子供たち', 'Children with chronical illnesses': '慢性疾患をもつ子供がいる', 'Chinese (Taiwan)': '中国語 (台湾繁体字)', 'Cholera Treatment Capability': 'コレラ治療対応能力', 'Cholera Treatment Center': 'コレラ治療センター', 'Cholera Treatment': 'コレラの治療', 'Cholera-Treatment-Center': 'コレラ治療センター', 'Choose a new posting based on the new evaluation and team judgement. Severe conditions affecting the whole building are grounds for an UNSAFE posting. Localised Severe and overall Moderate conditions may require a RESTRICTED USE. Place INSPECTED placard at main entrance. Post all other placards at every significant entrance.': '新規の評価とチームの判定に基づいた新しいポスターを選択してください。建物全体が深刻な状態の場合「危険」を、一部は使える場合「制限あり」です。主要な出入口に「調査済み」プラカードを設置してください。全ての使用可能な出入口には他のプラカードを設置してください。', 'Choose': '選択', 'Choosing Skill and Resources of Volunteers': 'ボランティアのスキルとリソースを選択してください', 'Christian': 'キリスト教徒', 'Church': '教会', 'Circumstances of disappearance, other victims/witnesses who last saw the missing person alive.': '行方不明時の状況や、この人物の生存を最後に確認した人物についての情報を記載してください。', 'Civil Emergency': '市民緊急事態', 'Cladding, glazing': '被覆・外壁、ガラス板', 'Clear Selection': '選択をクリア', 'Click on the link %(url)s to reset your password': 'リンクをクリックしてください %(url)s パスワードのリセット', 'Click on the link %(url)s to verify your email': 'リンクをクリックしてください %(url)s 登録されたメールアドレスに間違いが無いことが確認されます', 'Client IP': 'クライアントIP', 'Clinical Laboratory': '臨床検査', 'Clinical Operations': '診療の人員数', 'Clinical Status': '診療状況', 'Close map': '地図を閉じる', 'Closed': '閉鎖中', 'Closure': '閉鎖・通行止め', 'Clothing': '衣服', 'Cluster Details': 'クラスタの詳細', 'Cluster Distance': 'クラスタ距離', 'Cluster Subsector Details': 'クラスタのサブクラスタの詳細', 'Cluster Subsector added': 'クラスタのサブセクタを追加しました', 'Cluster Subsector deleted': 'クラスタのサブセクタを削除しました', 
'Cluster Subsector updated': 'クラスタのサブセクタを更新しました', 'Cluster Subsector': 'クラスタのサブクラスタ', 'Cluster Subsectors': 'クラスタのサブセクタ', 'Cluster Threshold': 'クラスタのしきい値', 'Cluster added': 'クラスタを追加しました', 'Cluster deleted': 'クラスタを削除しました', 'Cluster updated': 'クラスタを更新しました', 'Cluster': 'クラスタ', 'Cluster(s)': 'クラスタ', 'Clusters': 'クラスタ', 'Code': 'プロジェクトコード', 'Cold Wave': '寒波', 'Collapse, partial collapse, off foundation': '全壊、一部損壊、基礎のずれ', 'Collective center': '収集センター', 'Color for Underline of Subheadings': 'サブヘッダのアンダーラインの色', 'Color of Buttons when hovering': 'ホバー時のボタンの色', 'Color of bottom of Buttons when not pressed': '押されなかった時のボタンの下部の色', 'Color of bottom of Buttons when pressed': 'ボタン押下時の下部の色', 'Color of dropdown menus': 'ドロップダウンメニューの色', 'Color of selected Input fields': '選択中の入力フィールドの色', 'Color of selected menu items': '選択中のメニューアイテムの色', 'Column Choices (One Per Line': 'カラム選択 (一行に一つ', 'Columns, pilasters, corbels': '円柱、付け柱、コーベル', 'Combined Method': '複数証跡の組み合わせ', 'Come back later. Everyone visiting this site is probably experiencing the same problem as you.': '復旧まで少々お待ちください。あなた以外の閲覧者にも、この表示がされています。', 'Come back later.': '復旧まで少々お待ちください', 'Comments': 'コメント', 'Commercial/Offices': '商業 / オフィス', 'Commit Date': '受け入れ日', 'Commit from %s': '%sからのコミット', 'Commit': 'コミット', 'Commit Status': '支援の引き受け状況', 'Commiting a changed spreadsheet to the database': '変更後のスプレッドシートをデータベースに反映します', 'Commitment Added': 'コミットメントを追加しました', 'Commitment Canceled': 'コミットをキャンセルしました', 'Commitment Details': 'コミットの詳細', 'Commitment Item Details': 'コミットされた救援物資の詳細', 'Commitment Item added': 'コミットの物資を追加しました', 'Commitment Item deleted': 'コミットされた救援物資を削除しました', 'Commitment Item updated': 'コミット物資を更新しました', 'Commitment Item': '物資のコミットメント', 'Commitment Items': 'コミットされた物資', 'Commitment Status': '支援の引き受け状況', 'Commitment Updated': 'コミットを更新しました', 'Commitment': 'コミットメント', 'Commitments': 'コミット', 'Committed By': '受け入れ団体/人', 'Committed': 'コミット済み', 'Committing Inventory': '引き受け中の倉庫', 'Communication problems': 'コミュニケーションの問題', 'Community 
Centre': 'コミュニティセンター', 'Community Health Center': '地域の医療センター', 'Community Member': 'コミュニティの構成員', 'Complete Unit Label for e.g. meter for m.': '単位を表すラベル。例えばメートルなら m など。', 'Complete': '完了', 'Completed': '完了', 'Complexion': '人種、肌色', 'Compose': 'メッセージ作成', 'Compromised': '易感染状態', 'Concrete frame': 'コンクリートのフレーム', 'Concrete shear wall': 'コンクリートせん断壁', 'Config added': '設定を追加しました', 'Config deleted': '設定を削除しました', 'Config updated': '設定を更新しました', 'Config': '設定', 'Configs': '設定', 'Configurations': '設定', 'Configure Run-time Settings': 'ランタイムの設定', 'Confirm Shipment Received': '配送物の受領を確認', 'Confirmed Incidents': '確認済みのインシデント', 'Confirmed': '確認済み', 'Conflict Details': 'コンフリクトの詳細', 'Conflict Resolution': 'データ競合の解決', 'Consignment Note': '出荷通知', 'Constraints Only': '制約のみ', 'Consumable': '消耗品', 'Contact Data': '連絡先データ', 'Contact Details': '連絡先の詳細', 'Contact Information Added': '連絡先情報を追加しました', 'Contact Information Deleted': '連絡先情報を削除しました', 'Contact Information Updated': '連絡先情報を更新しました', 'Contact Information': '連絡先情報', 'Contact Method': '問い合わせ方法', 'Contact Name': '連絡先名', 'Contact Person': '窓口担当者', 'Contact Phone': '連絡先電話番号', 'Contact details': '連絡先の詳細', 'Contact information added': '連絡先情報を追加しました', 'Contact information deleted': '連絡先情報を削除しました', 'Contact information updated': '連絡先情報を更新しました', 'Contact person in case of news or further questions (if different from reporting person). Include telephone number, address and email as available.': '詳細事項の質問や連絡を行なう際の連絡担当者を記載します(レポート報告者と異なる場合のみ)。電話番号、住所、電子メールなどを記載してください。', 'Contact person(s) in case of news or further questions (if different from reporting person). 
Include telephone number, address and email as available.': '情報伝達や追加質問を行う際の代表担当者(報告者と異なる場合のみ記載してください)。電話番号や住所、メールアドレスなどを指定できます。', 'Contact us': '問い合わせ', 'Contact': '連絡先', 'Contacts': '連絡先', 'Contents': '内容', 'Contradictory values!': '値が矛盾しています!', 'Contributor': '投稿者', 'Conversion Tool': '変換ツール', 'Cooking NFIs': '調理用器具', 'Cooking Oil': '調理油', 'Coordinate Conversion': '座標変換', 'Coping Activities': '一時対応活動', 'Copy any data from the one to be deleted into the one to keep': '削除する側の候補地から残す方の候補地へ、必要なデータを転載します。', 'Copy': 'コピー', 'Corn': 'とうもろこし', 'Cost Type': '料金種別', 'Cost per Megabyte': '1メガバイト毎に課金', 'Cost per Minute': '1分毎に課金', 'Country of Residence': '居住国', 'Country': '国', 'Create & manage Distribution groups to receive Alerts': 'アラートの送付先グループを作成・管理する', 'Create Activity Report': '支援活動レポートを追加', 'Create Activity Type': '支援活動タイプを追加', 'Create Activity': '支援活動を追加', 'Create Assessment': 'アセスメントを新規追加', 'Create Asset': '資産の追加', 'Create Bed Type': 'ベッドの種類を追加', 'Create Brand': '銘柄を追加', 'Create Budget': '予算を追加', 'Create Catalog Item': '物資カタログを追加', 'Create Catalog': 'カタログを追加', 'Create Checklist': 'チェックリストの作成', 'Create Cholera Treatment Capability Information': 'コレラ治療能力に関する情報の追加', 'Create Cluster Subsector': 'クラスタのサブセクタを追加', 'Create Cluster': 'クラスタを追加', 'Create Contact': '連絡先を追加', 'Create Dead Body Report': '遺体発見レポートを追加', 'Create Feature Layer': 'Feature Layerを追加', 'Create Group Entry': 'グループエントリの作成', 'Create Group': 'グループを追加', 'Create Hospital': '病院を新規追加', 'Create Identification Report': 'IDレポートを追加', 'Create Impact Assessment': '災害影響範囲アセスメントの作成', 'Create Import Job': 'Import Jobの作成', 'Create Incident Report': 'インシデントレポートを追加', 'Create Incident': 'インシデントを追加', 'Create Item Category': '物資カテゴリを追加', 'Create Item Pack': '救援物資パックの追加', 'Create Item': '救援物資を新規追加', 'Create Kit': 'キットを新規追加', 'Create Layer': 'レイヤを追加', 'Create Location': 'ロケーションを追加', 'Create Map Profile': '地図設定を追加', 'Create Marker': 'マーカーを追加', 'Create Member': 'メンバを追加', 'Create Mobile Impact Assessment': '災害影響範囲アセスメントをモバイル端末から作成', 
'Create Office': 'オフィスを追加', 'Create Organization': '団体を追加', 'Create Personal Effects': 'Personal Effectsを追加', 'Create Project': 'プロジェクトを追加', 'Create Projection': '地図投影法を追加', 'Create Rapid Assessment': '被災地の現況アセスメントを作成', 'Create Report': 'レポートを新規追加', 'Create Request': '支援要請を作成', 'Create Resource': 'リソースを追加', 'Create River': '河川情報を追加', 'Create Role': '役割を追加', 'Create Sector': '活動分野を追加', 'Create Service Profile': 'サービスプロファイルを追加', 'Create Shelter Service': '避難所における提供サービスを追加', 'Create Shelter Type': '避難所タイプを追加', 'Create Shelter': '避難所を追加', 'Create Skill Type': 'スキルタイプを追加', 'Create Skill': 'スキルを追加', 'Create Status': '状況を追加', 'Create Task': 'タスクを追加', 'Create Theme': 'テーマを追加', 'Create User': 'ユーザを追加', 'Create Volunteer': 'ボランティアの追加', 'Create Warehouse': '倉庫を追加', 'Create a Person': '人物情報を追加', 'Create a group entry in the registry.': '登録にグループエントリを作成。', 'Create, enter, and manage surveys.': '調査の作成、入力、管理を実施', 'Creation of Surveys': '聞き取り調査の新規作成', 'Credential Details': '証明書の詳細', 'Credential added': '証明書を追加しました', 'Credential deleted': '証明書を削除しました', 'Credential updated': '証明書を更新しました', 'Credentials': '証明書', 'Crime': '犯罪', 'Criteria': '基準', 'Currency': '通貨', 'Current Group Members': '現在のグループメンバ', 'Current Identities': '現在のID', 'Current Location': '現在のロケーション', 'Current Log Entries': '現在のログエントリ', 'Current Memberships': '現在のメンバシップ', 'Current Notes': '現在選択中の追加情報', 'Current Registrations': '現在の登録', 'Current Status': '現在の状況', 'Current Team Members': '現在のチームメンバ', 'Current Twitter account': '現在のTwitterアカウント', 'Current community priorities': '現在のコミュニティの優先順位', 'Current general needs': '現在の需要', 'Current greatest needs of vulnerable groups': '現在、被災者が最も必要としている物資/サービス', 'Current health problems': '現在の健康問題', 'Current main income sources': '現在の主な収入源', 'Current major expenses': '現在の主な支出項目', 'Current number of patients': '現在の患者数', 'Current problems, categories': '現在の問題、カテゴリ', 'Current problems, details': '現在の問題の詳細', 'Current request': '現在の要求', 'Current response': '現在の対応状況', 'Current session': 
'現在のセッション', 'Current type of health problems, adults': '現在発生中の健康問題(成人)', 'Current type of health problems, children': '現在発生中の健康問題(小児)', 'Current type of source for drinking water': '現在の飲料水確保方法', 'Current type of source for sanitary water': '現在の生活用水確保方法', 'Custom Database Resource (e.g., anything defined as a resource in Sahana)': 'カストマイズされたデータベースのリソース (例:Sahana 内のリソースとして定義された物)', 'Customisable category of aid': 'カスタマイズ可能な支援カテゴリ', 'DC': '寄付の証明(Donation Certificate)', 'DECISION': '決定', 'DNA Profile': 'DNAプロファイル', 'DNA Profiling': 'DNAプロファイリング', 'DVI Navigator': '被災者の検索', 'Daily': '日次', 'Dam Overflow': 'ダム決壊', 'Damage': '損傷', 'Dangerous Person': '危険人物', 'Dashboard': 'ダッシュボード', 'Data import policy': 'データのインポートポリシー', 'Data uploaded': 'データがアップロードされました', 'Database': 'データベース', 'Date & Time': '日付と時刻', 'Date Avaialble': '日付あり', 'Date Available': '可能な日付', 'Date Received': '物資受領日', 'Date Requested': '要請した日', 'Date Required': '物資が必要になる日', 'Date Sent': '送付日', 'Date and Time of Goods receipt. By default shows the current time but can be modified by editing in the drop down list.': '物資を受領した日時を記録します。デフォルトでは現在の時間が入力されます。変更するには、ドロップダウンリストから選択してください。', 'Date and Time': '日付と時刻', 'Date and time this report relates to.': 'このレポートに関連する日付と時刻', 'Date of Birth': '生年月日', 'Date of Latest Information on Beneficiaries Reached': '恩恵を受ける人にたどり着いた最新の情報の日付', 'Date of Report': 'レポートの日付', 'Date': '日付', 'Date/Time of Find': '日付/発見日時', 'Date/Time of disappearance': '行方不明になった日付/時刻', 'Date/Time': '日付/時刻', 'De-duplicator': '重複解消機能', 'Dead Body Details': '遺体の詳細', 'Dead Body Reports': '遺体情報レポート', 'Dead Body': '遺体の管理', 'Dead body report added': '遺体発見レポートを追加しました', 'Dead body report deleted': '遺体報告を削除しました', 'Dead body report updated': '遺体レポートを更新しました', 'Deaths in the past 24h': '過去24時間の死者', 'Deaths/24hrs': '死亡者数/24h', 'Debug': 'デバッグ', 'Deceased': '死亡', 'Decimal Degrees': '十進角', 'Decomposed': '腐乱', 'Default Height of the map window. 
In Window layout the map maximises to fill the window, so no need to set a large value here.': 'マップウィンドウのデフォルトの縦高。ウィンドウレイアウトでは、マップはウィンドウ全体に最大化されるので、大きな値を設定する必要はありません。', 'Default Height of the map window.': '地図ウィンドウの初期の高さ', 'Default Marker': 'デフォルトマーカー', 'Default Width of the map window. In Window layout the map maximises to fill the window, so no need to set a large value here.': 'マップウィンドウのデフォルトの幅。ウィンドウレイアウトでは、マップはウィンドウ全体に最大化されるので、大きな値を設定する必要はありません。', 'Default Width of the map window.': '地図ウィンドウの幅の初期値', 'Default synchronization policy': 'データ同期ポリシーのデフォルト設定', 'Defaults updated': 'デフォルト値を更新しました', 'Defaults': 'デフォルト値', 'Defecation area for animals': '動物排便用の地域', 'Defines the icon used for display of features on handheld GPS.': 'ハンドヘルドGPSに表示するアイコンを決定します。', 'Defines the icon used for display of features on interactive map & KML exports. A Marker assigned to an individual Location is set if there is a need to override the Marker assigned to the Feature Class. If neither are defined, then the Default Marker is used.': '対話型地図および KML の出力上で Feature の表示に使用するアイコンを定義します。Feature Class に割り当てられたマーカーを上書きする必要がある場合、個々の場所に割り当てられたマーカーが設定されます。どちらも定義されていない場合は、デフォルトのマーカーが使用されます。', 'Defines the icon used for display of features on interactive map & KML exports.': 'インタラクティブマップとKMLエクスポートで建物などの表示に使われるアイコン定義', 'Defines the marker used for display & the attributes visible in the popup.': 'ポップアップ時と通常時に表示されるマーカーを指定してください。', 'Degrees must be a number between -180 and 180': '度数は -180 から 180 の間にしてください。', 'Dehydration': '脱水症状', 'Delete Aid Request': '援助要請を削除', 'Delete Alternative Item': '代わりの物資を削除する', 'Delete Assessment Summary': 'アセスメントの要約を削除', 'Delete Assessment': 'アセスメントを削除', 'Delete Asset Assignments': '資産割り当ての削除', 'Delete Asset': '資産の削除', 'Delete Baseline Type': '基準値タイプを削除', 'Delete Baseline': '基準値を削除', 'Delete Brand': 'ブランドを削除してください', 'Delete Budget': '予算を削除', 'Delete Bundle': 'Bundleを削除', 'Delete Catalog Item': '救援物資カタログを削除', 'Delete Cluster Subsector': 'クラスタのサブクラスタを削除', 'Delete Cluster': 
'クラスタを削除', 'Delete Commitment Item': 'コミットした物資の削除', 'Delete Commitment': 'コミットメントの削除', 'Delete Config': '設定を削除', 'Delete Contact Information': '連絡先情報の削除', 'Delete Credential': '証明書の削除', 'Delete Distribution Item': '配給物資を削除', 'Delete Distribution': '配給所を削除', 'Delete Document': '文書を削除', 'Delete Donor': '資金提供組織を削除', 'Delete Entry': 'エントリを削除', 'Delete Feature Layer': '機能レイヤを削除', 'Delete Group': 'グループを削除', 'Delete Hospital': '病院を削除', 'Delete Image': '画像を削除', 'Delete Impact Type': '影響範囲のタイプを削除', 'Delete Impact': '影響範囲の削除', 'Delete Incident Report': 'インシデントレポートを削除', 'Delete Incident': 'インシデントを削除', 'Delete Inventory Item': '備蓄物資を削除', 'Delete Inventory Store': '物資集積地点を削除', 'Delete Item Category': 'アイテムカテゴリを削除', 'Delete Item Pack': '救援物資パックの削除', 'Delete Item': '救援物資を削除', 'Delete Key': 'Keyを削除', 'Delete Kit': 'Kitを削除', 'Delete Layer': 'レイヤーを削除', 'Delete Level 1 Assessment': 'レベル1アセスメントの削除', 'Delete Level 2 Assessment': 'レベル2アセスメントの削除', 'Delete Location': 'ロケーションを削除', 'Delete Map Profile': '地図設定を削除', 'Delete Marker': 'マーカーを削除', 'Delete Membership': 'メンバシップを削除', 'Delete Message': 'メッセージを削除', 'Delete Metadata': 'メタデータを削除', 'Delete Need Type': '需要タイプを削除', 'Delete Need': '要求を削除', 'Delete Office': 'オフィスを削除', 'Delete Old': '古いものを削除', 'Delete Organization': '団体情報を削除', 'Delete Peer': 'データ同期先の削除', 'Delete Person': '人物情報を削除', 'Delete Photo': '写真を削除', 'Delete Project': 'プロジェクトを削除', 'Delete Projection': '地図投影法を削除', 'Delete Rapid Assessment': '被災地の現況アセスメントを削除', 'Delete Received Item': '受け取った物資の削除', 'Delete Received Shipment': '受け取った輸送の削除', 'Delete Record': 'レコードを削除', 'Delete Recovery Report': '遺体回収レポートを削除', 'Delete Report': 'レポートを削除', 'Delete Request Item': '物資の要請を削除', 'Delete Request': '支援要請を削除', 'Delete Resource': 'リソースを削除', 'Delete Section': 'Sectionを削除', 'Delete Sector': '活動分野を削除', 'Delete Sent Item': '送付物資を削除', 'Delete Sent Shipment': '輸送物資を削除', 'Delete Service Profile': 'サービスプロファイルを削除', 'Delete Setting': '設定を削除', 'Delete Skill Type': 'スキルタイプを削除', 'Delete Skill': 'スキルを削除', 'Delete 
Staff Type': 'スタッフタイプを削除', 'Delete Status': '状況を削除しました', 'Delete Subscription': '寄付申し込みを削除', 'Delete Survey Answer': '調査回答削除', 'Delete Survey Question': 'Survey Questionを削除', 'Delete Survey Section': '調査項目を削除', 'Delete Survey Series': '一連の調査を削除', 'Delete Survey Template': '調査用テンプレートを削除', 'Delete Unit': '単位を削除', 'Delete User': 'ユーザを削除', 'Delete Volunteer': 'ボランティアを削除', 'Delete Warehouse Item': '倉庫物資の削除', 'Delete Warehouse': '倉庫を削除', 'Delete from Server?': 'サーバから削除しますか?', 'Delete': '削除', 'Delivered': '配信済み', 'Delphi Decision Maker': 'Delphi意思決定', 'Demographic': '人口情報', 'Demonstrations': 'デモ発生', 'Dental Examination': '歯科検査', 'Dental Profile': '歯の欠損/治療跡', 'Department/Unit Name': '所属部課名', 'Deployment': '展開', 'Describe the condition of the roads to your hospital.': '道路状況|病院までの道路状況を記載してください', "Describe the procedure which this record relates to (e.g. 'medical examination')": 'このレコードに関連する手続きを説明してください。(例えば "検診" です。)', 'Description of Bin Type': 'Binタイプを記載してください', 'Description of Contacts': '連絡先の説明', 'Description of defecation area': '排泄用地についての補足説明', 'Description of drinking water source': '飲料水に関する補足説明', 'Description of sanitary water source': '生活用水に関する説明', 'Description of water source before the disaster': '災害発生前の水の確保方法について補足説明', 'Description': '説明', 'Descriptive Text (e.g., Prose, etc)': '説明文 (例: 文学、等)', 'Designated for': '指定済み', 'Desire to remain with family': '家族との残留を希望', 'Destination': '目的地', 'Detail': '詳細', 'Details': '詳細', 'Dialysis': '透析', 'Diaphragms, horizontal bracing': '仕切り板、水平部材', 'Diarrhea among children under 5': '5歳未満の幼児に下痢が蔓延している', 'Diarrhea': '下痢', 'Dignitary Visit': '要人の訪問', 'Dimensions of the storage bin. Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.': '物資備蓄スペースの容積。ドロップダウンリストから単位を選び、以下の形式にしたがって入力してください。 1 x 2 x 3 , 横幅 x 奥行き x 縦幅。', 'Dimensions of the storage location. 
Input in the following format 1 x 2 x 3 for width x depth x height followed by choosing the unit from the drop down list.': '物資備蓄スペースの容積。ドロップダウンリストから単位を選び、以下の形式にしたがって入力してください。 1 x 2 x 3 , 横幅 x 奥行き x 縦幅。', 'Direction': '方向', 'Disable': '無効', 'Disabled participating in coping activities': '障害者が災害対応に従事', 'Disabled': '無効', 'Disabled?': '無効になっているか?', 'Disaster Victim Identification': '被災者の同定', 'Disaster Victim Registry': '被災者登録', 'Disaster clean-up/repairs': '災害の清掃活動や修復', 'Discharge (cusecs)': '流水量 (cusecs)', 'Discharges/24hrs': '退院者数/24h', 'Discussion Forum on item': 'フォーラム(物資について)', 'Discussion Forum': 'フォーラム', 'Disease vectors': '病原媒介者', 'Dispatch Items': 'アイテムの発送', 'Dispatch': '発送', 'Dispensary': '診療所', 'Displaced Populations': '避難者数', 'Displaced': '避難中', 'Display Polygons?': '多角形を表示しますか?', 'Display Routes?': 'ルートを表示しますか?', 'Display Tracks?': 'Tracksを表示しますか?', 'Display Waypoints?': 'ウェイポイントを表示しますか?', 'Dispose Expired/Unusable Items': '期限切れ / 使用できない物資の処分', 'Dispose': '処分', 'Distance between defecation area and water source': '水資源採取場所と排泄場所の間の距離', 'Distance between latrines and temporary shelter in meters': 'トイレと避難所の距離(m)', 'Distance between shelter and latrines': '簡易避難所と排泄場所との間の距離(メートル)', 'Distance(Kms)': '距離(Kms)', 'Distribution Details': '配給所の詳細', 'Distribution Item Details': '配給物資の詳細', 'Distribution Item added': '配給物資を追加しました', 'Distribution Item deleted': '配給物資を削除しました', 'Distribution Item updated': '配給物資を更新しました', 'Distribution Item': '配給物資', 'Distribution Items': '配給物資', 'Distribution added': '配給所を追加しました', 'Distribution deleted': '配給所を削除しました', 'Distribution groups': '配信グループ', 'Distribution updated': '配給所を更新しました', 'Distribution': '配給所', 'Distributions': '配給所', 'District': '地区(行政地区)', 'Do adolescent and youth in your community participate in activities that help them cope with the disaster? (ex. 
meetings, religious activities, volunteer in the community clean-up, etc)': 'あなたの地域の青年は、災害に対応するための支援活動に参加しますか?(例: 打ち合わせ、宗教活動、清掃活動ボランティアなど)', 'Do households each have at least 2 containers (10-20 litres each) to hold water?': '1つの世帯ごとに、少なくとも2つ以上の水貯蔵容器(10-20リットル/容器)があるかどうかを記載してください', 'Do households have appropriate equipment and materials to cook their food (stove, pots, dished plates, and a mug/drinking vessel, etc)?': '調理や食事に必要となる道具や器材(コンロ、ポット、皿やプレート、マグカップ、飲料容器など)が世帯に存在するかを記載します', 'Do households have bedding materials available (tarps, plastic mats, blankets)?': 'ベッド、あるいはベッド用部材(例:タープ、プラスチックマット、毛布)が世帯に存在するかを記載します', 'Do households have household water storage containers?': '水貯蔵容器が世帯に存在するかを記載します', 'Do minority members in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': '地域にいるマイノリティ(社会的少数者)の人が、自助的な災害対処につながる活動に参加しているか記載してください。(例 打ち合わせ、宗教活動、地域の清掃ボランティアなど)', 'Do older people in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': '災害復旧活動に従事している高齢者が、共同体の中にいるかどうかを記載してください(例: 打ち合わせ、宗教活動、清掃活動ボランティアなど)', 'Do people have at least 2 full sets of clothing (shirts, pants/sarong, underwear)?': '個人に対して、少なくとも2セット以上の衣服(シャツ、ズボン/腰巻、下着など)があるかどうか記載してください', 'Do people have reliable access to sufficient sanitation/hygiene items (bathing soap, laundry soap, shampoo, toothpaste and toothbrush)?': '十分な量のサニタリ / 衛生用品が、安定して供給されているかどうかを記載します(石鹸、シャンプー、歯ブラシ、洗濯用洗剤など)', 'Do people with disabilities in your community participate in activities that help them cope with the disaster? (ex. 
meetings, religious activities, volunteer in the community clean-up, etc)': 'あなたの地域で障害者と一緒にいる方は、災害に対処するための彼らの支援活動に参加しますか?(例: 打ち合わせ、宗教活動、清掃活動ボランティアなど)', 'Do women and girls have easy access to sanitary materials?': '女性用生理用品の入手が容易かどうかを記載してください', 'Do women in your community participate in activities that help them cope with the disaster? (ex. meetings, religious activities, volunteer in the community clean-up, etc)': 'あなたの地域の女性は、災害対応のための支援活動に参加しますか?(例: 打ち合わせ、宗教活動、清掃活動ボランティアなど)', 'Do you have access to cash to restart your business?': 'ビジネス再開に必要な現金が入手可能かどうかを記載してください', 'Do you know of any incidents of violence?': '暴力事件が発生したかどうかを記載してください', 'Do you know of children living on their own (without adults)?': '成人がおらず、未成年のみで生活しているグループがあるかどうかを記載してください', 'Do you know of children separated from their parents or caregivers?': '親や養育者とはぐれた未成年がいるかどうかを記載してください', 'Do you know of children that have been orphaned by the disaster?': '災害によって孤児となった未成年がいるかどうかを記載してください', 'Do you know of children that have been sent to safe places?': '安全な場所に疎開した未成年がいるかどうかを記載してください', 'Do you know of children that have disappeared without explanation in the period since the disaster?': '災害発生後、行き先の説明ないまま連絡が取れなくなった未成年がいるかどうかを記載してください', 'Do you know of older people who are primary caregivers of children?': '未成年に対する介護経験がある高齢者がいるかどうかを記載してください', 'Do you know of parents/caregivers missing children?': '子供と連絡が取れなくなった親や養育者がいるかどうかを記載してください', 'Do you really want to delete these records?': '本当にこれらのデータを削除しますか?', 'Do you want to cancel this received shipment? The items will be removed from the Inventory. This action CANNOT be undone!': 'この輸送の受領をキャンセルしますか?キャンセルするとこの物資は備蓄から削除されます。この操作は *取り消せません!*', 'Do you want to cancel this sent shipment? The items will be returned to the Inventory. 
This action CANNOT be undone!': '出荷された物資をキャンセルしますか?この物資は、在庫に返されます。このアクションは、元に戻せません。', 'Do you want to over-write the file metadata with new default values?': 'ファイルのメタデータを、新しいデフォルト値で上書きしますか?', 'Do you want to receive this shipment?': 'この輸送物資を受け取られますか?', 'Do you want to send these Committed items?': 'これらコミットされた物資を送付してよいですか?', 'Do you want to send this shipment?': 'この発送情報を送信しますか?', 'Document Details': '文書の詳細', 'Document Scan': '文書のスキャン', 'Document added': '文書を追加しました', 'Document deleted': '文書を削除しました', 'Document updated': '文書を更新しました', 'Document': '文書', 'Documents and Photos': '文書と写真', 'Documents': '文書', 'Does this facility provide a cholera treatment center?': 'コレラ治療センターの機能を提供可能かどうか', 'Doing nothing (no structured activity)': '活動なし(組織立った行動なし)', 'Dollars': 'ドル', 'Domain': 'ドメイン', 'Domestic chores': '家事手伝い', 'Donation Certificate': '寄付証明書', 'Donation Phone #': '寄付受付電話番号', 'Donor Details': '資金提供組織の詳細', 'Donor added': '資金提供組織を追加しました', 'Donor deleted': '資金提供組織を削除しました', 'Donor updated': '資金提供組織を更新しました', 'Donor': '資金提供組織', 'Donors Report': '資金提供レポート', 'Donors': '資金提供組織', 'Door frame': 'ドア枠', 'Download PDF': 'PDFをダウンロード', 'Draft Features': '草案(ドラフト)', 'Draft': 'ドラフト', 'Drainage': '排水', 'Drawing up a Budget for Staff & Equipment across various Locations.': 'ロケーションに対する、スタッフと備品の予算を作成します。', 'Drill Down by Group': 'グループで絞り込み', 'Drill Down by Incident': 'インシデントで絞り込み', 'Drill Down by Shelter': '避難所で絞り込み', 'Driving License': '運転免許', 'Drought': '干ばつ', 'Drugs': '医薬品', 'Dug Well': '丸井戸', 'Duplicate?': '重複?', 'Duration': '活動実施期間', 'Dust Storm': '粉塵嵐', 'Dwelling': '居住施設', 'Dwellings': '住居数', 'EMS Reason': '緊急医療受け入れ状態', 'EMS Status Reason': '救急医療状況の理由', 'EMS Status': 'EMSステータス', 'EMS Traffic Status': '救急医療の混雑状況', 'ER Status Reason': 'ER医療状況の理由', 'ER Status': 'ER ステータス', 'Early Recovery': '早期復旧', 'Earthquake': '地震', 'Easy access to sanitation items for women/girls': '女性用サニタリ用品の入手が容易である', 'Edit Activity': '支援活動を編集', 'Edit Address': '住所の編集', 'Edit Aid Request': '援助要請を編集', 'Edit Alternative 
Item': '代わりの物資を編集', 'Edit Application': 'アプリケーションの編集', 'Edit Assessment Summary': 'アセスメントの要約を編集', 'Edit Assessment': 'アセスメントを編集', 'Edit Asset Assignment': '資産割り当ての編集', 'Edit Asset': '資産を編集', 'Edit Baseline Type': '基準値のタイプを編集', 'Edit Baseline': 'Baselineの編集', 'Edit Brand': '銘柄の編集', 'Edit Budget': '予算の編集', 'Edit Bundle': 'Bundleの編集', 'Edit Catalog Item': '救援物資カタログの編集', 'Edit Category<>Sub-Category<>Catalog Relation': 'Category<>Sub-Category<>Catalog 関係の編集', 'Edit Cluster Subsector': 'クラスタのサブセクターの編集', 'Edit Cluster': 'クラスタを編集', 'Edit Commitment Item': 'コミットされた物資の検索', 'Edit Commitment': 'コミットを編集', 'Edit Config': '設定の編集', 'Edit Contact Information': '連絡先情報の編集', 'Edit Contact': '連絡先の編集', 'Edit Contents': '内容の編集', 'Edit Credential': '証明書の編集', 'Edit Dead Body Details': '遺体の詳細を編集', 'Edit Defaults': 'デフォルト値の編集', 'Edit Description': '説明の編集', 'Edit Details': '詳細の編集', 'Edit Disaster Victims': '被災者情報の編集', 'Edit Distribution Item': '配給物資の編集', 'Edit Distribution': '配給所の編集', 'Edit Document': '文書を編集', 'Edit Donor': '資金提供組織の編集', 'Edit Email Settings': '電子メール設定の編集', 'Edit Feature Layer': 'Feature Layerの編集', 'Edit Flood Report': '洪水レポートの編集', 'Edit Gateway Settings': 'ゲートウェイ設定の編集', 'Edit Group': 'グループの編集', 'Edit Hospital': '病院の編集', 'Edit Identification Report': 'IDレポートの編集', 'Edit Identity': 'IDの編集', 'Edit Image Details': '画像の詳細の編集', 'Edit Image': '画像の編集', 'Edit Impact Type': '災害影響のタイプを編集', 'Edit Impact': '被災影響の編集', 'Edit Incident Report': 'インシデントレポートの編集', 'Edit Incident': 'インシデントを編集', 'Edit Inventory Item': '備蓄物資の編集', 'Edit Inventory Store': '物資集積地点の編集', 'Edit Item Catalog Categories': '救援物資カタログのカテゴリを編集', 'Edit Item Catalog': '救援物資カタログの編集', 'Edit Item Category': '救援物資カテゴリの編集', 'Edit Item Pack': '物資パックを編集', 'Edit Item Sub-Categories': '救援物資サブカテゴリの編集', 'Edit Item': '物資の編集', 'Edit Key': 'Keyの編集', 'Edit Kit': 'Kitの編集', 'Edit Layer': 'レイヤの編集', 'Edit Level 1 Assessment': 'レベル1アセスメントを編集する', 'Edit Level 2 Assessment': 'レベル2アセスメントを編集', 'Edit Location': 'ロケーションの編集', 'Edit Log Entry': 'ログエントリの編集', 
'Edit Map Profile': '地図設定を編集する', 'Edit Map Services': '地図サービスの編集', 'Edit Marker': 'マーカーの編集', 'Edit Membership': 'メンバシップの編集', 'Edit Message': 'メッセージの編集', 'Edit Messaging Settings': 'メッセージ設定の編集', 'Edit Metadata': 'メタデータの編集', 'Edit Modem Settings': 'モデム設定の編集', 'Edit Need Type': '需要タイプの編集', 'Edit Need': 'ニーズを編集', 'Edit Note': '追加情報を編集', 'Edit Office': 'オフィスの編集', 'Edit Options': 'オプション編集', 'Edit Organization': '団体の編集', 'Edit Parameters': 'パラメータの編集', 'Edit Peer Details': 'データ同期先の詳細を編集', 'Edit Peer': 'データ同期先の編集', 'Edit Person Details': '人物情報の詳細を編集', 'Edit Personal Effects Details': 'Personal Effectsの詳細の編集', 'Edit Photo': '写真の編集', 'Edit Pledge': '寄付の編集', 'Edit Position': '場所の編集', 'Edit Problem': '問題の編集', 'Edit Project': 'プロジェクトの編集', 'Edit Projection': '地図投影法の編集', 'Edit Rapid Assessment': '被災地の現況アセスメントの編集', 'Edit Received Item': '物資の受領を編集', 'Edit Received Shipment': '物資の輸送の受領報告を編集', 'Edit Record': 'レコードの編集', 'Edit Recovery Details': '遺体回収の詳細を編集', 'Edit Registration Details': '登録状況の詳細を編集', 'Edit Registration': '登録の編集', 'Edit Report': 'レポートの編集', 'Edit Request Item': '物資の要請を編集', 'Edit Request': '支援要請の編集', 'Edit Resource': 'リソースの編集', 'Edit Response': '返信を編集', 'Edit River': '河川の編集', 'Edit Role': '役割の編集', 'Edit Sector': '活動分野を編集', 'Edit Sent Item': '送付した物資の編集', 'Edit Setting': '設定の編集', 'Edit Settings': '設定の編集', 'Edit Shelter Service': '避難所提供サービスの編集', 'Edit Shelter Type': '避難所タイプの編集', 'Edit Shelter': '避難所の編集', 'Edit Shipment Transit Log': '輸送履歴の編集', 'Edit Shipment to Send': '送付する輸送を編集', 'Edit Shipment/Way Bills': '輸送費/移動費の編集', 'Edit Shipment<>Item Relation': '輸送<>物資の関係を編集', 'Edit Site': 'Siteを編集', 'Edit Skill Type': 'スキルタイプの編集', 'Edit Skill': 'スキルの編集', 'Edit Solution': '解決案の編集', 'Edit Staff Type': 'スタッフタイプの編集', 'Edit Staff': 'スタッフの編集', 'Edit Storage Bin Type(s)': 'Storage Binタイプを編集', 'Edit Storage Bins': 'Storage Binの編集', 'Edit Storage Location': '備蓄地点の編集', 'Edit Subscription': '寄付申し込みの編集', 'Edit Survey Answer': '調査回答の編集', 'Edit Survey Question': '調査の質問項目を編集', 'Edit Survey 
Section': 'フィードバック内容を編集します', 'Edit Survey Series': '一連の調査の編集', 'Edit Survey Template': '調査テンプレートを編集', 'Edit Task': 'タスクの編集', 'Edit Team': 'チームの編集', 'Edit Theme': 'テーマの編集', 'Edit Themes': 'テーマの編集', 'Edit Ticket': 'チケットの編集', 'Edit Track': '追跡情報の編集', 'Edit Tropo Settings': 'Tropo 設定の編集', 'Edit Unit': '単位の編集', 'Edit User': 'ユーザの編集', 'Edit Volunteer Details': 'ボランティアの詳細を編集する', 'Edit Volunteer Registration': 'ボランティア登録の編集', 'Edit Warehouse Item': '倉庫物資を編集', 'Edit Warehouse': '倉庫を編集', 'Edit current record': '現在のレコードの編集', 'Edit message': 'メッセージの編集', 'Edit the Application': 'アプリケーションの編集', 'Edit': '編集', 'Editable?': '編集可能?', 'Education materials received': '教育資材を受領した', 'Education materials, source': '教育資材の送付元', 'Education': '教育', 'Effects Inventory': '備蓄物資への影響', 'Eggs': '卵', 'Either a shelter or a location must be specified': '避難所かロケーションのどちらかを特定する必要があります', 'Either file upload or document URL required.': 'ファイルのアップロードと文書のURLの両方が必要です。', 'Either file upload or image URL required.': 'アップロードするファイルか、URLを指定してください。', 'Elderly person headed households (>60 yrs)': '代表者が60歳以上の世帯数', 'Electrical': '電動の', 'Electrical, gas, sewerage, water, hazmats': '電気、ガス、下水道、水、有害物', 'Elevated': '高まる', 'Elevators': 'エレベーター', 'Email Address': 'メールアドレス', 'Email Settings': '電子メール設定', 'Email address verified, however registration is still pending approval - please wait until confirmation received.': '電子メールの認証は完了しましたが、登録はまだ完了していません。確認が完了するまで少々お待ちください。', 'Email settings updated': '電子メールの設定を更新しました', 'Email verification': '利用者登録の確認', 'Email': '電子メール', 'Embalming': '遺体防腐処理', 'Embassy': '大使館', 'Emergency Capacity Building project': 'ECB (緊急時の被災者収容建築プロジェクト)', 'Emergency Department': '救急部門', 'Emergency Shelter': '緊急避難所', 'Emergency Support Facility': '緊急支援施設', 'Emergency Support Service': '緊急支援サービス', 'Emergency Telecommunications': '緊急時電話連絡先', 'Enable/Disable Layers': 'レイヤの有効化/無効化', 'Enabled': '有効', 'End date should be after start date': '終了日付は開始日付より後にしてください', 'End date': '終了日', 'End of Period': '終了期間', 'English': 
'English 英語', 'Enter Coordinates': '緯度経度を入力', 'Enter Coordinates:': '座標入力:', 'Enter a GPS Coord': 'GPS Coordを入力', 'Enter a GPS Coordinate': 'GPS座標を入力してください', 'Enter a date before': '以前の日時を入力', 'Enter a few characters of the name to select an existing Location or else simply type the name of the new Location.': '最初の数文字を入力して既存の項目から選ぶか、あるいは新しいロケーション名を入力して、ロケーションを特定してください。', 'Enter a name for the spreadsheet you are uploading (mandatory).': 'アップロードするスプレッドシートの名前を入力してください。(必須項目)', 'Enter a new support request.': '新規の支援要請を登録', 'Enter a summary of the request here.': '要求事項の概要を入力', 'Enter a unique label!': 'そのラベル名は使われています。一意のラベル名を入力してください。', 'Enter a valid date before': 'より前の正しい日付を入力してください', 'Enter a valid email': '正しいメールアドレスを入力してください', 'Enter a valid future date': '正しい未来の日付を入力してください', 'Enter some characters to bring up a list of possible matches': '文字を入力することで、候補の一覧が表示されます', 'Enter some characters to bring up a list of possible matches.': '検索文字列を入力してください', 'Enter tags separated by commas.': 'タグはカンマで区切って入力してください。', 'Enter the same password as above': '確認のため、パスワードを再入力', 'Enter your firstname': 'あなたの名前を入力', 'Entered': '入力された', 'Entering a phone number is optional, but doing so allows you to subscribe to receive SMS messages.': '電話番号の入力は任意です。入力すると、SMS メッセージの受け取り登録ができます。', 'Entering an Organization is optional, but doing so directs you to the appropriate approver & means you automatically get the appropriate permissions.': '選択リストに含まれる団体のメンバーであれば、所属する団体を選択してください。(団体の選択は必須ではありません)', 'Entry deleted': 'エントリを削除しました', 'Environment': '環境', 'Equipment': '備品', 'Error encountered while applying the theme.': 'テーマ適用時にエラーが発生しました。', 'Error in message': 'エラーメッセージ', "Error logs for '%(app)s'": '"%(app)s" に関するエラーログ', 'Errors': 'エラー', 'Estimated # of households who are affected by the emergency': '非常事態の影響を受けた世帯の推定数', 'Estimated # of people who are affected by the emergency': '非常事態の影響を受けた住民の推定数', 'Estimated Overall Building Damage': '建物全体の被害見積り', 'Estimated total number of people in institutions': 
'なんらかの施設に収容されている住民の推定数', 'Euros': 'ユーロ', 'Evacuating': '退避中', 'Evaluate the information in this message. (This value SHOULD NOT be used in public warning applications.)': 'このメッセージの情報を評価します。(この値は、公開される警告アプリケーションで使用してはなりません)', 'Event Time': 'イベント発生時刻', 'Event Type': 'イベントタイプ', 'Event type': 'イベントタイプ', 'Example': '例', 'Exceeded': '超過', 'Exclude contents': 'コンテンツを除く', 'Excreta disposal': 'し尿処理', 'Execute a pre-planned activity identified in <instruction>': '事前に準備していた計画 <instruction>を実行する', 'Existing Placard Type': '設置されたポスターのタイプ', 'Existing food stocks': '食糧備蓄あり', 'Existing food stocks, main dishes': '備蓄中の食料(主皿)', 'Existing food stocks, side dishes': '備蓄中の食料(副皿)', 'Exits': '出口', 'Expected In': '予定期間', 'Expected Out': '予期される出力', 'Experience': '熟練者', 'Expiry Date': '有効期限', 'Expiry Time': '有効期限', 'Expiry_Date': '有効期限', 'Explosive Hazard': '爆発災害', 'Export Data': 'データのエクスポート', 'Export Database as CSV': 'データベースをCSV形式でエクスポート', 'Export in GPX format': 'GPXフォーマットでエクスポート', 'Export in KML format': 'KMLフォーマットでエクスポート', 'Export in OSM format': 'OSMフォーマットでエクスポート', 'Export in PDF format': 'PDFフォーマットでエクスポート', 'Export in RSS format': 'RSSフォーマットでエクスポート', 'Export in XLS format': 'XLSフォーマットでエクスポート', 'Export': 'エクスポート', 'Exterior Only': '外装のみ', 'Exterior and Interior': '外装と内装', 'External Features': '外部機能', 'Eye Color': '目の色', 'Facial hair, color': 'ヒゲ, 色', 'Facial hair, type': 'ヒゲ, 形状', 'Facial hear, length': 'ヒゲ, 長さ', 'Facility Operations': '施設の運用', 'Facility Status': '施設の状態', 'Facility Type': '施設タイプ', 'Factors affecting school attendance': '生徒の就学に影響する要因', 'Failed to send mail to Approver - see if you can notify them manually!': '承認依頼メールを送信できませんでした。利用者登録は完了していません。サイト管理者へ連絡してください。', 'Failed!': '失敗しました!', 'Falling Object Hazard': '落下/墜落による災害', 'Families/HH': '家族/世帯', 'Family tarpaulins received': 'タープ(家族用簡易テント)を受領した', 'Family tarpaulins, source': 'タープ(家族用簡易テント)の送付元', 'Family': '家族', 'Family/friends': '家族/友人', 'Farmland/fishing material assistance, Rank': '農業 / 漁業用物資の補助、ランク', 'Fatalities': 
'死亡者', 'Fax': 'ファックス', 'Feature Layer Details': '機能レイヤの詳細', 'Feature Layer added': '機能レイヤを追加しました', 'Feature Layer deleted': '機能レイヤを削除しました', 'Feature Layer updated': '機能レイヤを更新しました', 'Feature Layers': '機能レイヤ', 'Feature Namespace': 'Feature 名前空間', 'Feature Request': '機能の要求', 'Feature Type': 'Feature タイプ', 'Feature': '機能', 'Features Include': '含まれる機能', 'Female headed households': '代表者が女性の世帯数', 'Female': '女性', 'Few': '少数', 'Field Hospital': '野外病院', 'File': 'ファイル', 'Fill in Latitude': '緯度を記入', 'Fill in Longitude': '経度を記入', 'Fill out Rapid Evaluation Forms': '迅速評価フォームに記入します', 'Fill out detailed Evaluation Forms': '詳細な評価フォームに入力する', 'Filter Field': 'フィールドをフィルタする', 'Filter Value': '値をフィルタ', 'Filter': 'フィルタ', 'Filtered search of aid pledges and requests': '援助申出と要請の検索されたもの', 'Find All Matches': '完全一致', 'Find Dead Body Report': '遺体レポートの発見', 'Find Hospital': '病院を探す', 'Find Person Record': '人物情報を検索', 'Find Recovery Report': '遺体発見レポート', 'Find Volunteers': 'ボランティアを探す', 'Find a Person Record': '人物情報を検索する', 'Find by Name': '名前で検索', 'Find': '検索', 'Finder': '発見者', 'Fingerprint': '指紋', 'Fingerprinting': '指紋', 'Fingerprints': '指紋', 'Finish': '完了', 'Finished Jobs': '完了したジョブ', 'Fire suppression and rescue': '消火・救出活動', 'Fire': '火災', 'First Name': '苗字', 'First name': '苗字', 'Fishing': '漁業', 'Flash Flood': '鉄砲水', 'Flash Freeze': '瞬間凍結', 'Fleet Management': '船舶の管理', 'Flexible Impact Assessments': '災害影響範囲アセスメント', 'Flood Alerts show water levels in various parts of the country': '洪水警報では、国内各所の水位情報を確認することができます。', 'Flood Alerts': '洪水警報', 'Flood Report Details': '洪水レポートの詳細', 'Flood Report added': '洪水レポートを追加しました', 'Flood Report deleted': '洪水レポートを削除しました', 'Flood Report updated': '洪水レポートを更新しました', 'Flood Report': '洪水レポート', 'Flood Reports': '洪水レポート', 'Flood': '洪水', 'Flow Status': '流れの状況', 'Focal Point': '代表者', 'Fog': '濃霧', 'Food Supply': '食料の供給', 'Food assistance available/expected': '食糧援助が利用可能 / 期待できる', 'Food assistance': '食糧援助', 'Food': '食料', 'Footer file %s missing!': 'フッターファイル%sが見つかりません。', 'Footer': 
'フッタ', 'For Eden instances enter the application base URL, e.g. http://sync.sahanfoundation.org/eden, for other peers the URL of the synchronization interface.': 'Eden の場合はベースURL(例えば http://sync.sahanfoundation.org/eden)、他のシステムの場合は同期インターフェースのURL。', 'For POP-3 this is usually 110 (995 for SSL), for IMAP this is usually 143 (993 for IMAP).': 'POP-3では通常110 (SSLでは995)で、IMAPでは通常143 (IMAPSでは993)。', 'For Warehouse': '倉庫向け', 'For a country this would be the ISO2 code, for a Town, it would be the Airport Locode.': '国の場合は ISO2 コード、町の場合は 空港コード(Airport Locode)', 'For each sync partner, there is a default sync job that runs after a specified interval of time. You can also set up more sync jobs which could be customized on your needs. Click the link on the right to get started.': 'それぞれの同期パートナーについて、指定した間隔で実行する同期ジョブがデフォルトで存在します。必要に応じて、さらなる同期ジョブを設定し、カスタマイズすることができます。開始するには、リンクをクリックしてください。', 'For enhanced security, you are recommended to enter a username and password, and notify administrators of other machines in your organization to add this username and password against your UUID in Synchronization -> Sync Partners': 'セキュリティ向上のため、ユーザー名とパスワードを入力し、団体の他端末の管理者にユーザー名とパスワードを通知して「データ同期」 -> 「データ同期パートナー」であなたのUUIDに追加してもらうことを推奨します。', 'For live help from the Sahana community on using this application, go to': 'Sahanaの使い方について Sahanaコミュニティからライブヘルプを希望する際は、以下に進んでください。', 'For messages that support alert network internal functions': '警戒(alert)ネットワークの内部機能をサポートするメッセージの場合', 'For more details on the Sahana Eden system, see the': 'Sahana Edenに関する詳細は、以下をごらんください。', 'For more information, see ': '詳細は、以下を参照してください。', 'For other types, the next screen will allow you to enter the relevant details...': 'その他の種類については、次の画面で関連する詳細情報を入力できます…', 'For': ' ', 'For:': '対象:', 'Forest Fire': '森林火災', 'Formal camp': '指定避難所', 'Format': 'フォーマット', 'Forms': 'フォーム', 'Found': '発見された', 'Foundations': '構造基礎', 'Freezing Drizzle': '凍結霧雨', 'Freezing Rain': 'みぞれ', 'Freezing Spray': '冷却スプレー', 'French': 'フランス語', 'Friday': '金曜日', 'From 
Inventory': '送付元', 'From Location': '送付元ロケーション', 'From Organization': '送付元団体', 'From Person': '送付元の担当者', 'From Warehouse': '倉庫から', 'From': '輸送元', 'Frost': '凍結', 'Fulfil. Status': '確保量は十分か', 'Fulfillment Status': '充足状況', 'Full beard': 'もみあげまでのアゴヒゲ、口髭あり', 'Full': '満員', 'Fullscreen Map': 'フルスクリーン表示', 'Function Permissions': '機能に対する権限', 'Function': '機能', 'Functional Tests': '機能テスト', 'Functions available': '利用可能な機能', 'Funding Organization': '資金提供団体', 'Funeral': '葬儀', 'Further Action Recommended': '更なる対応が推奨されている', 'GIS Reports of Shelter': '避難所のGISレポート', 'GIS integration to view location details of the Shelter': '避難所のロケーション詳細を閲覧するGISインテグレーション', 'GPS Marker': 'GPSマーカー', 'GPS Track File': 'GPS Track ファイル', 'GPS Track': 'GPS トラック', 'GPX Layers': 'GPX レイヤ', 'GPX Track': 'GPX形式の追跡情報', 'GRN Status': 'GRNステータス', 'Gale Wind': '強風', 'Gantt Chart': 'ガントチャート', 'Gap Analysis Map': 'ギャップ解析マップ', 'Gap Analysis Report': 'ギャップ解析報告', 'Gap Analysis': 'ギャップ解析', 'Gap Map': '需給ギャップマップ', 'Gap Report': '需給ギャップの報告', 'Gateway Settings': 'ゲートウェイ設定', 'Gateway settings updated': 'ゲートウェイ設定を更新しました', 'Gender': '性別', 'General Comment': '包括コメント', 'General Medical/Surgical': '一般医学/外科', 'General emergency and public safety': '一般的緊急事態と公共の安全', 'General information on demographics': '人口統計の情報', 'Generator': '発電機', 'Geocoder Selection': 'Geocoder 選択', 'Geometry Name': 'Geometry名', 'Geonames.org search requires Internet connectivity!': 'Geonames.org の検索を行うには、インターネットに接続している必要があります。', 'Geophysical (inc. 
landslide)': '地球物理 (地滑りを含む)', 'Geotechnical Hazards': '地盤災害', 'Geotechnical': '地質工学', 'Geraldo module not available within the running Python - this needs installing for PDF output!': '実行中のPythonでGeraldoモジュールが利用できません。PDF出力に必要です。', 'Geraldo not installed': 'Geraldoがインストールされていません', 'Get incoming recovery requests as RSS feed': '遺体回収要請をRSSフィードとして取得する', 'Girls 13-18 yrs in affected area': '影響地域内の13-18歳の女子数', 'Girls 13-18 yrs not attending school': '学校に来ていなかった13-18歳の女子数', 'Girls 6-12 yrs in affected area': '影響地域内の6-12歳の女子数', 'Girls 6-12 yrs not attending school': '学校に来ていなかった6-12歳の女子数', 'Give a brief description of the image, e.g. what can be seen where on the picture (optional).': '画像に関する説明。特に、写真のどの箇所に何が確認できるかを記載します (オプション)', 'Give information about where and when you have seen the person': '人物を見かけた場所や時間の情報を提供してください', 'Give information about where and when you have seen them': 'どこで、いつ、彼らを見かけたのか、情報をください', 'Global Messaging Settings': 'メッセージの全般設定', 'Glossary': '用語集', 'Go to Request': '支援要請に行く', 'Goatee': 'やぎヒゲ', 'Goods Received Note': '受諾した物資の注釈', 'Government UID': '政府UID', 'Government building': '政府所管の建物', 'Government': '政府・行政機関', 'Grade': '学年', 'Greek': 'ギリシャ語', 'Green': '緑', 'Ground movement, fissures': '地盤移動、亀裂', 'Ground movement, settlement, slips': '地盤移動、沈下、がけ崩れ', 'Group %(group_id)s created': 'グループ %(group_id)s を作成しました', 'Group Description': 'グループの説明', 'Group Details': 'グループの詳細', 'Group ID': 'グループID', 'Group Member added': 'グループメンバを追加しました', 'Group Members': 'グループメンバ', 'Group Memberships': 'グループメンバシップ', 'Group Name': 'グループ名', 'Group Title': 'グループのタイトル', 'Group Type': 'グループのタイプ', 'Group added': 'グループを追加しました', 'Group deleted': 'グループを削除しました', 'Group description': 'グループの説明', 'Group name': 'グループ名', 'Group type': 'グループタイプ', 'Group updated': 'グループを更新しました', 'Group': 'グループ', 'Groups removed': 'グループを削除しました', 'Groups': 'グループ', 'Guest': 'ゲスト', 'HR Data': '人的資源の情報', 'HR Manager': '人的資源マネージャー', 'Hail': 'あられ', 'Hair Color': '頭髪の色', 'Hair Length': '頭髪の長さ', 'Hair Style': 'ヘアスタイル', 
'Has additional rights to modify records relating to this Organization or Site.': 'この団体やサイトに関連するレコードを変更するための権限を追加します', 'Has data from this Reference Document been entered into Sahana?': 'リファレンス文書の内容が Sahanaに登録してあるかどうかを記載してください。', 'Has only read-only access to records relating to this Organization or Site.': 'この団体やサイトに関連するレコードを閲覧のみに制限します', 'Has the safety and security of women and children in your community changed since the emergency?': '緊急事態以来、女性や未成年の生活の危険度が変化したかどうかを記載してください', 'Has your business been damaged in the course of the disaster?': '災害の過程で、ビジネス上の損害を受けているかどうかを記載してください', 'Have households received any shelter/NFI assistance or is assistance expected in the coming days?': '世帯に対して避難所用品や生活必需品が配布されている、あるいは数日以内に配布を実施できるかを記載してください', 'Have normal food sources been disrupted?': '平常時の食料調達源が利用不可能になったかどうかを記載してください', 'Have schools received or are expecting to receive any assistance?': '学校に対してなんらかの支援が行われた、あるいは行われる予定であるかどうかを記載してください', 'Have the people received or are you expecting any medical or food assistance in the coming days?': '医療品や食糧支援を、被災者、あるいはあなたが受領したかどうか、あるいは数日以内に受領できそうかどうかを記載してください。', 'Hazard Pay': '災害補償金', 'Hazardous Material': '危険物', 'Hazardous Road Conditions': '災害発生後の道路状況', 'Header Background': 'ヘッダー背景', 'Header background file %s missing!': 'ヘッダー背景ファイル%sが存在しません。', 'Headquarters': '本部・本社', 'Health care assistance, Rank': '医療 / 介護支援、ランク', 'Health center with beds': '保健所(ベッドあり)', 'Health center without beds': '保健所(ベッドなし)', 'Health center': '保健所', 'Health services functioning prior to disaster': '災害発生以前 ヘルスサービスの提供', 'Health services functioning since disaster': '災害発生後  ヘルスサービスの提供', 'Health services status': '医療サービス状況', 'Health': '保健・介護', 'Healthcare Worker': 'ヘルスケア要員', 'Heat Wave': '熱波', 'Heat and Humidity': '熱と湿度', 'Height (cm)': '身長 (cm)', 'Height': '身長', 'Help': ' ヘルプ ', 'Helps to monitor status of hospitals': '病院の現状把握に役立つ情報を管理します', 'Helps to report and search for Missing Persons': '行方不明者の報告と検索を支援します。', 'Here are the solution items related to the problem.': 
'問題に関連する解決案です。', 'Heritage Listed': '遺産登録', 'Hide Details': '詳細を隠す', 'Hierarchy Level 0 Name (e.g. Country)': '階層レベル0の名前(例: 国)', 'Hierarchy Level 1 Name (e.g. Province)': '階層レベル1の名前 (例: 都道府県)', 'Hierarchy Level 2 Name': 'ロケーション階層レベル2の名前', 'Hierarchy Level 3 Name': '階層レベル3の名前', 'Hierarchy Level 4 Name': '階層レベル4の名前', 'High Water': '最高水位', 'High': '高', 'Hindu': 'ヒンズー教徒', 'History': '履歴', 'Hit the back button on your browser to try again.': 'ブラウザの「戻る」ボタンを押して、やり直してください。', 'Holiday Address': '休日の住所', 'Home Address': '自宅住所', 'Home Country': '所属国', 'Home Crime': '住居犯罪', 'Home': 'ホーム', 'Hospital Details': '病院の詳細', 'Hospital Status Report': '病院ステータスレポート', 'Hospital information added': '病院情報を追加しました', 'Hospital information deleted': '病院情報を削除しました', 'Hospital information updated': '病院情報を更新しました', 'Hospital status assessment.': '病院ステータスアセスメント', 'Hospital': '病院', 'Hospitals': '病院情報', 'Hot Spot': 'ホットスポット', 'Hour': '時間', 'Hourly': '1時間毎', 'Household kits received': '家事用品を受領しました', 'Household kits, source': '家事用品の送付元', 'How did boys 13-17yrs spend most of their time prior to the disaster?': '災害発生前、13-17歳の男子がよく集まっていた場所と活動は?', 'How did boys <12yrs spend most of their time prior to the disaster?': '災害発生前、12歳以下の男子がよく集まっていた場所と活動は?', 'How did boys girls 13-17yrs spend most of their time prior to the disaster?': '災害発生前、13-17歳の女子がよく集まっていた場所と活動は?', 'How did girls <12yrs spend most of their time prior to the disaster?': '災害発生前、12歳以下の女子がよく集まっていた場所と活動は?', 'How do boys 13-17yrs spend most of their time now?': '現在、13-17歳の男子は普段何をして過ごしていますか?', 'How do boys <12yrs spend most of their time now?': '現在、12歳以下の男子は普段何をして過ごしていますか?', 'How do girls 13-17yrs spend most of their time now?': '現在、13-17歳の女子は普段何をして過ごしていますか?', 'How do girls <12yrs spend most of their time now?': '現在、12歳以下の女子は普段何をして過ごしていますか?', 'How does it work?': 'どのように動きますか?', 'How is this person affected by the disaster? (Select all that apply)': 'この人物の被災状況を記載してください(該当する項目を全て選択)', 'How long does it take you to reach the available water resources? 
Specify the time required to go there and back, including queuing time, by foot.': '水資源を確保できる地点までの距離を記載します。徒歩で往復し、待ち時間も含めた時間を記載してください。', 'How long does it take you to walk to the health service?': '医療サービスが提供されている場所まで、徒歩で必要な時間を記載します。', 'How long will the food last?': '食料の残存予測期間', 'How long will this water resource last?': '水の供給が枯渇する時期', 'How many Boys (0-17 yrs) are Dead due to the crisis': '災害で死亡した少年の数(0-17歳)', 'How many Boys (0-17 yrs) are Injured due to the crisis': '災害で負傷した少年の数(0-17歳)', 'How many Boys (0-17 yrs) are Missing due to the crisis': '災害で行方不明となった少年の数(0-17歳)', 'How many Girls (0-17 yrs) are Dead due to the crisis': '災害で死亡した少女の数(0-17歳)', 'How many Girls (0-17 yrs) are Injured due to the crisis': '災害で負傷した少女の数(0-17歳)', 'How many Girls (0-17 yrs) are Missing due to the crisis': '災害で行方不明になった少女の数(0-17歳)', 'How many Men (18 yrs+) are Dead due to the crisis': '災害で死亡した男性の数(18歳以上)', 'How many Men (18 yrs+) are Injured due to the crisis': '災害で負傷した男性の数(18歳以上)', 'How many Men (18 yrs+) are Missing due to the crisis': '災害で行方不明となった男性の数(18歳以上)', 'How many Women (18 yrs+) are Dead due to the crisis': '災害で死亡した女性の数(18歳以上)', 'How many Women (18 yrs+) are Injured due to the crisis': '災害で負傷した女性の数(18歳以上)', 'How many Women (18 yrs+) are Missing due to the crisis': '災害で行方不明となった女性の数(18歳以上)', 'How many days will the supplies last?': '支援物資がなくなるまでの日数', 'How many doctors in the health centers are still actively working?': 'ヘルスセンター内の医師の人数を記載してください', 'How many houses are uninhabitable (uninhabitable = foundation and structure destroyed)?': '居住不可になった家屋数を記載してください(居住不可 = 基礎構造や土台部分の破壊など)', 'How many houses suffered damage but remain usable (usable = windows broken, cracks in walls, roof slightly damaged)?': '災害によって破損したが、まだ利用が可能である住居の数を記載してください(利用可能 = 窓の破壊、壁のヒビ、屋根の軽微な破損など)', 'How many latrines are available in the village/IDP centre/Camp?': '村落/IDPセンター/仮泊施設内で利用可能なトイレの数を記載してください', 'How many midwives in the health centers are still actively working?': '医療センター内の助産師の人数を記載してください', 'How many new 
cases have been admitted to this facility in the past 24h?': '過去24時間でこの施設で受け入れたケースの数は?', 'How many nurses in the health centers are still actively working?': '保健所で活動可能な看護師は何人居ますか?', 'How many of the patients with the disease died in the past 24h at this facility?': 'この施設で過去24時間で何人の患者がこの病気で亡くなりましたか?', 'How many of the primary school age boys (6-12) in the area are not attending school?': 'この地域の、登校していない学童期男児(6-12歳)の数を記載してください。', 'How many of the primary school age girls (6-12) in the area are not attending school?': 'この地域の、登校していない学童期女児(6-12歳)の数を記載してください。', 'How many of the primary/secondary schools are now open and running a regular schedule of class?': '平常通りの授業を実施できている小学校・中学校・高校の数を記入してください', 'How many of the secondary school age boys (13-18) in the area are not attending school?': 'この地域の、登校していない中高校生年齢男子(13-18歳)の数を記載してください。', 'How many of the secondary school age girls (13-18) in the area are not attending school?': 'この地域の、登校していない女子中高生(13-18歳)の数を記載してください。', 'How many patients with the disease are currently hospitalized at this facility?': 'この病気のためにこの施設に入院している患者は現在何人ですか?', 'How many primary school age boys (6-12) are in the affected area?': '被災地域内の学童期男児(6-12歳)の数を記載してください', 'How many primary school age girls (6-12) are in the affected area?': '被災地域内の学童期女児(6-12歳)の数を記載してください。', 'How many primary/secondary schools were opening prior to the disaster?': '災害発生前に授業が行われていた小学校・中学校・高校の数を記載してください', 'How many secondary school age boys (13-18) are in the affected area?': '被災地域内の男子中学生・男子高校生(13-18歳)の数を記載してください', 'How many secondary school age girls (13-18) are in the affected area?': '被災地域内の中高生年齢女子(13-18歳)の数を記載してください。', 'How many teachers have been affected by the disaster (affected = unable to work)?': '被災し、授業ができない状態の教師の人数を記載してください', 'How many teachers worked in the schools prior to the disaster?': '災害発生前の教師の人数を記載してください', 'How much detail is seen. A high Zoom level means lot of detail, but not a wide area. 
A low Zoom level means seeing a wide area, but not a high level of detail.': 'どの程度詳細な情報が表示されるかを定義します。ズームすることで詳細が表示されるようになりますが、そのかわり、広域を見渡すことができなくなります。逆に、ズームしないことで広域を表示できますが、詳細情報の確認は行えなくなります。', 'Human Resource Management': '人的資源マネージメント', 'Human Resource': '人的資源', 'Human Resources Management': '人的資源管理', 'Human Resources': '人的資源', 'Humanitarian NGO': '人道支援NGO', 'Hurricane Force Wind': 'ハリケーンの風力', 'Hurricane': 'ハリケーン', 'Hygiene NFIs': '衛生用品', 'Hygiene kits received': '衛生用品を受領した', 'Hygiene kits, source': '衛生用品の送付元', 'Hygiene practice': '衛生習慣', 'Hygiene problems': '衛生上の問題', 'Hygiene': '衛生', 'I am available in the following area(s)': '以下の地域を担当できます', 'ID Label': 'IDラベル', 'ID Label: ': 'IDラベル: ', 'ID Tag Number': 'IDタグ番号', 'ID Tag': 'ID タグ', 'ID type': 'IDタイプ', 'Ice Pressure': '氷結圧力', 'Iceberg': 'アイスバーグ', 'Ideally a full URL to the source file, otherwise just a note on where data came from.': 'できればソースファイルの完全なURLを記載します。難しい場合はデータ入手元のメモでも構いません。', 'Identification Report': 'IDレポート', 'Identification Reports': 'IDレポート', 'Identification Status': 'IDステータス', 'Identification label of the Storage bin.': '備蓄コンテナの区別用ラベル番号。', 'Identification': 'ID', 'Identified as': '判明した身元', 'Identified by': 'によって識別された', 'Identity Details': '身元確認の詳細', 'Identity added': '身元情報を追加しました', 'Identity deleted': '身元確認を削除しました', 'Identity updated': '身元確認を更新しました', 'Identity': '身元確認', 'If Staff have login accounts then they are given access to edit the details of the': 'スタッフがログイン用アカウントを有している場合、以下項目の詳細を編集することができます:', 'If Unit = m, Base Unit = Km, then multiplicator is 0.0001 since 1m = 0.001 km.': '「Unit = m, Base Unit = Km」の場合、「1m = 0.001 km」なので乗数は0.0001 です。', 'If a user verifies that they own an Email Address with this domain, the Approver field is used to determine whether & by whom further approval is required.': 'このドメインの電子メールアドレスを所有するユーザーを認証する場合は、承認がさらに必要かどうか、必要なら誰が承認するか、を決めるのに承認者フィールドを使用します。', 'If enabled then a log is maintained of all records a user accesses. 
If disabled then it can still be enabled on a per-module basis.': '有効にすると、ユーザーがアクセスしたときに、全てのレコードがログに保存されます。無効にすると、モジュール毎に有効にすることができます。', 'If enabled then a log is maintained of all records a user edits. If disabled then it can still be enabled on a per-module basis.': '有効にすると、ユーザーが編集したすべてのレコードを記録します。無効にすると、モジュール毎に有効にできます。', 'If neither are defined, then the Default Marker is used.': 'もし両方共定義されていない場合、デフォルトマーカーが使われます。', 'If no marker defined then the system default marker is used': 'マーカーが定義されていない場合は、システムのデフォルトマーカーを使用します。', 'If no, specify why': 'いいえ、の場合はその理由を記載してください', 'If none are selected, then all are searched.': 'もしなにも選択しなければ、全てを検索します', 'If the location is a geographic area, then state at what level here.': '場所が地理的に確定できる場所ならば、その場所のレベルを記載してくだい。', 'If the request is for type "Other", you should enter a summary of the request here.': '支援要請が"その他"の場合、概要をここに入力する必要があります', 'If this field is populated then a user with the Domain specified will automatically be assigned as a Staff of this Organization': 'この項目が設定されている場合、ユーザーは、登録の際、この団体のスタッフとして登録されるように指定することができます', 'If this is set to True then mails will be deleted from the server after downloading.': 'Trueに設定されている場合は、メールはダウンロード後にサーバーから削除されます。', 'If this record should be restricted then select which role is required to access the record here.': 'このレコードへのアクセスを制限する際には、アクセスに必要となる権限を選択してください', 'If this record should be restricted then select which role(s) are permitted to access the record here.': 'このレコードを制限したい場合、アクセスを許可する権限を指定してください。', 'If yes, specify what and by whom': '「はい」の場合、供給される食料と供給元', 'If yes, which and how': '「はい」の場合、混乱している場所や原因を記載', 'If you do not enter a Reference Document, your email will be displayed to allow this data to be verified.': '参照文書を入力しない場合は、データ検証のために入力者の電子メールが表示されます。', 'If you know what the Geonames ID of this location is then you can enter it here.': 'このロケーションの Geonames ID がある場合、ここに入力してください。', 'If you know what the OSM ID of this location is then you can enter it here.': 'このロケーションの OSM ID 
がある場合、ここに入力してください。', 'If you need to add a new document then you can click here to attach one.': '文書の添付はこのページから可能です。', 'If you want several values, then separate with': '複数の値を入力したい場合、この文字で分割してください : ', 'If you would like to help, then please': 'ご協力いただける方は登録をお願いします', 'Illegal Immigrant': '不法移民', 'Image Details': '画像の詳細', 'Image Tags': '画像のタグ', 'Image Type': '画像のタイプ', 'Image Upload': '画像のアップロード', 'Image added': '画像を追加しました', 'Image deleted': '画像を削除しました', 'Image updated': '画像を更新しました', 'Image': '画像', 'Image/Attachment': '画像/添付資料', 'Image/Other Attachment': '画像/その他の添付ファイル', 'Imagery': '画像', 'Images': '画像', 'Immediate reconstruction assistance, Rank': '建築物の緊急修理 / 再建築支援、ランク', 'Impact Assessment Summaries': '災害影響範囲アセスメントの概要', 'Impact Assessments': '災害影響範囲アセスメント', 'Impact Baselines': '影響範囲の基準値', 'Impact Details': '被害の詳細', 'Impact Type Details': '災害影響のタイプ詳細', 'Impact Type added': '災害の影響タイプを追加しました', 'Impact Type deleted': '影響範囲タイプを削除しました', 'Impact Type updated': '災害影響のタイプを更新しました', 'Impact Type': '災害影響タイプ', 'Impact Types': '災害影響のタイプ', 'Impact added': '被災影響を追加しました', 'Impact deleted': '影響範囲を削除しました', 'Impact updated': '被災状況を更新しました', 'Impacts': '影響', 'Import & Export Data': 'データのインポートとエクスポート', 'Import Data': 'データのインポート', 'Import Job': 'Jobのインポート', 'Import Jobs': 'Jobsのインポート', 'Import and Export': 'インポートとエクスポート', 'Import from Ushahidi Instance': 'Ushahidi インスタンスから設定をインポート', 'Import if Master': 'マスターなら取り込む', 'Import job created': 'Import jobを作成しました', 'Import multiple tables as CSV': '複数のテーブルをCSVとしてインポート', 'Import': 'インポート', 'Import/Export': 'インポート/エクスポート', 'Important': '重要', 'Importantly where there are no aid services being provided': '救護サービスが提供されていない地域において重要となります', 'Imported': 'インポートしました', 'Importing data from spreadsheets': 'スプレッドシートからデータをインポートしています', 'Improper decontamination': '不適切な汚染の除去', 'Improper handling of dead bodies': '誤った扱いをされている遺体', 'In GeoServer, this is the Layer Name. 
Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).': 'In GeoServerでは、これはレイヤ名です。WFS getCapabilitiesでは、これはコロン(:)後のFeatureType名の部分です。', 'In GeoServer, this is the Workspace Name. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).': 'GeoServer では、これはワークスペース名です。WFS getCapabilities では、これはコロン「:」の前の FeatureType の部分となります。', 'In Inventories': 'この物資の在処', 'In Process': '実行中', 'In Progress': '実行中', 'In Transit': '輸送中', 'In Window layout the map maximises to fill the window, so no need to set a large value here.': 'この地図のウィンドウレイアウトは、全体を覆い隠します。従って、ここで大きな値を入力する必要はありません', 'In general, what are the greatest needs of older people, people with disabilities, children, youth and women in your community?': '一般的に、コミュニティ内の高齢者、障がい者、子供、青年、女性たちが最も必要としている物資やサービスがなんであるかを記載してください', 'Inbound Mail Settings': '着信メール設定', 'Inbox': '受信箱', 'Incident Categories': 'インシデントカテゴリ', 'Incident Details': 'インシデントの詳細', 'Incident Report Details': 'インシデントレポートの詳細', 'Incident Report added': '災害影響範囲レポートを追加しました', 'Incident Report deleted': 'インシデントレポートを削除しました', 'Incident Report updated': 'インシデントレポートを更新しました', 'Incident Report': 'インシデントレポート', 'Incident Reporting System': 'インシデントの報告を行ないます', 'Incident Reporting': 'インシデントレポート', 'Incident Reports': 'インシデントレポート', 'Incident added': 'インシデントを追加しました', 'Incident deleted': 'インシデントを削除しました', 'Incident updated': 'インシデントを更新しました', 'Incident': 'インシデント', 'Incidents': 'インシデント', 'Incoming Shipment canceled': '到着する配送が取消しされました', 'Incoming Shipment updated': '入荷した物資が更新されました', 'Incoming': '入荷', 'Incomplete': '未完了', 'Individuals': '個人', 'Industrial Crime': '産業犯罪', 'Industrial': '産業', 'Industry Fire': '工場から出火', 'Industry close to village/camp': '村落/仮泊施設の周辺に工場が存在', 'Infant (0-1)': '乳児(0-1歳)', 'Infectious Disease': '感染症', 'Infectious Diseases': '感染症', 'Infestation': '感染', 'Informal Leader': '非公式なリーダー', 'Informal camp': '非指定避難所', 'Information gaps': '情報のギャップ', 'Infusion catheters available': '注入カテーテルが利用可能', 'Infusion catheters need 
per 24h': '24時間毎に必要な注入カテーテル数', 'Infusion catheters needed per 24h': '24時間ごとに、注入カテーテルが必要', 'Infusions available': '点滴が利用可能', 'Infusions needed per 24h': '24時間毎に必要な点滴の数', 'Input Job': 'Jobのインポート', 'Inspected': '調査済み', 'Inspection Date': '調査した日付', 'Inspection date and time': '調査日時', 'Inspection time': '調査した時刻', 'Inspector ID': '調査者ID', 'Instance Type': 'インスタンスタイプ', 'Instant Porridge': 'インスタント粥', 'Institution': 'その他の組織', 'Insufficient Privileges': '権限が足りません', 'Insufficient vars: Need module, resource, jresource, instance': '不十分な変数: module, resource, jresource, instance が必要です', 'Insufficient': '不足', 'Intake Items': 'アイテムの受け入れ', 'Intergovernmental Organization': '国際政府間組織', 'Interior walls, partitions': '室内の壁、仕切り', 'Internal Features': '内部機能', 'Internal State': '内部状態', 'International NGO': '国際NGO', 'International Organization': '国際機関', 'International Staff': '国外からのスタッフ', 'Intervention': '介入', 'Interview taking place at': 'インタビュー実施場所', 'Invalid Query': '無効なクエリ', 'Invalid email': '無効な電子メール', 'Invalid login': '無効なログイン', 'Invalid request!': 'リクエストは無効です。', 'Invalid ticket': '無効なチケット', 'Invalid': '無効な', 'Inventories with Item': '在庫アイテム', 'Inventories': '在庫管理', 'Inventory Item Details': '救援物資の在庫詳細', 'Inventory Item added': '救援物資の在庫を追加しました', 'Inventory Item deleted': '備蓄物資を削除しました', 'Inventory Item updated': '備蓄物資を更新しました', 'Inventory Item': '備蓄物資', 'Inventory Items Available for Request Item': '要求された物資に適合する、倉庫内の物資', 'Inventory Items': '備蓄物資', 'Inventory Management': '物資の管理', 'Inventory Store Details': '物資集積地点の詳細', 'Inventory Store added': '物資集積地点を追加しました', 'Inventory Store deleted': '物資集積地点を削除しました', 'Inventory Store updated': '物資集積地点を更新しました', 'Inventory Store': '物資集積地点', 'Inventory Stores': '物資集積地点', 'Inventory functionality is available for:': '備蓄機能を利用可能:', 'Inventory of Effects': '救援物資の影響', 'Inventory': '在庫', 'Inventory/Ledger': '在庫 / 元帳', 'Is adequate food and water available for these institutions?': '関係者に対して十分な水と食料が供給されていますか?', 'Is it safe to collect water?': '水の確保は安全に行えるか?', 
'Is there any industrial or agro-chemical production close to the affected area/village?': '村落/集落の近くに、工場あるいは農業化学プラントなどが存在しますか?', 'Is this a strict hierarchy?': 'これは厳密な階層構造ですか?', 'Issuing Authority': '発行機関', 'It is built using the Template agreed by a group of NGOs working together as the': '聞き取り項目のテンプレートは、以下リンクのNGO組織と協同で作成されています。', 'Item Added to Shipment': '輸送情報に物資を追加する', 'Item Catalog Categories': '物資カタログカテゴリ', 'Item Catalog Category Details': '救援物資カタログのカテゴリ詳細', 'Item Catalog Category added': '救援物資カタログのカテゴリを追加しました', 'Item Catalog Category deleted': '救援物資カタログのカテゴリを削除しました', 'Item Catalog Category updated': '物資カタログカテゴリを更新しました', 'Item Catalog Category': '救援物資カタログのカテゴリ', 'Item Catalog Details': '物資カタログの詳細', 'Item Catalog added': '救援物資カタログを追加しました', 'Item Catalog deleted': '物資カタログを削除しました', 'Item Catalog updated': '物資カタログを更新しました', 'Item Catalogs': '救援物資カタログ', 'Item Categories': '物資カテゴリ', 'Item Category Details': '物資カテゴリの詳細', 'Item Category added': '救援物資カテゴリを追加しました', 'Item Category deleted': '救援物資カテゴリを削除しました', 'Item Category updated': '物資カテゴリを更新しました', 'Item Category': '物資カテゴリ', 'Item Details': '救援物資の詳細', 'Item Pack Details': '救援物資パックの詳細', 'Item Pack added': '物資パックを追加しました', 'Item Pack deleted': '救援物資のパックを削除しました', 'Item Pack updated': '救援物資パックを更新しました', 'Item Packs': '物資パック', 'Item Sub-Categories': '救援物資のサブカテゴリ', 'Item Sub-Category Details': '物資サブカテゴリの詳細', 'Item Sub-Category added': '救援物資のサブカテゴリを追加しました', 'Item Sub-Category deleted': '物資サブカテゴリを削除しました', 'Item Sub-Category updated': '救援物資サブカテゴリを更新しました', 'Item Sub-Category': '物資サブカテゴリ', 'Item added to shipment': '物資が輸送に回りました', 'Item added': '救援物資を追加しました', 'Item already in Bundle!': '物資がすでにバンドルに存在しています。', 'Item already in Kit!': '救援物資は既にキットに存在しています', 'Item already in budget!': '物資は既に予算に登録されています', 'Item deleted': '物資を削除しました', 'Item updated': '救援物資を更新しました', 'Item': '物資', 'Items': '救援物資', 'Japan': '日本', 'Japanese': '日本語', 'Jerry can': 'ジェリ缶', 'Jew': 'ユダヤ教徒', 'Job Market': '求人', 'Job Title': '肩書き', 'Jobs': '職業', 'Just Once': '一度だけ', 
'KPIs': 'KPI', 'Key Details': 'Keyの詳細', 'Key added': 'キーを追加しました', 'Key deleted': 'キーを削除しました', 'Key updated': 'キーを更新しました', 'Key': 'キー', 'Keys': 'キー', 'Kit Contents': 'Kitの内容', 'Kit Details': 'Kitの詳細', 'Kit Updated': 'キットを更新しました', 'Kit added': 'キットを追加しました', 'Kit deleted': 'キットを削除しました', 'Kit updated': 'キットを更新しました', 'Kit': 'キット', 'Kits': 'キット', 'Known Identities': '既知のID', 'Known incidents of violence against women/girls': '女性に対する暴力行為が発生した', 'Known incidents of violence since disaster': '災害発生後に暴力行為が発生した', 'LICENSE': 'ライセンス', 'LMS Administration': 'LMSの管理', 'Label': 'ラベル', 'Lack of material': '資材不足', 'Lack of school uniform': '学校制服が不足', 'Lack of supplies at school': '学校用物資の不足', 'Lack of transport to school': '学校への輸送手段の不足', 'Lactating women': '授乳中の女性の数', 'Lahar': 'ラハール', 'Landslide': '地すべり', 'Language': 'Language 言語', 'Last Name': '名前', 'Last known location': '最後に目撃された場所', 'Last name': '名前', 'Last synchronization time': 'データ同期の最終実施時刻', 'Last updated': '最終更新日', 'Last updated by': '最終更新者', 'Last updated on': '直近のアップデート実施時刻', 'Latitude & Longitude': '緯度&経度', 'Latitude is North-South (Up-Down). 
Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere.': '緯度は南北方向(上下)を定義します。赤道ではゼロ、北半球ではプラス、南半球ではマイナスとなります。', 'Latitude is North-South (Up-Down).': '緯度は南北(上下)です', 'Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere.': '緯度は赤道では0、北半球ではプラス、南半球ではマイナスになります', 'Latitude should be between': '緯度の値として有効な値は', 'Latitude': '緯度', 'Latrines': 'トイレ', 'Law enforcement, military, homeland and local/private security': '法執行機関、自衛隊、警察および警備会社', 'Layer Details': 'レイヤの詳細', 'Layer added': 'レイヤを追加しました', 'Layer deleted': 'レイヤを削除しました', 'Layer updated': 'レイヤを更新しました', 'Layer': 'レイヤ', 'Layers updated': 'レイヤを更新しました', 'Layers': 'レイヤ', 'Layout': 'レイアウト', 'Legend Format': '凡例形式', 'Length': '長さ', 'Level 1 Assessment Details': 'レベル1アセスメントの詳細', 'Level 1 Assessment added': 'レベル1アセスメントを追加しました', 'Level 1 Assessment deleted': 'レベル1のアセスメントを削除しました', 'Level 1 Assessment updated': 'レベル1アセスメントを更新しました', 'Level 1 Assessments': 'レベル1 アセスメント', 'Level 1': 'レベル1', 'Level 2 Assessment Details': 'レベル2アセスメントの詳細', 'Level 2 Assessment added': 'レベル2アセスメントを追加しました', 'Level 2 Assessment deleted': 'レベル2アセスメントを削除しました', 'Level 2 Assessment updated': 'レベル2アセスメントを更新しました', 'Level 2 Assessments': 'レベル2アセスメント', 'Level 2 or detailed engineering evaluation recommended': 'レベル2あるいは詳細な技術的評価を行うことを推奨します', 'Level 2': 'レベル2', 'Level': 'レベル', 'Library support not available for OpenID': 'OpenIDのライブラリサポートが利用できません', 'License Plate': '個人認証カード', 'Line': '行', 'LineString': '折れ線', 'Link Item & Shipment': 'アイテムと輸送を紐付ける', 'Link an Item & Shipment': 'アイテムと出荷を結び付ける', 'Linked Records': '参照しているレコード', 'Linked records': '関連しているレコード', 'List / Add Baseline Types': '基準値タイプの一覧 / 追加', 'List / Add Impact Types': '災害影響のタイプを表示 / 追加', 'List / Add Services': 'サービスの一覧表示 / 追加', 'List / Add Types': 'タイプの一覧表示 / 追加', 'List Activities': '支援活動一覧', 'List Aid Requests': '援助要請の一覧', 'List All Entries': '全てのエントリ一覧', 'List All Memberships': '全てのメンバシップ一覧', 'List All 
Reports': '報告すべての一覧', 'List All': '全項目一覧', 'List Alternative Items': '代わりの物資一覧', 'List Assessment Summaries': 'アセスメント要約の一覧', 'List Assessments': 'アセスメント一覧', 'List Asset Assignments': '資産割り当ての一覧', 'List Assets': '資産一覧', 'List Baseline Types': '基準値タイプ一覧', 'List Baselines': '基準値一覧', 'List Brands': '銘柄の一覧', 'List Budgets': '予算の一覧', 'List Bundles': 'Bundleの一覧', 'List Catalog Items': '物資カタログの一覧', 'List Category<>Sub-Category<>Catalog Relation': 'Category<>Sub-Category<>Catalog 関係一覧', 'List Checklists': 'チェックリスト一覧', 'List Cluster Subsectors': 'クラスタのサブセクタ一覧', 'List Cluster': 'クラスタ一覧', 'List Clusters': 'クラスタ一覧', 'List Commitment Items': 'コミットされた救援物資の一覧', 'List Commitments': 'コミットメントの一覧', 'List Configs': '設定一覧', 'List Conflicts': 'データ競合一覧', 'List Contact Information': '連絡先情報の一覧', 'List Contacts': '連絡先一覧', 'List Credentials': '証明書一覧', 'List Current': '現在の一覧', 'List Distribution Items': '配給物資リスト', 'List Distributions': '配給所リスト', 'List Documents': '文書の一覧', 'List Donors': '資金提供組織一覧', 'List Feature Layers': 'Featureレイヤリスト', 'List Flood Reports': '洪水レポート一覧', 'List GPX Layers': 'GPXレイヤ一覧', 'List Groups': 'グループ一覧', 'List Groups/View Members': 'グループを一覧/メンバーを表示', 'List Hospitals': '病院の一覧', 'List Identities': 'ID一覧', 'List Images': '画像の一覧', 'List Impact Assessments': '災害影響範囲アセスメント一覧', 'List Impact Types': '災害影響のタイプ一覧', 'List Impacts': '被害一覧', 'List Incident Reports': 'インシデントレポート一覧', 'List Incidents': 'インシデント一覧', 'List Inventory Items': '備蓄物資リスト', 'List Inventory Stores': '物資集積地点リスト', 'List Item Catalog Categories': '救援物資カタログのカテゴリ一覧', 'List Item Catalogs': '救援物資カタログ一覧', 'List Item Categories': '物資カテゴリ一覧', 'List Item Packs': '物資パックの一覧', 'List Item Sub-Categories': '物資サブカテゴリ一覧', 'List Items': '救援物資一覧', 'List Keys': 'Keyの一覧', 'List Kits': 'Kit一覧', 'List Layers': 'レイヤ一覧', 'List Level 1 Assessments': 'レベル1アセスメントの一覧', 'List Level 1 assessments': 'レベル1アセスメント一覧', 'List Level 2 Assessments': 'レベル2のアセスメント一覧', 'List Level 2 assessments': 'レベル2アセスメント一覧', 'List Locations': 'ロケーション一覧', 'List Log 
Entries': 'ログエントリ一覧', 'List Map Profiles': '地図設定の一覧', 'List Markers': 'マーカー一覧', 'List Members': 'メンバ一覧', 'List Memberships': 'メンバシップ一覧', 'List Messages': 'メッセージ一覧', 'List Metadata': 'メタデータ一覧', 'List Missing Persons': '行方不明者リストを表示', 'List Need Types': '需要タイプ一覧', 'List Needs': 'ニーズ一覧', 'List Notes': '追加情報一覧', 'List Offices': 'オフィス一覧', 'List Organizations': '団体一覧', 'List Peers': 'データ同期先一覧', 'List Personal Effects': '携帯品のリスト', 'List Persons': '人物情報一覧', 'List Photos': '写真リスト', 'List Positions': '場所一覧', 'List Problems': '問題一覧', 'List Projections': '地図投影法リスト', 'List Projects': 'プロジェクト一覧', 'List Rapid Assessments': '被災地の現況アセスメント一覧', 'List Received Items': '受領された物資の一覧', 'List Received Shipments': '受領された輸送一覧', 'List Records': 'レコード一覧', 'List Registrations': '登録証明書の一覧', 'List Reports': 'レポート一覧', 'List Request Items': '物資要請リスト', 'List Requests': '支援要請の一覧', 'List Resources': 'リソース一覧', 'List Responses': '回答の一覧', 'List Rivers': '河川リスト', 'List Roles': '役割一覧', 'List Sections': 'Section一覧', 'List Sectors': '活動分野の一覧', 'List Sent Items': '送付した物資一覧', 'List Sent Shipments': '送付済み物資一覧', 'List Service Profiles': 'サービスプロファイル一覧', 'List Settings': '設定一覧', 'List Shelter Services': '避難所での提供サービス一覧', 'List Shelter Types': '避難所タイプ一覧', 'List Shelters': '避難所の一覧', 'List Shipment Transit Logs': '物資輸送履歴の一覧', 'List Shipment/Way Bills': '輸送費/渡航費の一覧', 'List Shipment<>Item Relation': '輸送と物資の関連性一覧', 'List Shipments': '配送の一覧', 'List Sites': 'Site一覧', 'List Skill Types': 'スキルタイプを一覧表示', 'List Skills': 'スキルを一覧表示', 'List Solutions': '解決案一覧', 'List Staff Types': 'スタッフタイプ一覧', 'List Staff': 'スタッフ一覧', 'List Status': '状況一覧', 'List Storage Bin Type(s)': 'Storage Binタイプ一覧', 'List Storage Bins': 'Storage Bin一覧', 'List Storage Location': '備蓄地点の一覧', 'List Subscriptions': '寄付申し込み一覧', 'List Support Requests': '支援要求のリスト', 'List Survey Answers': '調査の回答の一覧', 'List Survey Questions': 'Survey Question一覧', 'List Survey Sections': 'Survey Sectionsの一覧', 'List Survey Series': '一連の調査リスト', 'List Survey Templates': '調査テンプレートの一覧', 
'List TMS Layers': 'TMS レイヤの一覧', 'List Tasks': 'タスク一覧', 'List Teams': 'チーム一覧', 'List Themes': 'テーマ一覧', 'List Tickets': 'チケット一覧', 'List Tracks': '追跡情報の一覧', 'List Units': '単位一覧', 'List Users': 'ユーザ一覧', 'List Volunteers': 'ボランティアの表示', 'List WMS Layers': 'WMSレイヤ一覧', 'List Warehouse Items': '倉庫に備蓄中の物資一覧', 'List Warehouses': '倉庫の一覧', 'List all': '全項目を表示', 'List of Items': '物資一覧', 'List of Missing Persons': '行方不明者リスト', 'List of Peers': 'データ同期先一覧', 'List of Reports': 'レポート一覧', 'List of Requests': '支援要請の一覧', 'List of Roles': '権限リスト', 'List of Spreadsheets uploaded': 'アップロード済スプレッドシート一覧', 'List of Spreadsheets': 'スプレッドシート一覧', 'List of Volunteers for this skill set': 'このスキルを所持するボランティアの一覧', 'List of addresses': '住所一覧', 'List unidentified': '身元不明者の一覧', 'List': '一覧', 'List/Add': '一覧/追加', 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities': '救援団体は自身の支援活動の内容と場所を登録し、公開することで、他の組織との活動を調整することが可能となります。', 'Live Help': 'ライブヘルプ', 'Livelihood': '生計', 'Load Cleaned Data into Database': '整形したデータをデータベースへロード', 'Load Details': '詳細情報の読み込み', 'Load Raw File into Grid': 'Rawファイルをグリッドにロードしてください', 'Load the details to help decide which is the best one to keep out of the 2.': '2つのうちどちらを残すほうがよいか判断するため、詳細情報を確認します。', 'Loading Locations': 'ロケーションデータロード中', 'Loading Locations...': '位置を読込みしています ...', 'Loading': '読み込み中', 'Local Name': 'ローカル名', 'Local Names': 'ローカル名', 'Location 1': 'ロケーション 1', 'Location 2': 'ロケーション 2', 'Location De-duplicated': 'ロケーションの重複解消', 'Location Details': 'ロケーションの詳細', 'Location Hierarchy Level 0 Name': 'ロケーション階層レベル0の名前', 'Location Hierarchy Level 1 Name': 'ロケーション階層レベル1の名前', 'Location Hierarchy Level 2 Name': 'ロケーション階層レベル2の名前', 'Location Hierarchy Level 3 Name': 'ロケーション階層レベル3の名前', 'Location Hierarchy Level 4 Name': 'ロケーション階層レベル4の名前', 'Location Hierarchy Level 5 Name': 'ロケーション階層レベル5の名前', 'Location added': 'ロケーションを追加しました', 'Location cannot be converted into a group.': 'ロケーションはグループに変換できません', 'Location deleted': 'ロケーションを削除しました', 'Location details': 
'ロケーションの詳細', 'Location group cannot be a parent.': 'ロケーショングループは親にできません', 'Location group cannot have a parent.': 'ロケーショングループに親情報がありません。', 'Location updated': 'ロケーションを更新しました', 'Location': 'ロケーション', 'Location: ': 'ロケーション: ', 'Locations De-duplicator': 'ロケーションの重複解消', 'Locations of this level need to have a parent of level': 'このレベルのロケーションには、親属性となるレベルが必要です', 'Locations should be different!': '異なる位置を設定してください!', 'Locations': 'ロケーション', 'Lockdown': '厳重監禁', 'Log Entry Details': 'ログエントリの詳細', 'Log entry added': 'ログエントリを追加しました', 'Log entry deleted': 'ログエントリを削除しました', 'Log entry updated': 'ログエントリを更新しました', 'Log': 'ログ', 'Logged in': 'ログインしました', 'Logged out': 'ログアウトしました', 'Login': 'ログイン', 'Logistics Management System': '物流管理システム', 'Logistics Management': '物流管理', 'Logistics': '物流', 'Logo file %s missing!': 'ロゴファイル%sが見つかりません。', 'Logo': 'ロゴ', 'Logout': 'ログアウト', 'Long Text': '詳細テキスト', 'Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. These need to be added in Decimal Degrees.': '経度は東西方向(横)の座標軸です。緯度は南北方向(上下)の座標軸です。赤道ではゼロ、北半球ではプラス、南半球ではマイナスとなります。経度は、子午線(グリニッジ標準時)をゼロとして、東(ヨーロッパ、アジア)がプラスとなります。西(大西洋、アメリカ)がマイナスです。10進法で記入してください。', 'Longitude is West - East (sideways). Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas.': '経度は東西(横)です。経度は子午線(グリニッジ標準時)でゼロ、東(ヨーロッパ、アジア)でプラスです。西(大西洋、アメリカ)でマイナスです。', 'Longitude is West - East (sideways).': '緯度は東西です(横方向)', 'Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. 
Longitude is negative to the west, across the Atlantic and the Americas.': '経度はグリニッジ子午線(グリニッジ標準時)上が0度です。東側に向かってヨーロッパやアジアの各地で正の値となります。西に向かって大西洋やアメリカの各地で負の値となります。', 'Longitude should be between': '経度の値の有効な範囲は', 'Longitude': '経度', 'Looking up Parents': '親を検索', 'Looting': '略奪', 'Lost Password': 'パスワードの紛失', 'Lost': '行方不明', 'Low': '低', 'Magnetic Storm': '磁気嵐', 'Main cash source': '主な現金収入源', 'Main income sources before disaster': '災害発生前の主な収入源', 'Major expenses': '主な費用', 'Major outward damage': '大きな損傷あり', 'Make Commitment': 'コミットの作成', 'Make Pledge': '寄付の作成', 'Make Request': '支援を要請する', 'Make a Request for Aid': '援助要請を登録', 'Make a Request': '支援要請を登録', 'Make preparations per the <instruction>': '<instruction>毎に準備作業を行う', 'Male': '男性', 'Malnutrition present prior to disaster': '災害前から栄養が失調発生していた', 'Manage Category': 'カテゴリ管理', 'Manage Item catalog': '物資カタログの管理', 'Manage Kits': 'Kitsの管理', 'Manage Relief Item Catalogue': '救援アイテムカタログの管理', 'Manage Sub-Category': 'サブカテゴリの管理', 'Manage Users & Roles': 'ユーザと役割の管理', 'Manage Warehouses/Sites': '倉庫/Sitesの管理', 'Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.': '支援物資、資産、人員、その他のリソースに対する要求を管理します。支援物資が要求された時に在庫と照合します。', 'Manage requests of hospitals for assistance.': '病院からの支援要請の管理', 'Manage volunteers by capturing their skills, availability and allocation': 'ボランティアのスキル、稼働状況、割り当て状況を管理します', 'Manage': '管理', 'Manager': 'マネージャ', 'Managing Office': 'オフィスの管理', 'Managing, Storing and Distributing Relief Items': '救援物資の保管、流通、配布状況を管理します', 'Managing, Storing and Distributing Relief Items.': '救援物資の管理、保存、配布状況を管理します。', 'Mandatory. In GeoServer, this is the Layer Name. Within the WFS getCapabilities, this is the FeatureType Name part after the colon(:).': '必須項目。GeoServerでのこの項目はレイヤー名となります。WFSの get Capabilitiesでは、コロン( : )の後に付与される FeatureTypeとして表示されます。', 'Mandatory. 
The URL to access the service.': '省略できません。サービスにアクセスするためのURLです。', 'Manual Synchronization': 'データ手動同期', 'Manual': 'マニュアル', 'Many': '多数', 'Map Profile added': '地図の設定を追加しました', 'Map Profile deleted': '地図設定を削除しました', 'Map Profile updated': '地図設定を更新しました', 'Map Profile': '地図の設定', 'Map Profiles': '地図の設定', 'Map Height': '地図の縦高', 'Map Service Catalog': '地図サービスカタログ', 'Map Settings': '地図の設定', 'Map Viewing Client': '地図閲覧クライアント', 'Map Width': '地図の横幅', 'Map of Hospitals': '病院の地図', 'Map': '地図', 'Mapping': 'マッピング', 'Marine Security': '海上保安', 'Marital Status': '婚姻状況', 'Marker Details': 'マーカーの詳細', 'Marker added': 'マーカーを追加しました', 'Marker deleted': 'マーカーを削除しました', 'Marker updated': 'マーカーを更新しました', 'Marker': 'マーカー', 'Markers': 'マーカー', 'Master Message Log to process incoming reports & requests': '受け取ったレポートと要求を処理するマスターメッセージログ', 'Master Message Log': 'マスターメッセージログ', 'Match Percentage': '一致率', 'Match Requests': '支援要請マッチ', 'Match percentage indicates the % match between these two records': 'マッチの割合は、2つのレコードの間のマッチ状況をあわらします', 'Matching Catalog Items': '適合する救援物資カタログ', 'Matching Records': '一致するレコード', 'Matrix of Choices (Multiple Answers)': '選択肢 (複数可)', 'Matrix of Choices (Only one answer)': '選択肢 (複数選択不可)', 'Matrix of Text Fields': 'テキストフィールドのマトリックス', 'Max Persons per Dwelling': '住居ごとの最大収容人数', 'Maximum Weight': '最大重量', 'Maximum weight capacity of the Storage Location followed by choosing the unit from the drop down list.': '最大重量| ドロップダウンリストで単位を選択してから、備蓄地点の最大重量を指定します。', 'Maximum weight capacity of the items the storage bin can contain. 
followed by choosing the unit from the drop down list.': 'storage binに収容することができるアイテムの最大重量を指定します。ドロップダウンリストから、単位を選択してください。', 'Measure Area: Click the points around the polygon & end with a double-click': '観測領域: 多角形の角をクリックし、ダブルクリックで終了', 'Measure Length: Click the points along the path & end with a double-click': '距離を計測: 経路上の中継点をクリックして、終点でダブルクリックしてください', 'Medical and public health': '医療、公衆衛生', 'Medicine': '薬品', 'Medium': '中', 'Megabytes per Month': '1月毎のメガバイト数', 'Member removed from Group': 'メンバシップを削除しました', 'Members': 'メンバ', 'Membership Details': 'メンバシップの詳細', 'Membership updated': 'メンバシップを更新しました', 'Membership': 'メンバシップ', 'Memberships': 'メンバシップ', 'Message Details': 'メッセージの詳細', 'Message Sent': 'メッセージが送信されました', 'Message Variable': 'メッセージ変数', 'Message added': 'メッセージを追加しました', 'Message deleted': 'メッセージを削除しました', 'Message field is required!': 'メッセージは必須です', 'Message sent to outbox': 'メッセージを送信箱に送りました', 'Message updated': 'メッセージを更新しました', 'Message variable': 'メッセージ変数', 'Message': 'メッセージ', 'Messages': 'メッセージ', 'Messaging settings updated': 'メッセージング設定を更新しました', 'Messaging': 'メッセージング', 'Metadata Details': 'メタデータの詳細', 'Metadata added': 'メタデータを追加しました', 'Metadata can be supplied here to be applied to all uploaded photos, if desired.': '必要に応じて、アップロードした全ての画像に適用されるメタデータをここで入力できます。', 'Metadata deleted': 'メタデータを削除しました', 'Metadata updated': 'メタデータを更新しました', 'Metadata': 'メタデータ', 'Meteorite': '隕石落下', 'Meteorological (inc. 
flood)': '気象 (洪水を含む)', 'Method used': '使用されるメソッド', 'Micronutrient malnutrition prior to disaster': '災害前から栄養失調傾向あり', 'Middle Name': 'ミドルネーム', 'Migrants or ethnic minorities': '移民、あるいは少数民族の数', 'Military': '軍隊', 'Minimum Bounding Box': '最小:領域を指定した枠組み', 'Minimum shift time is 6 hours': '最小シフト時間は6時間です。', 'Minor/None': '少数 / なし', 'Minorities participating in coping activities': '少数民族が災害対応に従事', 'Minute': '分', 'Minutes must be a number between 0 and 60': '分には0-60の間の数字を記入してください', 'Minutes must be a number greater than 0 and less than 60': '分数は0から60の間で入力してください', 'Minutes per Month': '一ヶ月に数分間', 'Minutes should be a number greater than 0 and less than 60': '分は0から60の間で入力してください', 'Miscellaneous': 'その他', 'Missing Person Details': '行方不明者の詳細', 'Missing Person Reports': '行方不明者レポート', 'Missing Person': '行方不明者', 'Missing Persons Registry': '行方不明者の登録', 'Missing Persons Report': '行方不明者のレポート', 'Missing Persons': '行方不明者', 'Missing Report': '行方不明レポート', 'Missing Senior Citizen': '高齢者の行方不明', 'Missing Vulnerable Person': '被介護者の行方不明', 'Missing': '行方不明', 'Mobile Assess.': '移動端末アクセス', 'Mobile Basic Assessment': 'モバイルの基本アセスメント', 'Mobile Basic': 'モバイルの基礎', 'Mobile Phone': '携帯番号', 'Mobile': 'モバイル', 'Mode': 'モード', 'Modem Settings': 'モバイル機器の設定', 'Modem settings updated': 'モバイル機器の設定を更新しました', 'Moderate': 'モデレート', 'Moderator': 'モデレータ', 'Modify Feature: Select the feature you wish to deform & then Drag one of the dots to deform the feature in your chosen manner': '地物の変更: 変形する地物を選択し、点の一つをドラッグすることで地物の形を修正可能です。', 'Modify Information on groups and individuals': 'グループと個人の情報更新', 'Modifying data in spreadsheet before importing it to the database': 'データベース登録前に、スプレッドシート内のデータ項目を修正', 'Module Administration': 'モジュール管理', 'Module disabled!': 'モジュールが無効です', 'Module provides access to information on current Flood Levels.': 'このモジュールにより、洪水の現在の水位情報にアクセス可能です', 'Module stores structured reports done by Professional Organizations - currently data includes WFP Assessments.': 
'モジュールでは、専門団体によって作成された調査文書を管理します。データには、WFP(国連世界食糧計画)アセスメントも含まれます。', 'Monday': '月曜日', 'Monthly Cost': '月額費用', 'Monthly Salary': '給与(月額)', 'Months': '月', 'Morgue Status': '死体安置所のステータス', 'Morgue Units Available': '死体公示所の収容可能数', 'Mosque': 'モスク', 'Motorcycle': 'オートバイ', 'Moustache': '口ひげ', 'Move Feature: Drag feature to desired location': 'Featureの移動: Feature を希望するロケーションにドラッグしてください', 'Movements (Filter In/Out/Lost)': '活動 (フィルター イン/アウト/ロスト)', 'MultiPolygon': 'マルチポリゴン', 'Multiple Choice (Multiple Answers)': '複数選択(複数回答)', 'Multiple Choice (Only One Answer)': '複数選択(1つだけ回答)', 'Multiple Matches': '複数の結果が適合しました', 'Multiple Text Fields': '複数の入力項目', 'Multiple': '複数', 'Multiplicator': '乗数', 'Muslim': 'イスラム教徒', 'Must a location have a parent location?': 'ある場所にはその親の場所が無ければならないですか?', 'My Current function': '現在登録している機能', 'My Tasks': '自分のタスク', 'N/A': '該当なし', 'NZSEE Level 1': 'NZSEE レベル1', 'NZSEE Level 2': 'NZSEE レベル 2', 'Name and/or ID Label': '名前および/またはIDラベル', 'Name and/or ID': '名前および/またはID', 'Name of Storage Bin Type.': '物資保管タイプの名前です。', 'Name of the file (& optional sub-path) located in static which should be used for the background of the header.': 'ヘッダーの背景に使用される、static にあるファイルの名前 (オプションでサブパス)。', 'Name of the file (& optional sub-path) located in static which should be used for the top-left image.': '左上の画像で静的位置を表すファイル名(サブパス名はオプション)', 'Name of the file (& optional sub-path) located in views which should be used for footer.': 'フッターに使われるビューにあるファイル名 (オプションとしてサブパス)。', 'Name of the person in local language and script (optional).': '現地言語での名前と表記(オプション)', 'Name of the unit or department this report refers to. 
Leave empty if your hospital has no subdivisions.': 'このレポートに関連する組織や部署の名前。部署をもたない病院の場合は空欄にしてください。', 'Name or Job Title': '名前あるいは役職名', 'Name': '名前', 'Name, Org and/or ID': '名前、組織、IDなど', 'Name/Model/Type': '名前/ モデル/タイプ', 'Name: ': '名前: ', 'Names can be added in multiple languages': '名前は、複数の言語で記述することができます。', 'National ID Card': 'ナショナルIDカード', 'National NGO': '国内NPO', 'National Staff': '現地スタッフ', 'Nationality of the person.': 'この人物の国籍です。', 'Nationality': '国籍', 'Nautical Accident': '船舶事故', 'Nautical Hijacking': '船舶ハイジャック', 'Need Type Details': '需要タイプの詳細', 'Need Type added': '需要タイプを追加しました', 'Need Type deleted': '需要タイプを削除しました', 'Need Type updated': '需要タイプを更新しました', 'Need Type': '需要タイプ', 'Need Types': '需要タイプ', 'Need added': 'ニーズを追加しました', 'Need deleted': 'ニーズを削除しました', 'Need to be logged-in to be able to submit assessments': '評価を確定させるには、ログインが必要です', 'Need to configure Twitter Authentication': 'Twitterの認証を設定する必要があります', 'Need to select 2 Locations': 'ロケーションを2つ指定してください', 'Need to specify a Budget!': '予算を指定する必要があります。', 'Need to specify a Kit!': 'Kitを指定する必要があります。', 'Need to specify a Resource!': 'リソースを指定する必要があります。', 'Need to specify a bundle!': 'bundleを指定する必要があります。', 'Need to specify a group!': 'グループを指定する必要があります。', 'Need to specify a location to search for.': '検索対象となるロケーションを指定する必要があります。', 'Need to specify a role!': '役割を指定する必要があります。', 'Need to specify a service!': 'サービスを指定してください!', 'Need to specify a table!': 'テーブルを指定する必要があります。', 'Need to specify a user!': 'ユーザを指定する必要があります。', 'Need updated': 'ニーズを更新しました', 'Needs Details': '需要の詳細', 'Needs to reduce vulnerability to violence': '暴力行為の対策として必要な物資 / サービス', 'Needs': '要求', 'Negative Flow Isolation': '逆流の分離', 'Neighbourhood': '近隣', 'Neighbouring building hazard': '隣接ビルが危険な状態', 'Neonatal ICU': '新生児ICU', 'Neonatology': '新生児科', 'Network': 'ネットワーク', 'Neurology': '神経科', 'New Assessment reported from': '新規アセスメントの報告元', 'New Checklist': '新規チェックリスト', 'New Peer': '新しいデータ同期先', 'New Record': '新規レコード', 'New Report': '新規レポート', 'New Request': '新規の支援要請', 'New 
Solution Choice': '新しい解決案を選択', 'New Support Request': '新しい支援要請', 'New Synchronization Peer': '新しい同期先', 'New cases in the past 24h': '過去24時間の新規ケース数', 'New': '新規', 'News': 'ニュース', 'Next View': '次を表示', 'Next': '次へ', 'No Activities Found': '支援活動が見つかりませんでした', 'No Addresses currently registered': '住所は、まだ登録がありません。', 'No Aid Requests have been made yet': '援助要請がまだ作成されていません', 'No Alternative Items currently registered': '代替物資は現在登録されていません', 'No Assessment Summaries currently registered': 'アセスメントの要約が登録されていません', 'No Assessments currently registered': '登録済みのアセスメントがありません', 'No Asset Assignments currently registered': '現在のところ資産割り当ては登録されていません', 'No Assets currently registered': '登録されている資産は現在ありません。', 'No Baseline Types currently registered': '登録済みのBaseline Typesはありません', 'No Baselines currently registered': '登録されている基準値はありません', 'No Brands currently registered': '登録されている銘柄がありません', 'No Budgets currently registered': '予算は、まだ登録がありません。', 'No Bundles currently registered': 'Bundleは、まだ登録がありません。', 'No Catalog Items currently registered': '登録済みのカタログアイテムがありません', 'No Category<>Sub-Category<>Catalog Relation currently registered': 'Category<>Sub-Category<>Catalog間の関係は、まだ登録がありません。', 'No Checklist available': '利用可能なチェックリストがありません', 'No Cluster Subsectors currently registered': 'クラスタのサブセクタはまだ登録がありません', 'No Clusters currently registered': '登録済みのクラスタはありません', 'No Commitment Items currently registered': '現在のところコミット済み物資は登録されていません', 'No Commitments': 'コミットメントがありません', 'No Configs currently defined': '設定は、まだ定義されていません', 'No Credentials currently set': '現在のところ証明書が設定されていません', 'No Details currently registered': '詳細は、まだ登録されていません', 'No Distribution Items currently registered': '配給物資の登録がありません', 'No Distributions currently registered': '配給所の登録がありません', 'No Documents found': '文書が見つかりませんでした。', 'No Donors currently registered': '資金提供組織はまだ登録されていません', 'No Feature Layers currently defined': 'Feature Layersはまだ定義されていません', 'No Flood Reports currently registered': '登録済みの洪水情報はありません', 'No GPX Layers currently defined': 
'GPXレイヤはまだ定義されていません', 'No Groups currently defined': 'グループはまだ定義されていません', 'No Groups currently registered': 'グループはまだ登録されていません', 'No Hospitals currently registered': '病院はまだ登録されていません', 'No Identification Report Available': '利用可能なIDレポートはありません', 'No Identities currently registered': '登録されているIDはありません', 'No Image': '画像なし', 'No Images currently registered': '画像の登録はありません', 'No Impact Types currently registered': '被害の種類は未登録です', 'No Impacts currently registered': 'これまでに登録されたImpactはありません', 'No Incident Reports currently registered': '登録されているインシデントレポートはありません', 'No Incidents currently registered': '登録済みのインシデントはありません。', 'No Incoming Shipments': '到着予定の輸送物資', 'No Inventory Items currently registered': '備蓄物資の登録がありません', 'No Inventory Stores currently registered': '現在登録されている物資集積地点はありません', 'No Item Catalog Category currently registered': '救援物資カタログのカテゴリはまだ登録がありません', 'No Item Catalog currently registered': 'アイテムカタログはまだ登録されていません', 'No Item Categories currently registered': '救援物資カテゴリの登録がありません', 'No Item Packs currently registered': '救援物資のパックは、まだ登録がありません', 'No Item Sub-Category currently registered': '救援物資のサブカテゴリはまだ登録されていません', 'No Item currently registered': 'アイテムはまだ登録されていません', 'No Items currently registered': '物資はまだ登録されていません', 'No Items currently requested': '要求されている物資はありません', 'No Keys currently defined': 'Keyはまだ定義されていません', 'No Kits currently registered': 'Kitはまだ登録されていません', 'No Level 1 Assessments currently registered': '現在のところ、レベル1アセスメントは登録されていません', 'No Level 2 Assessments currently registered': '現在のところ、レベル2アセスメントは登録されていません', 'No Locations currently available': '現在利用可能なロケーションはありません', 'No Locations currently registered': 'ロケーションはまだ登録されていません', 'No Map Profiles currently defined': '地図の設定が定義されていません', 'No Markers currently available': '現在利用可能なマーカーはありません', 'No Match': '合致する結果がありません', 'No Matching Catalog Items': '適合する救援物資はありませんでした', 'No Matching Records': '適合する検索結果がありませんでした', 'No Members currently registered': 'メンバはまだ登録されていません', 'No Memberships currently defined': 'メンバシップはまだ登録されていません', 'No 
Messages currently in Outbox': '送信箱にメッセージがありません', 'No Metadata currently defined': 'メタデータはまだ定義されていません', 'No Need Types currently registered': '現在登録されている需要タイプはありません', 'No Needs currently registered': '現在要求は登録されていません', 'No Offices currently registered': 'オフィスはまだ登録されていません', 'No Offices found!': 'オフィスが見つかりませんでした', 'No Organizations currently registered': '団体はまだ登録されていません', 'No Packs for Item': 'この物資に対する救援物資パックはありません', 'No Peers currently registered': '登録済みのデータ同期先はありません', 'No People currently registered in this shelter': 'この避難所に登録されている人物情報はありません', 'No Persons currently registered': '人物情報はまだ登録されていません', 'No Persons currently reported missing': '現在、行方不明者の登録はありません', 'No Persons found': '該当する人物はいませんでした', 'No Photos found': '写真の登録がありません', 'No Picture': '写真がありません', 'No Presence Log Entries currently registered': '所在地履歴の登録がありません', 'No Problems currently defined': '定義済みの問題がありません', 'No Projections currently defined': '地図投影法は、まだ定義されていません。', 'No Projects currently registered': '定義済みのプロジェクトはありません', 'No Rapid Assessments currently registered': '被災地の現況アセスメントはまだ登録されていません', 'No Received Items currently registered': '受領された救援物資の登録はありません', 'No Received Shipments': '受け取った輸送はありません', 'No Records currently available': '利用可能なレコードはありません', 'No Records matching the query': '条件に当てはまるレコードが存在しません', 'No Request Items currently registered': '物資要請の登録がありません', 'No Requests have been made yet': '支援要請は、まだ行われていません', 'No Requests match this criteria': 'この条件に一致する支援要請はありません', 'No Requests': '支援要請がありません', 'No Responses currently registered': '返答はまだ登録されていません', 'No Rivers currently registered': '河川情報の登録がありません', 'No Roles currently defined': '役割はまだ定義されていません', 'No Sections currently registered': 'このセクションの登録情報がありません', 'No Sectors currently registered': '登録済みの活動分野がありません', 'No Sent Items currently registered': '送付した物資の登録がありません', 'No Sent Shipments': '送付が行われた輸送がありません', 'No Settings currently defined': '設定は、まだ定義されていません', 'No Shelter Services currently registered': '登録されている避難所サービスがありません', 'No Shelter Types currently 
registered': '登録済みの避難所タイプがありません', 'No Shelters currently registered': '避難所はまだ登録されていません', 'No Shipment Transit Logs currently registered': '物資輸送履歴の登録がありません', 'No Shipment/Way Bills currently registered': '輸送費/Way Billsはまだ登録されていません', 'No Shipment<>Item Relation currently registered': '輸送とアイテムの関連付けはまだ登録されていません', 'No Sites currently registered': '登録されているサイトはありません', 'No Skill Types currently set': '設定済みのスキルタイプはありません', 'No Solutions currently defined': '解決案はまだ定義されていません', 'No Staff Types currently registered': 'スタッフタイプはまだ登録されていません', 'No Staff currently registered': 'スタッフはまだ登録されていません', 'No Storage Bin Type currently registered': '登録済みのStorage Binタイプがありません', 'No Storage Bins currently registered': 'Storage Binはまだ登録されていません', 'No Storage Locations currently registered': '登録されている備蓄地点がありません', 'No Subscription available': '寄付の申し込みがありません', 'No Support Requests currently registered': '現在のところ、支援要請は登録されていません', 'No Survey Answers currently registered': 'これまでに登録されたフィードバックの回答はありません', 'No Survey Questions currently registered': '登録済みのSurvey Questionsはありません', 'No Survey Sections currently registered': '登録済みのSurvey Sectionはありません', 'No Survey Series currently registered': '現在、調査報告は登録されていません', 'No Survey Template currently registered': '登録されている調査テンプレートがありません', 'No TMS Layers currently defined': 'TMS レイヤーがまだ定義されていません', 'No Tasks with Location Data': 'ロケーション情報を持っているタスクがありません', 'No Themes currently defined': 'テーマはまだ定義されていません', 'No Tickets currently registered': 'チケットはまだ定義されていません', 'No Tracks currently available': '利用可能な追跡情報はありません', 'No Units currently registered': '単位はまだ登録されていません', 'No Users currently registered': '登録済みのユーザがありません', 'No Volunteers currently registered': 'ボランティアの登録がありません', 'No Warehouse Items currently registered': '現在登録済みの倉庫物資はありません', 'No Warehouses currently registered': '倉庫が登録されていません', 'No Warehouses match this criteria': '条件に合致する倉庫がありません', 'No access at all': '完全に孤立中', 'No access to this record!': 'このレコードにはアクセスできません', 'No action recommended': 'アクション無しを推奨', 'No calculations 
made': '見積が作成されていません', 'No conflicts logged': 'コンフリクトのログはありません。', 'No contact information available': '利用可能な連絡先情報はありません', 'No contacts currently registered': '連絡先が登録されていません', 'No data in this table - cannot create PDF!': 'テーブルにデータがありません。PDF を作成できません。', 'No databases in this application': 'このアプリケーションにデータベースはありません', 'No dead body reports available': '遺体情報のレポートはありません', 'No entries found': 'エントリが見つかりません', 'No entries matching the query': 'クエリに一致するエントリはありませんでした。', 'No import jobs': 'インポートされたJobがありません', 'No linked records': 'リンクされているレコードはありません', 'No location known for this person': 'この人物の消息が不明です', 'No locations found for members of this team': 'このチームのメンバーの場所が見つかりませんでした', 'No locations registered at this level': 'この階層に登録されているロケーションはありません', 'No log entries matching the query': '検索に合致するログエントリがありません', 'No matching items for this request': 'この支援要請に適合する物資はありません', 'No matching records found.': '一致するレコードがありませんでした。', 'No messages in the system': 'システム上にメッセージが存在しません', 'No notes available': '追加情報はありません', 'No peers currently registered': '現在登録されているデータ同期先はありません', 'No pending registrations found': '処理保留中の登録申請はありません', 'No pending registrations matching the query': '検索に合致する処理保留登録申請がありません。', 'No person record found for current user.': '現在のユーザの人物情報レコードが見つかりませんでした。', 'No positions currently registered': '登録されているpositionがありません', 'No problem group defined yet': '定義済みの問題グループがありません。', 'No records matching the query': '条件に当てはまるレコードが存在しません', 'No records to delete': '削除するレコードがありません', 'No recovery reports available': '利用可能な遺体回収レポートはありません', 'No report available.': '利用可能なレポートはありません。', 'No reports available.': '利用可能なレポートがありません。', 'No reports currently available': '利用可能なレポートはありません', 'No requests found': '支援要請は見つかりませんでした', 'No resources currently registered': 'リソースはまだ登録されていません', 'No resources currently reported': 'レポート済みのリソースはありません', 'No service profile available': '利用可能なサービスプロファイルはありません', 'No skills currently set': 'スキルが登録されていません', 'No status information available': '状況に関する情報はありません', 'No 
synchronization': '同期なし', 'No tasks currently registered': 'タスクはまだ登録されていません', 'No template found!': 'テンプレートが見つかりません。', 'No units currently registered': '単位はまだ登録されていません', 'No volunteer information registered': 'ボランティア情報はまだ登録されていません', 'No': 'いいえ', 'Non-structural Hazards': 'その他の災害', 'None (no such record)': 'なし(記録がありません)', 'None': 'なし', 'Noodles': '麺', 'Normal food sources disrupted': '普段の食料供給源が混乱している', 'Normal': '通常どおり', 'Not Applicable': '該当なし', 'Not Authorised!': '認証されていません', 'Not Possible': '対応不可', 'Not Set': '設定されていません', 'Not Authorized': '認証されていません', 'Not installed or incorrectly configured.': 'インストールされていないか、適切な設定がされていません', 'Not yet a Member of any Group': 'メンバシップはまだ登録されていません', 'Note Details': '追加情報の詳細', 'Note Status': '状態を記録', 'Note Type': '追加情報の種類', 'Note added': '追加情報を追加しました', 'Note deleted': '追加情報を削除しました', 'Note that this list only shows active volunteers. To see all people registered in the system, do a search from the home screen instead': '注意:このリストは、活動中のボランティアのみ表示しています。システムに登録しているすべての人をみるには、ホーム・スクリーンから検索してください。', 'Note updated': '追加情報を更新しました', 'Note': '追加情報', 'Notes': '追加情報', 'Notice to Airmen': 'NOTAM (航空従事者用)', 'Number of Columns': '列数', 'Number of Patients': '患者数', 'Number of Rows': '行数', 'Number of Vehicles': '車両数', 'Number of additional beds of that type expected to become available in this unit within the next 24 hours.': 'この施設において、今後24時間以内に利用可能になると予測されている、このタイプの追加ベッド数。', 'Number of alternative places for studying': '授業用に確保できる場所の数', 'Number of available/vacant beds of that type in this unit at the time of reporting.': 'このタイプの利用可能/空きベッド数(報告時点)', 'Number of deaths during the past 24 hours.': '過去24時間以内の死亡者数', 'Number of discharged patients during the past 24 hours.': '退院患者数(過去24時間以内)', 'Number of doctors actively working': '現在活動中の医師の数', 'Number of doctors': '医者の人数', 'Number of houses damaged, but usable': '破損しているが利用可能な家屋の数', 'Number of houses destroyed/uninhabitable': '全壊/居住不可になった家屋数', 'Number of in-patients at the time of reporting.': 'レポート時の患者数です。', 
'Number of latrines': 'トイレ総数', 'Number of midwives actively working': '現在活動中の助産師の数', 'Number of newly admitted patients during the past 24 hours.': '入院患者数(過去24時間以内)', 'Number of non-medical staff': '医療従事以外のスタッフ数', 'Number of nurses actively working': '現在活動中の看護師の数', 'Number of nurses': '看護師の人数', 'Number of private schools': '私立学校の数', 'Number of public schools': '公立学校の数', 'Number of religious schools': '宗教学校の数', 'Number of residential units not habitable': '住めなくなった住居の数', 'Number of residential units': '居住施設の数', 'Number of schools damaged but usable': '破損しているが利用可能な校舎の数', 'Number of schools destroyed/uninhabitable': '全壊 / 利用不可能な校舎の数', 'Number of schools open before disaster': '災害前に開校していた学校数', 'Number of schools open now': '現在開校している学校の数', 'Number of teachers affected by disaster': '被災した教師の数', 'Number of teachers before disaster': '災害発生前の教師の数', 'Number of vacant/available beds in this hospital. Automatically updated from daily reports.': '病院に設置されている、現在利用可能なベッドの数。日時レポートにより、自動的に更新されます。', 'Number of vacant/available units to which victims can be transported immediately.': '現在利用可能なユニット数。犠牲者を即座に安置できる数。', 'Number or Label on the identification tag this person is wearing (if any).': 'この人物の衣服につけられているタグの番号、あるいはラベル名(ある場合のみ).', 'Number or code used to mark the place of find, e.g. 
flag code, grid coordinates, site reference number or similar (if available)': 'この場所をあとで検索するための番号かコード 例: フラグ番号、グリッドの位置、サイトの参照番号など', 'Number': '番号', 'Number/Percentage of affected population that is Female & Aged 0-5': '女性(0-5歳)の被災者数 / 割合', 'Number/Percentage of affected population that is Female & Aged 13-17': '女性(13-17歳)の被災者数 / 割合', 'Number/Percentage of affected population that is Female & Aged 18-25': '女性(18-25歳)の被災者数 / 割合', 'Number/Percentage of affected population that is Female & Aged 26-60': '女性(26-60歳)の被災者数 / 割合', 'Number/Percentage of affected population that is Female & Aged 6-12': '女性(6-12歳)の被災者数 / 割合', 'Number/Percentage of affected population that is Female & Aged 61+': '女性(61歳以上)の被災者数 / 割合', 'Number/Percentage of affected population that is Male & Aged 0-5': '男性(0-5歳)の被災者数 / 割合', 'Number/Percentage of affected population that is Male & Aged 13-17': '男性(13-17歳)の被災者数 / 割合', 'Number/Percentage of affected population that is Male & Aged 18-25': '男性(18-25歳)の被災者数 / 割合', 'Number/Percentage of affected population that is Male & Aged 26-60': '男性(26-60歳)の被災者数 / 割合', 'Number/Percentage of affected population that is Male & Aged 6-12': '男性(6-12歳)の被災者数 / 割合', 'Number/Percentage of affected population that is Male & Aged 61+': '男性(61歳以上)の被災者数 / 割合', 'Numbers Only': '数値のみ', 'Nursery Beds': '看護ベッド', 'Nutrition problems': '栄養問題', 'Nutrition': '食料・栄養', 'OR Reason': '手術室の詳細', 'OR Status Reason': '手術室の状態理由', 'OR Status': '手術室の状態', 'Observer': 'オブザーバ', 'Obsolete': '廃止済み', 'Obstetrics/Gynecology': '産婦人科', 'Office Address': 'オフィスの住所', 'Office Details': 'オフィスの詳細', 'Office added': 'オフィスを追加しました', 'Office deleted': 'オフィスを削除しました', 'Office updated': 'オフィスを更新しました', 'Office': 'オフィス', 'Offices': 'オフィス', 'Offline Sync (from USB/File Backup)': 'データのオフライン同期(USB/バックアップファイル利用)', 'Offline Sync': 'データのオフライン同期', 'Old': '古い', 'Older people as primary caregivers of children': '子供の介護を、高齢者が担当', 'Older people in care homes': '介護施設で生活する高齢者がいる', 'Older people participating in coping activities': 
'高齢者が災害対応に従事', 'Older people with chronical illnesses': '慢性疾患をもつ高齢者がいる', 'Older person (>60 yrs)': '高齢者(60歳以上)', 'On by default? (only applicable to Overlays)': 'デフォルトでオン(オーバーレイにのみ有効)', 'On by default?': 'デフォルトでON?', 'One Time Cost': '1回毎の費用', 'One time cost': '一回毎の費用', 'One-time costs': '一回毎の費用', 'One-time': '1回毎', 'Oops! Something went wrong...': '申し訳ありません、何か問題が発生しています。', 'Oops! something went wrong on our side.': '申し訳ありません、システム側に問題が発生しています。', 'Opacity (1 for opaque, 0 for fully-transparent)': '不透明度(1は不透明、0は完全に透明)', 'Open Assessment': '未解決のアセスメント', 'Open Map': '地図を開く', 'Open area': '空き地', 'Open recent': '最近使用したものを開く', 'Open': '開く', 'OpenStreetMap Editor': 'OpenStreetMap エディタ', 'Operating Rooms': '手術室', 'Optional link to an Incident which this Assessment was triggered by.': 'このアセスメントの端緒となった事故へのオプション・リンク', 'Optional': '任意', 'Optional. In GeoServer, this is the Workspace Namespace URI. Within the WFS getCapabilities, this is the FeatureType Name part before the colon(:).': 'オプション。GeoServerでは、ワークスペース名前空間のURIです。WFS getCapabilitiesでは、FeatureType名のコロンの前の部分です。', 'Options': 'オプション', 'Organization Details': '団体の詳細', 'Organization Registry': '団体情報の登録', 'Organization added': '団体を追加しました', 'Organization deleted': '団体を削除しました', 'Organization updated': '団体を更新しました', 'Organization': '団体', 'Organizations': '団体', 'Origin of the separated children': '離別した子供たちの出身地', 'Origin': '出身地', 'Other (describe)': 'その他 (要記述)', 'Other (specify)': 'その他(具体的に)', 'Other Evidence': 'その他の証跡', 'Other Faucet/Piped Water': 'その他 蛇口/パイプによる水源', 'Other Isolation': 'その他の孤立', 'Other Name': 'その他の名前', 'Other activities of boys 13-17yrs before disaster': 'その他、災害発生前の13-17歳男子の活動状況', 'Other activities of boys 13-17yrs': 'その他、13-17歳男子の活動状況', 'Other activities of boys <12yrs before disaster': 'その他、災害発生前の12歳以下男子の活動状況', 'Other activities of boys <12yrs': 'その他、12歳以下男子の活動状況', 'Other activities of girls 13-17yrs before disaster': 'その他、災害発生前の13-17歳女子の活動状況', 'Other activities of girls 13-17yrs': 'その他、13-17歳女子の活動状況', 'Other 
activities of girls<12yrs before disaster': 'その他、災害発生前の12歳以下女子の活動状況', 'Other activities of girls<12yrs': 'その他、12歳以下女子の活動状況', 'Other alternative infant nutrition in use': 'その他、使用されている乳児用代替食', 'Other alternative places for study': 'その他、授業開設に利用可能な施設', 'Other assistance needed': 'その他に必要な援助活動', 'Other assistance, Rank': 'その他の援助、ランク', 'Other current health problems, adults': 'その他の健康問題(成人)', 'Other current health problems, children': 'その他の健康問題(小児)', 'Other events': '他のイベント', 'Other factors affecting school attendance': 'その他、生徒の就学に影響する要因', 'Other major expenses': 'その他の主な支出', 'Other non-food items': '食料以外の救援物資', 'Other recommendations': '他の推薦', 'Other residential': '住宅その他', 'Other school assistance received': 'その他の学校用品を受領した', 'Other school assistance, details': '受領した学校用品の内訳', 'Other school assistance, source': 'その他の学校用品の送付元', 'Other side dishes in stock': '在庫のあるその他食材', 'Other types of water storage containers': 'それ以外の水貯蔵容器タイプ', 'Other ways to obtain food': 'それ以外の食料調達方法', 'Other': 'その他', 'Outbound Mail settings are configured in models/000_config.py.': '送信メール設定は、models/000_config.py で定義されています。', 'Outbox': '送信箱', 'Outgoing SMS Handler': 'SMS 送信ハンドラ', 'Outgoing SMS handler': 'SMS送信ハンドラ', 'Overall Hazards': 'すべての危険', 'Overhead falling hazard': '頭上落下物の危険', 'Overland Flow Flood': '陸上の洪水流量', 'Overlays': 'オーバーレイ', 'Owned Records': '自身のレコード', 'Owned Resources': '保持しているリソース', 'PDAM': '水道会社(PDAM)', 'PIN number ': 'PIN 番号', 'PIN': '暗証番号', 'PL Women': 'PL 女性', 'Pack': 'パック', 'Packs': 'パック', 'Pan Map: keep the left mouse button pressed and drag the map': 'マップをパン: マウスの左ボタンを押したまま、地図をドラッグしてください', 'Parameters': 'パラメータ', 'Parapets, ornamentation': '欄干、オーナメント', 'Parent Office': '親組織のオフィス', 'Parent needs to be of the correct level': '適切なレベルの親属性を指定してください', 'Parent needs to be set for locations of level': 'ロケーションのレベルには親属性が必要です', 'Parent needs to be set': '親情報が設定される必要があります', 'Parent': '親', 'Parents/Caregivers missing children': '親/介護者とはぐれた子供たち', 'Partial': '一部 / 不足', 'Participant': '参加者', 
'Pashto': 'パシュトー語', 'Passport': 'パスポート', 'Password for authentication at the peer. Note that only HTTP Basic authentication is supported.': 'Password for authentication at the peer. HTTPベーシック認証のみサポートしています。', 'Password': 'パスワード', 'Path': 'パス', 'Pathology': '病理学', 'Patients': '患者数', 'Pediatric ICU': '小児ICU', 'Pediatric Psychiatric': '小児精神科', 'Pediatrics': '小児科医', 'Peer Details': 'データ同期先の詳細', 'Peer Registration Details': 'データ同期先登録の詳細', 'Peer Registration Request': 'データ同期先の登録要求', 'Peer Registration': 'データ同期先登録', 'Peer Type': '同期先タイプ', 'Peer UID': '同期先UID', 'Peer added': 'データ同期先を追加しました', 'Peer deleted': 'データ同期先を削除しました', 'Peer not allowed to push': '同期先がデータのプッシュを許可していません', 'Peer registration request added': 'データ同期先の登録要求を追加しました', 'Peer registration request deleted': 'データ同期先の登録要求を削除しました', 'Peer registration request updated': 'データ同期先の登録要求を更新しました', 'Peer updated': '同期先を更新しました', 'Peer': 'データ同期先', 'Peers': '同期先', 'Pending Requests': '保留中の支援要請', 'Pending': '保留中', 'People Needing Food': '食料不足', 'People Needing Shelter': '避難所が必要', 'People Needing Water': '水が必要', 'People Trapped': '救難者', 'People with chronical illnesses': '慢性疾患をもつ成人がいる', 'People': '人物情報', 'Person 1': '人物 1', 'Person 1, Person 2 are the potentially duplicate records': '人物情報1と人物情報2は重複したレコードの可能性があります。', 'Person 2': '人物 2', 'Person Data': '人物データ', 'Person De-duplicator': '人物情報の重複削除', 'Person Details': '人物情報の詳細', 'Person Finder': '消息情報', 'Person Registry': '人物情報の登録', 'Person added to Group': 'グループメンバを追加しました', 'Person added to Team': 'グループメンバを追加しました', 'Person added': '人物情報を追加しました', 'Person deleted': '人物情報を削除しました', 'Person details updated': '人物情報を更新しました', 'Person interviewed': 'インタビュー担当者', 'Person missing': '行方不明中', 'Person must be specified!': '登録がありません', 'Person reporting': 'レポート報告者', 'Person who has actually seen the person/group.': '人物/グループで実際に目撃された人物情報', 'Person who is reporting about the presence.': 'この所在報告を行った人物です。', 'Person who observed the presence (if different from reporter).': 
'人物の所在を確認したひとの情報(報告者と異なる場合のみ記入)。', 'Person': '人物情報', 'Person/Group': '人物/グループ', 'Personal Data': '個人情報', 'Personal Effects Details': '個人の影響の詳細', 'Personal Effects': '所持品', 'Personal impact of disaster': 'この人物の被災状況', 'Personal': '個人', 'Persons in institutions': '施設居住中の住人', 'Persons with disability (mental)': '障がい者数(精神的障がい者を含む)', 'Persons with disability (physical)': '肉体的な障がい者の数', 'Persons': '人物情報', 'Phone 1': '電話番号', 'Phone 2': '電話番号(予備)', 'Phone': '電話番号', 'Phone/Business': '電話番号/仕事', 'Phone/Emergency': '電話番号/緊急連絡先', 'Phone/Exchange': '電話/とりつぎ', 'Photo Details': '写真の詳細', 'Photo Taken?': '写真撮影済み?', 'Photo added': '写真を追加しました', 'Photo deleted': '写真を削除しました', 'Photo updated': '写真を更新しました', 'Photo': '写真', 'Photograph': '写真', 'Photos': '写真', 'Physical Description': '身体外見の説明', 'Physical Safety': '身体的安全', 'Picture upload and finger print upload facility': '指紋や写真のアップロード機能', 'Picture': '写真', 'Place for solid waste disposal': '廃棄物の処理を行う場所を記載してください', 'Place of Recovery': '遺体回収場所', 'Place on Map': '地図上の場所', 'Places for defecation': 'トイレ', 'Places the children have been sent to': '子供たちの避難先', 'Planner': '立案者', 'Playing': '家庭内/外で遊ぶ', 'Please correct all errors.': 'すべてのエラーを修正してください。', 'Please enter a First Name': '苗字を入力してください', 'Please enter a valid email address': '有効な電子メールアドレスを入力してください。', 'Please enter the first few letters of the Person/Group for the autocomplete.': '自動入力するには人物あるいはグループの最初の数文字を入力してください', 'Please enter the recipient': '受取担当者を入力してください', 'Please fill this!': 'ここに入力してください', 'Please provide the URL of the page you are referring to, a description of what you expected to happen & what actually happened. If a ticket was issued then please provide the Ticket ID.': '言及先のURLを明示し、期待する結果と実際に発生した結果を記述してください。不具合チケットが発行された場合は、そのチケットIDも記載してください。', 'Please report here where you are:': 'いまあなたが居る場所を入力してください。', 'Please select another level': '別のレベルを選択してください', 'Please select': '選んでください', 'Please sign-up with your Cell Phone as this allows us to send you Text messages. 
Please include full Area code.': '携帯電話番号でサインアップし、Sahanaからのテキストメッセージを受け取れるようにします。国際電話コードまで含めた形式で入力してください', 'Please specify any problems and obstacles with the proper handling of the disease, in detail (in numbers, where appropriate). You may also add suggestions the situation could be improved.': '病気の治療に当たって問題となる事象の詳細を記載します。状況を改善するための提案も、もしあれば記載してください。', 'Please use this field to record any additional information, including a history of the record if it is updated.': '追加情報はこの項目に記載してください。レコードの変更履歴などにも利用可能です。', 'Please use this field to record any additional information, including any Special Needs.': '特別な要求など、どんな追加情報でも構いませんので、この部分に記録してください', 'Please use this field to record any additional information, such as Ushahidi instance IDs. Include a history of the record if it is updated.': 'UshahidiのインスタンスIDなど、追加情報がある場合はこの項目に記載してください。レコードの変更履歴などにも利用可能です。', 'Pledge Aid to match these Requests': 'これらの要求に一致する支援に寄付する', 'Pledge Aid': '寄付する', 'Pledge Status': '寄付のステータス', 'Pledge Support': '寄付サポート', 'Pledge': '寄付', 'Pledged': '寄付済み', 'Pledges': '寄付', 'Point': 'ポイント', 'Poisoning': '中毒', 'Poisonous Gas': '有毒ガス', 'Police': '警察', 'Pollution and other environmental': '汚染、あるいはその他の環境要因', 'Polygon reference of the rating unit': 'その評価単位への参照ポリゴン', 'Polygon': 'ポリゴン', 'Population and number of households': '人口と世帯数', 'Population': '利用者数', 'Porridge': 'おかゆ', 'Port Closure': '港湾閉鎖', 'Port': 'ポート', 'Position Details': 'ポジションの詳細', 'Position added': 'Position を追加しました', 'Position deleted': 'ポジションを削除しました', 'Position type': '場所のタイプ', 'Position updated': 'ポジションを更新しました', 'Positions': 'ポジション', 'Postcode': '郵便番号', 'Poultry restocking, Rank': '家禽の補充、ランク', 'Poultry': '家禽(ニワトリ)', 'Pounds': 'ポンド', 'Power Failure': '停電', 'Pre-cast connections': 'プレキャスト連結', 'Preferred Name': '呼び名', 'Pregnant women': '妊婦の数', 'Preliminary': '予備', 'Presence Condition': '所在情報', 'Presence Log': '所在履歴', 'Presence': '所在', 'Previous View': '前を表示', 'Previous': '前へ', 'Primary Name': '基本名', 'Primary Occupancy': '主要な従事者', 'Priority Level': 
'優先度レベル', 'Priority': '優先度', 'Private': '企業', 'Problem Administration': '問題管理', 'Problem Details': '問題の詳細', 'Problem Group': '問題グループ', 'Problem Title': '問題の名称', 'Problem added': '問題を追加しました', 'Problem connecting to twitter.com - please refresh': 'twitter.comに接続できません。更新ボタンを押してください', 'Problem deleted': '問題を削除しました', 'Problem updated': '問題を更新しました', 'Problem': '問題', 'Problems': '問題', 'Procedure': '手続き', 'Procurements': '物資の調達', 'Product Description': '製品の説明', 'Product Name': '製品名', 'Profile': 'プロファイル', 'Project Activities': 'プロジェクト活動状況', 'Project Details': 'プロジェクトの詳細', 'Project Management': 'プロジェクト管理', 'Project Status': 'プロジェクトのステータス', 'Project Tracking': 'プロジェクト追跡', 'Project added': 'プロジェクトを追加しました', 'Project deleted': 'プロジェクトを削除しました', 'Project has no Lat/Lon': 'プロジェクトの緯度/経度情報はありません', 'Project updated': 'プロジェクトを更新しました', 'Project': 'プロジェクト', 'Projection Details': '地図投影法の詳細', 'Projection added': '地図投影法を追加しました', 'Projection deleted': '地図投影法を削除しました', 'Projection updated': '地図投影法を更新しました', 'Projection': '地図投影法', 'Projections': '地図投影法', 'Projects': 'プロジェクト', 'Property reference in the council system': '評議システムで使用されるプロパティリファレンス', 'Protected resource': '保護されたリソース', 'Protection': '被災者保護', 'Provide Metadata for your media files': 'メディアファイルにメタデータを提供', 'Provide a password': 'パスワードを入力', 'Provide an optional sketch of the entire building or damage points. 
Indicate damage points.': '建物全体か損傷箇所のスケッチを提供し、損傷箇所を明示してください。', 'Province': '都道府県', 'Proxy-server': 'プロキシサーバ', 'Psychiatrics/Adult': '精神病/成人', 'Psychiatrics/Pediatric': '精神病/小児', 'Public Event': '公開イベント', 'Public and private transportation': '公共および民営の交通機関', 'Public assembly': '公会堂', 'Public': '公開', 'Pull tickets from external feed': '外部フィードからのticketの取得', 'Punjabi': 'パンジャブ', 'Push tickets to external system': '外部システムにチケットの発信', 'Put a choice in the box': '箱の中から選んで取る', 'Pyroclastic Flow': '火砕流', 'Pyroclastic Surge': '火砕サージ', 'Python Serial module not available within the running Python - this needs installing to activate the Modem': 'PythonでPython Serial moduleが利用できません。モデムの有効化に必要です。', 'Python needs the ReportLab module installed for PDF export': '実行中のPythonでReportLabモジュールが利用できません。PDF出力に必要です。', 'Quantity Committed': '引き受けた量', 'Quantity Fulfilled': '十分な量がある', 'Quantity in Transit': '運送中の数量', 'Quantity': '数量', 'Quarantine': '隔離施設', 'Queries': 'クエリ', 'Query Feature': '問合せ機能', 'Query': 'クエリ', 'Queryable?': '検索可能?', 'RC frame with masonry infill': '鉄骨入りコンクリートブロック', 'RECORD A': 'レコード A', 'RECORD B': 'レコード B', 'RESPONSE': '対応', 'Race': '人種', 'Radiological Hazard': '放射能災害', 'Radiology': '放射線科', 'Railway Accident': '鉄道事故', 'Railway Hijacking': '鉄道ハイジャック', 'Rain Fall': '降雨', 'Rapid Assessment Details': '被災地の現況アセスメントの詳細', 'Rapid Assessment added': '被災地の現況アセスメントを追加しました', 'Rapid Assessment deleted': '被災地の現況アセスメントを削除しました', 'Rapid Assessment updated': '被災地の現況アセスメントを更新しました', 'Rapid Assessment': '被災地の現況アセスメント', 'Rapid Assessments & Flexible Impact Assessments': '被災地の現況アセスメントと、災害影響範囲アセスメント', 'Rapid Assessments': '被災地の現況アセスメント', 'Rapid Close Lead': '急いで閉め、先導してください。', 'Rapid Data Entry': 'データ入力簡易版', 'Rating Scale': '評価尺度', 'Raw Database access': 'データベースへの直接アクセス', 'Read-Only': '読み込み専用', 'Read-only': '登録内容の編集を禁止', 'Real World Arbitrary Units': '実在の任意単位', 'Receive Items': '物資を受領', 'Receive Shipment': '輸送を受け取る', 'Receive this shipment?': 'この物資送付を受領しますか?', 'Receive': '物資受領', 'Received By': 
'物資受領責任者', 'Received Item Details': '配送済み物資の詳細', 'Received Item deleted': '受領した物資を削除しました', 'Received Item updated': '受領された物資を更新しました', 'Received Shipment Details': '受け取った輸送の詳細', 'Received Shipment canceled and items removed from Inventory': '受領した輸送をキャンセルしました。物資は備蓄から削除されます', 'Received': '受領済み', 'Receiving and Sending Items': '送付 / 受領した救援物資', 'Recipient': '受け取り担当者', 'Recipients': '受信者', 'Recommendations for Repair and Reconstruction or Demolition': '再築や取り壊し、修繕を推奨', 'Record %(id)s created': 'レコード %(id)s が作成されました', 'Record Created': '作成されたレコード', 'Record Details': 'レコードの詳細', 'Record ID': 'レコードID', 'Record Saved': 'レコードが保存されました', 'Record added': 'レコードを追加しました', 'Record any restriction on use or entry': '利用や入力に当たっての制限事項を記載', 'Record deleted': 'レコードを削除しました', 'Record last updated': '最近更新されたレコード', 'Record not found!': 'レコードが見つかりませんでした', 'Record updated': 'レコードを更新しました', 'Record': 'レコード', 'Recording and Assigning Assets': '物資の割り当てと記録', 'Records': 'レコード', 'Recovery Request added': '遺体の回収要請を追加しました', 'Recovery Request deleted': '遺体回収要請を削除しました', 'Recovery Request updated': '遺体回収要請を更新しました', 'Recovery Request': '遺体回収の要請', 'Recovery Requests': '遺体回収要請', 'Recovery report added': '遺体回収レポートを追加しました', 'Recovery report deleted': '遺体回収レポートを削除しました', 'Recovery report updated': '遺体回収レポートを更新しました', 'Recovery': '遺体回収', 'Recruitment': '人材募集', 'Recurring Cost': '経常費用', 'Recurring cost': '経常費用', 'Recurring costs': '経常費用', 'Recurring': '定期', 'Red': '赤', 'Reference Document': '関連文書', 'Region Location': '地域のロケーション', 'Regional': '国際支部', 'Register Person into this Shelter': 'この避難所に人物情報を登録', 'Register Person': '人物情報を登録', 'Register them as a volunteer': 'ボランティアとして登録', 'Register': '登録', 'Registered People': '登録した人物情報', 'Registered users can': '登録済みのユーザは', 'Registering ad-hoc volunteers willing to contribute': '貢献を希望する臨時ボランティアを登録', 'Registration Details': '登録情報詳細', 'Registration Disabled!': '現在アカウント登録は受け付けていません。', 'Registration added': '登録を追加しました', 'Registration entry deleted': '登録を削除しました', 
'Registration is still pending approval from Approver (%s) - please wait until confirmation received.': '登録はまだ承認されていません (承認者:(%s)) -- 確認メールが届くまでもうしばらくお待ちください。', 'Registration key': '登録key', 'Registration successful': '登録に成功しました', 'Registration updated': '登録を更新しました', 'Registration': '登録', 'Registry keeps track of all the relief organizations working in the disaster region. It captures not only the places where they are active, but also captures information on the range of projects they are providing in each area.': '地域内で活動する全ての支援団体を追跡し、情報を保持します。これにより、各団体が活動している地域の情報だけでなく、それぞれの地域でどのような活動が行われているかも掌握することができます。', 'Rehabilitation/Long Term Care': 'リハビリ/長期介護', 'Reinforced masonry': 'コンクリートブロック壁', 'Rejected': '拒否されました', 'Reliable access to sanitation/hygiene items': 'サニタリ / 衛生用品の安定供給がある', 'Relief Item Catalog': '救援物資カタログ', 'Relief Item': '救援物資', 'Relief Items': '救援物資', 'Relief Team': '救援チーム', 'Relief': '救援', 'Religion': '宗教', 'Religious Leader': '宗教指導者', 'Religious': '宗教', 'Relocate as instructed in the <instruction>': '<instruction>の内容に従って再配置', 'Remove Feature: Select the feature you wish to remove & press the delete key': 'Featureの削除: 削除したいfeatureを選択し、削除キーを押下してください', 'Remove Person from Group': 'メンバシップを削除', 'Remove Person from Team': 'メンバシップを削除', 'Remove': '削除', 'Removed from Group': 'メンバシップを削除しました', 'Removed from Team': 'メンバシップを削除しました', 'Repeat your password': 'パスワードをもう一度入力してください', 'Replace if Master': 'マスターなら置換', 'Replace if Newer': '新しいものがあれば置き換える', 'Replace': '置換', 'Report Another Assessment...': '別のアセスメントをレポートする', 'Report Details': 'レポートの詳細', 'Report Resource': 'レポートリソース', 'Report Type': 'レポートタイプ', 'Report Types Include': 'レポートタイプを含む', 'Report a Problem with the Software': 'ソフトウェアの不具合を報告', 'Report added': 'レポートを追加しました', 'Report deleted': 'レポートを削除しました', 'Report my location': '自分の現在地を報告', 'Report that person missing': '行方不明者の情報を報告', 'Report the contributing factors for the current EMS status.': '現在の緊急受け入れ状態に影響している事由を記載', 'Report the contributing factors for the 
current OR status.': '現在の手術室の状況報告', 'Report the person as found': '人物の所在情報を報告', 'Report them as found': '発見として報告', 'Report them missing': '行方不明として報告', 'Report updated': 'レポートを更新しました', 'Report': 'レポート', 'Reporter Name': 'レポーターの氏名', 'Reporter': 'レポーター', 'Reporting on the projects in the region': 'この地域で展開しているプロジェクトのレポート', 'Reports': 'レポート', 'Request Added': '支援要請を追加しました', 'Request Canceled': '支援要請をキャンセルしました', 'Request Details': '支援要請の詳細', 'Request Item Details': '救援物資要請の詳細', 'Request Item added': '救援物資の要請を追加しました', 'Request Item deleted': '救援物資の要請を削除しました', 'Request Item updated': '救援物資の要請を更新しました', 'Request Item': '物資を要請', 'Request Items': '物資の要請', 'Request Status': '支援要請の状況', 'Request Type': '支援要請のタイプ', 'Request Updated': '支援要請を更新しました', 'Request added': '支援要請を追加しました', 'Request deleted': '支援要請を削除しました', 'Request for Role Upgrade': '上位権限の取得要求', 'Request updated': '支援要請を更新しました', 'Request': '支援要請', 'Request, Response & Session': '要求、応答、およびセッション', 'Requested By Site': '支援要請を行ったサイト', 'Requested By Warehouse': '倉庫からの要請', 'Requested By': '支援要求元', 'Requested Items': '支援要請が行われた物資', 'Requested by': '要求元', 'Requested on': 'に関する要請', 'Requested': '要求済み', 'Requester': '要請の実施者', 'Requestor': '要請者', 'Requests From': '支援要請元', 'Requests for Item': '物資に関する要請', 'Requests': '支援要請', 'Requires Login!': 'ログインしてください。', 'Requires login': 'ログインが必要です', 'Rescue and recovery': '救出、あるいは遺体回収作業', 'Reset Password': 'パスワードのリセット', 'Reset form': 'フォームをクリア', 'Reset': 'リセット', 'Resize Feature: Select the feature you wish to resize & then Drag the associated dot to your desired size': 'Featureのリサイズ: リサイズしたいfeatureを選択し、適切なサイズになるようドラッグしてください', 'Resolve Conflict': '競合の解決', 'Resolve link brings up a new screen which helps to resolve these duplicate records and update the database.': '"解決"リンクでは、新しい画面を開き、重複している情報を解決してデータベースを更新します', 'Resolve': '解決済みか', 'Resource Details': 'リソースの詳細', 'Resource added': 'リソースを追加しました', 'Resource deleted': 'リソースを削除しました', 'Resource updated': 'リソースを更新しました', 'Resource': 'リソース', 
'Resources': 'リソース', 'Respiratory Infections': '呼吸器感染症', 'Response Details': '応答の詳細', 'Response added': '返答を追加しました', 'Response deleted': 'Responseを削除しました', 'Response updated': '返答を更新しました', 'Response': '対応', 'Responses': '対応', 'Restricted Access': 'アクセス制限中', 'Restricted Use': '制限された目的での使用', 'Restrictions': '制限', 'Results': '結果', 'Retail Crime': '小売犯罪', 'Retrieve Password': 'パスワードの取得', 'Rice': '米穀', 'Riot': '暴動', 'River Details': '河川の詳細', 'River added': '河川を追加しました', 'River deleted': '河川を削除しました', 'River updated': '河川を更新しました', 'River': '河川', 'Rivers': '河川', 'Road Accident': '道路障害', 'Road Closed': '道路(通行止め)', 'Road Conditions': '路面の状況', 'Road Delay': '道路遅延', 'Road Hijacking': '道路ハイジャック', 'Road Usage Condition': '道路の路面状況', 'Role Details': '権限の詳細', 'Role Name': '権限の名称', 'Role Required': '権限が必要', 'Role Updated': '権限を更新しました', 'Role added': '権限を追加しました', 'Role deleted': '権限を削除しました', 'Role updated': '権限を更新しました', 'Role': '権限', 'Role-based': '権限に基づいた', 'Roles Permitted': '許可された権限', 'Roles': '権限', 'Roof tile': '屋根瓦', 'Roofs, floors (vertical load)': '屋根、床板 (vertical load)', 'Roster': '名簿', 'Rotate Feature: Select the feature you wish to rotate & then Drag the associated dot to rotate to your desired location': '地物の回転: 回転させたい地物を選択し、目的の位置に回転させるために関連付けられた点をドラッグします。', 'Row Choices (One Per Line)': '行の選択 (One Per Line)', 'Rows in table': 'テーブルの行', 'Rows selected': '行が選択されました', 'Run Functional Tests': '動作テストの実行', 'Run Interval': '実行間隔', 'Running Cost': 'ランニングコスト', 'SITUATION': '状況', 'Safe environment for vulnerable groups': '被災者にとって安全な環境である', 'Safety Assessment Form': '安全性アセスメントフォーム', 'Safety of children and women affected by disaster': '被災した女性と未成年が保護されている', 'Sahana Administrator': 'Sahana管理者', 'Sahana Blue': 'Sahana ブルー', 'Sahana Community Chat': 'Sahanaコミュニティチャット', 'Sahana Eden <=> Other (Sahana Agasti, Ushahidi, etc.)': 'Sahana Eden <=> その他 (Sahana Agasti, Ushahidi 等.)', 'Sahana Eden <=> Other': 'Sahana Eden <=> 他のシステム', 'Sahana Eden Disaster Management Platform': 'Sahana Eden 
被災地支援情報共有プラットフォーム', 'Sahana Eden Website': 'Sahana Eden公式ページ', 'Sahana Eden is a family of applications that provide solutions to coordination and collaboration for organizations working in disaster management.': 'Sahana Edenは、災害復旧に関わる様々な支援団体が、お互いに協力しあうために存在します。', 'Sahana FOSS Disaster Management System': 'Sahana オープンソース 被災地情報共有システム', 'Sahana Green': 'Sahana グリーン', 'Sahana Login Approval Pending': 'Sahana ログインは承認待ちです', 'Sahana access granted': 'Sahanaへのアクセス権を付与', 'Sahana: new request has been made. Please login to see if you can fulfil the request.': 'Sahana: 新しい支援要請が行われました。ログインして、支援要請を実現できるか確認してください。', 'Salted Fish': '塩漬けの魚', 'Salvage material usable from destroyed houses': '全壊した家屋から回収した物品(使用可能)', 'Salvage material usable from destroyed schools': '全壊した校舎から回収した物品(使用可能)', 'Sanitation problems': '衛生設備に問題', 'Satellite Office': '現地活動拠点', 'Satellite': '衛星', 'Saturday': '土曜日', 'Save any Changes in the one you wish to keep': '残す方の候補地へ行った変更を保存します。', 'Save': '保存', 'Save: Default Lat, Lon & Zoom for the Viewport': 'デフォルト表示範囲の緯度,経度,ズームレベルを保存', 'Saved.': '保存しました', 'Saving...': '保存しています...', 'Scale of Results': '結果の規模', 'Schedule': 'スケジュール', 'School Closure': '学校閉鎖', 'School Lockdown': '学校の厳重封鎖', 'School Reports': '学校のレポート', 'School Teacher': '学校教師', 'School activities': '学校の活動', 'School assistance received/expected': '学校用支援品を受領済み/受領予定', 'School assistance': '学校の援助', 'School attendance': '学校へ出席者', 'School destroyed': '校舎全壊', 'School heavily damaged': '校舎の深刻な損壊', 'School tents received': '仮校舎用テントを受領', 'School tents, source': '仮校舎用テント、送付元', 'School used for other purpose': '校舎を他目的で利用中', 'School': '学校', 'School/studying': '学校/勉強', 'Schools': '学校', 'Search & List Bin Types': 'Bin Typeを検索して一覧表示', 'Search & List Bins': 'Binsを検索して一覧表示', 'Search & List Catalog': 'カタログを検索して一覧表示', 'Search & List Category': 'カテゴリを検索して一覧表示', 'Search & List Items': '救援物資を検索して一覧表示', 'Search & List Locations': 'ロケーションを検索して一覧表示', 'Search & List Site': 'Siteを検索して一覧表示', 'Search & List Sub-Category': 
'サブカテゴリを検索して一覧表示', 'Search & List Unit': '単位を検索して一覧表示', 'Search Activities': '支援活動の検索', 'Search Activity Report': '支援活動レポートの検索', 'Search Addresses': '住所を検索', 'Search Aid Requests': '援助要請を検索', 'Search Alternative Items': 'その他のアイテムを検索', 'Search Assessment Summaries': 'アセスメントの要約を検索', 'Search Assessments': 'アセスメントを検索', 'Search Asset Assignments': '資産割り当ての検索', 'Search Assets': '資産の検索', 'Search Baseline Type': 'Baseline Typeを検索', 'Search Baselines': '基準値の検索', 'Search Brands': '銘柄を検索', 'Search Budgets': '予算を検索', 'Search Bundles': 'Bundleを検索', 'Search Catalog Items': '救援物資カタログを検索', 'Search Category<>Sub-Category<>Catalog Relation': 'Category<>Sub-Category<>Catalog関係の検索', 'Search Checklists': 'チェックリストを検索', 'Search Cluster Subsectors': 'クラスタのサブセクタを検索', 'Search Clusters': 'クラスタを検索', 'Search Commitment Items': 'コミットされた救援物資の検索', 'Search Commitments': 'コミットの検索', 'Search Configs': '設定を検索', 'Search Contact Information': '連絡先情報を検索', 'Search Contacts': '連絡先を検索', 'Search Credentials': '証明書の検索', 'Search Distribution Items': '配給物資を検索', 'Search Distributions': '配給所を検索', 'Search Documents': 'ドキュメントを検索', 'Search Donors': '資金提供組織の検索', 'Search Existing Locations': '既存のロケーションを検索する', 'Search Feature Layers': 'Feature Layersの検索', 'Search Flood Reports': '洪水レポートの検索', 'Search Geonames': 'Geonamesの検索', 'Search Groups': 'グループの検索', 'Search Hospitals': '病院情報の検索', 'Search Identity': 'ID情報の検索', 'Search Images': '画像の検索', 'Search Impact Type': '被害の種類を検索', 'Search Impacts': '影響の検索', 'Search Incident Reports': 'インシデントレポートを検索', 'Search Incidents': 'インシデントの検索', 'Search Inventory Items': '備蓄物資を検索', 'Search Inventory Stores': '物資集積地点の検索', 'Search Item Catalog Category(s)': 'アイテムカタログカテゴリの検索', 'Search Item Catalog(s)': '救援物資カタログの検索', 'Search Item Categories': '救援物資カテゴリを検索', 'Search Item Packs': '物資のパックを検索', 'Search Item Sub-Category(s)': 'アイテムサブカテゴリの検索', 'Search Items': 'アイテムの検索', 'Search Keys': 'Keyの検索', 'Search Kits': 'Kitsの検索', 'Search Layers': 'レイヤの検索', 'Search Level 1 Assessments': 'レベル1アセスメントの検索', 'Search 
Level 2 Assessments': 'レベル2のアセスメントを検索', 'Search Locations': 'ロケーションの検索', 'Search Log Entry': 'ログエントリの検索', 'Search Map Profiles': '地図設定の検索', 'Search Markers': 'マーカーの検索', 'Search Members': 'メンバーの検索', 'Search Membership': 'メンバシップの検索', 'Search Memberships': 'メンバシップの検索', 'Search Metadata': 'メタデータの検索', 'Search Need Type': '需要タイプの検索', 'Search Needs': '必要な物資を検索', 'Search Notes': '追加情報を検索', 'Search Offices': 'オフィスの検索', 'Search Organizations': '団体の検索', 'Search Peer': '同期先を検索', 'Search Peers': 'データ同期先を検索', 'Search Personal Effects': 'Personal Effectsの検索', 'Search Persons': '人物情報の検索', 'Search Photos': '写真の検索', 'Search Positions': 'Positionsの検索', 'Search Problems': '問題の検索', 'Search Projections': '地図投影法の検索', 'Search Projects': 'プロジェクトの検索', 'Search Rapid Assessments': '被災地の現況アセスメントを検索', 'Search Received Items': '受領済み救援物資の検索', 'Search Received Shipments': '受信済みの出荷の検索', 'Search Records': 'レコードの検索', 'Search Recovery Reports': '遺体回収レポートを検索', 'Search Registations': '登録情報の検索', 'Search Registration Request': '登録要請を検索', 'Search Report': 'レポートの検索', 'Search Reports': 'レポートの検索', 'Search Request Items': '物資の要請を検索', 'Search Request': '支援要請の検索', 'Search Requested Items': '支援要請されている物資を検索', 'Search Requests': '支援要請の検索', 'Search Resources': 'リソースの検索', 'Search Responses': '検索の応答', 'Search Rivers': '河川を検索', 'Search Roles': '役割の検索', 'Search Sections': 'セクションの検索', 'Search Sectors': '活動分野を検索', 'Search Sent Items': '送付した物資を検索', 'Search Sent Shipments': '送信した出荷の検索', 'Search Service Profiles': 'サービスプロファイルの検索', 'Search Settings': '設定の検索', 'Search Shelter Services': '避難所での提供サービスを検索', 'Search Shelter Types': '避難所タイプの検索', 'Search Shelters': '避難所の検索', 'Search Shipment Transit Logs': '輸送履歴の検索', 'Search Shipment/Way Bills': '輸送費/渡航費の検索', 'Search Shipment<>Item Relation': '輸送と救援物資の関係性の検索', 'Search Site(s)': 'Siteの検索', 'Search Skill Types': 'スキルタイプの検索', 'Search Skills': 'スキルを検索', 'Search Solutions': '解決案の検索', 'Search Staff Types': 'スタッフタイプの検索', 'Search Staff': 'スタッフの検索', 'Search Status': '状態の検索', 'Search Storage 
Bin Type(s)': 'Storage Bin Typeの検索', 'Search Storage Bin(s)': 'Storage Bin(s)の検索', 'Search Storage Location(s)': '備蓄地点の検索', 'Search Subscriptions': '寄付申し込みを検索', 'Search Support Requests': '支援要求の検索', 'Search Tasks': 'タスクの検索', 'Search Teams': 'チームの検索', 'Search Themes': 'テーマの検索', 'Search Tickets': 'チケットの検索', 'Search Tracks': '追跡情報の検索', 'Search Twitter Tags': 'Twitterのタグを検索', 'Search Units': '単位の検索', 'Search Users': 'ユーザの検索', 'Search Volunteer Registrations': 'ボランティア登録の検索', 'Search Volunteers': 'ボランティアの検索', 'Search Warehouse Items': '倉庫の物資を検索', 'Search Warehouses': 'Warehousesの検索', 'Search and Edit Group': 'グループを検索して編集', 'Search and Edit Individual': '人物情報を検索して個別に編集', 'Search by ID Tag': 'IDタグで検索', 'Search for Items': '物資の検索', 'Search for a Hospital': '病院を探す', 'Search for a Location': '検索地域を指定します', 'Search for a Person': '人物を探す', 'Search for a Project': 'プロジェクトを探す', 'Search for a Request': '支援要請の検索', 'Search for a shipment received between these dates': 'ある期間内に受け取られた輸送を検索する', 'Search for an item by category.': 'カテゴリで物資を検索', 'Search for an item by text.': 'テキストで項目を検索', 'Search here for a person record in order to:': '人物情報を検索することで、以下の事柄を行うことができます。', 'Search messages': 'メッセージの検索', 'Search': '検索', 'Searching for different groups and individuals': '他のグループと個人を探す', 'Secondary Server (Optional)': 'セカンダリサーバ(オプション)', 'Seconds must be a number between 0 and 60': '秒には0-60の間の数字を記入してください', 'Seconds must be a number greater than 0 and less than 60': '秒は0から60の間で入力してください', 'Section Details': 'Sectionの詳細', 'Section deleted': 'Sectionを削除しました', 'Section updated': 'セクションを更新しました', 'Sections': 'セクション', 'Sector Details': '活動分野の詳細', 'Sector added': '活動分野を追加しました', 'Sector deleted': '活動分野を削除しました', 'Sector updated': '活動分野を更新しました', 'Sector': '活動分野', 'Sectors': '活動分野', 'Security Policy': 'セキュリティポリシー', 'Security Status': 'セキュリティステータス', 'Security problems': 'セキュリティーの問題', 'See unassigned recovery requests': 'まだ割り当てられていない遺体回収要請を見る', 'Seen': '発見情報あり', 'Select 2 potential locations from the dropdowns.': 
'候補地を2つ、ドロップダウンから選択します。', 'Select Items from the Request': '支援要請を基にアイテムを選択する', 'Select Items from this Inventory': '備蓄中の物資から選択', 'Select Language': '言語選択', 'Select Organization': '団体の選択', 'Select Photos': '写真の選択', 'Select a location': 'ロケーションを選択', 'Select a question from the list': 'リストから質問を選択してください', 'Select a range for the number of total beds': 'ベッド総数の範囲を選択', 'Select all that apply': '該当する項目を全て選択', 'Select an Organization to see a list of offices': '団体を選択すると、所属するオフィスが表示されます', 'Select an existing Location': '既に登録してあるロケーションを選択してください', 'Select the Cluster Layers for Assessments and Activities to analyse the Gaps:': 'アセスメントと支援活動のギャップを解析するクラスタの層を選択:', 'Select the overlays for Assessments and Activities relating to each Need to identify the gap.': 'オーバーレイを指定し、適切なアセスメントと支援活動を表示させてニーズを明確にします。', 'Select the person assigned to this role for this project.': 'この人物に、プロジェクト内の権限を担当させます。', 'Select the person associated with this scenario.': 'このタスクに関連する人物を選択してください。', 'Select to see a list of subdivisions.': '項目を選択すると、より細かい分類を選択できます。', 'Select to show this configuration in the Regions menu.': '範囲メニューで表示する構成を選択して下さい', 'Select': '選択', 'Selects whether to use a Modem, Tropo or other Gateway for sending out SMS': 'SMS送信時に、モデム、Tropoまたはゲートウェイのどちらを使用するかを選択', 'Selects whether to use the gateway or the Modem for sending out SMS': 'SMS送信時、モデムとゲートウェイのどちらを使用するか選択', 'Self Registration': '本人による登録', 'Self-registration': '本人による登録', 'Send Alerts using Email &/or SMS': '電子メールまたはSMSを使用してアラートを送信', 'Send Items': '物資を送付', 'Send Mail': 'メール送信', 'Send Message': 'メッセージを送る', 'Send Notification': '通知を送信', 'Send Shipment': '輸送を開始する', 'Send from %s': '依頼主( %s )', 'Send message': 'メッセージ送信', 'Send new message': '新規メッセージ送信', 'Send': '物資送付', 'Sends & Receives Alerts via Email & SMS': '電子メール/SMS 経由でアラート送信/受信', 'Senior (50+)': '高齢者 (50+)', 'Sensitivity': '感度', 'Sent Item Details': '送付した物資の詳細', 'Sent Item deleted': '輸送済み物資を削除しました', 'Sent Item updated': '送付した救援物資を更新しました', 'Sent Shipment Details': '送付物資の詳細', 'Sent 
Shipment canceled and items returned to Inventory': '送付処理した輸送がキャンセルされ、物資は倉庫に戻りました', 'Sent Shipment canceled': '輸送開始をキャンセルしました', 'Sent Shipment updated': '送信した物資が更新されました', 'Sent Shipments': '物資を送付しました', 'Sent': '送信', 'Separate latrines for women and men': 'トイレは男女別である', 'Separated children, caregiving arrangements': '親と離れた子供だちのための保育手配', 'Seraiki': 'セライキ', 'Serial Number': 'シリアルナンバー', 'Series': 'シリーズ', 'Server': 'サーバ', 'Service Catalog': 'サービスカタログ', 'Service or Facility': 'サービス、または施設', 'Service profile added': 'サービスプロファイルを追加しました', 'Service profile deleted': 'サービスプロファイルを削除しました', 'Service profile updated': 'サービスプロファイルを更新しました', 'Service': 'サービス', 'Services Available': '利用可能なサービス', 'Services': 'サービス', 'Setting Details': '設定の詳細', 'Setting added': '設定を追加しました', 'Setting deleted': '設定を削除しました', 'Setting updated': '設定を更新しました', 'Settings updated': '設定を更新しました', 'Settings were reset because authenticating with Twitter failed': 'Twitterの認証に失敗したため、設定をクリアします', 'Settings': '設定', 'Severe': '深刻', 'Severity': '深刻度', 'Severity:': '深刻度:', 'Share a common Marker (unless over-ridden at the Feature level)': 'マーカーの共有 (機能レイヤで上書きされない限り)', 'Shelter & Essential NFIs': '避難所/生活用品', 'Shelter Details': '避難所の詳細', 'Shelter Name': '避難所名称', 'Shelter Registry': '避難所登録', 'Shelter Service Details': '避難所サービスの詳細', 'Shelter Service added': '避難所サービスを追加しました', 'Shelter Service deleted': '避難所サービスを削除しました', 'Shelter Service updated': '避難所サービスを更新しました', 'Shelter Service': '避難所サービス', 'Shelter Services': '避難所サービス', 'Shelter Type Details': '避難所タイプの詳細', 'Shelter Type added': '避難所タイプを追加しました', 'Shelter Type deleted': '避難所タイプを削除しました', 'Shelter Type updated': '避難所サービスを更新しました', 'Shelter Type': '避難所タイプ', 'Shelter Types and Services': '避難所のタイプとサービス', 'Shelter Types': '避難所タイプ', 'Shelter added': '避難所を追加しました', 'Shelter deleted': '避難所を削除しました', 'Shelter updated': '避難所を更新しました', 'Shelter': '避難所', 'Shelter/NFI Assistance': '避難所 / 生活用品支援', 'Shelter/NFI assistance received/expected': '避難所 / 生活必需品の支援を受領済み、あるいは受領予定', 'Shelters': '避難所', 
'Shipment Created': '輸送が作成されました', 'Shipment Details': '輸送の詳細', 'Shipment Items received by Inventory': '物資備蓄地点から送付された救援物資', 'Shipment Items sent from Inventory': '備蓄物資から輸送を行いました', 'Shipment Items': '救援物資の輸送', 'Shipment Transit Log Details': '輸送履歴の詳細', 'Shipment Transit Log added': '輸送履歴を追加しました', 'Shipment Transit Log deleted': '輸送履歴を削除しました', 'Shipment Transit Log updated': '輸送履歴を更新しました', 'Shipment Transit Logs': '輸送履歴', 'Shipment/Way Bill added': '輸送/移動費を追加しました', 'Shipment/Way Bills Details': '輸送/移動費の詳細', 'Shipment/Way Bills deleted': '輸送/移動費を削除しました', 'Shipment/Way Bills updated': '輸送/移動費を更新しました', 'Shipment/Way Bills': '輸送/移動費', 'Shipment<>Item Relation added': '輸送<>物資間の関係を追加しました', 'Shipment<>Item Relation deleted': '輸送<>アイテム間の関係を削除しました', 'Shipment<>Item Relation updated': '輸送<>物資間の関係を更新しました', 'Shipment<>Item Relations Details': '輸送<>物資間の関係詳細', 'Shipment<>Item Relations': '輸送<>物資間の関係', 'Shipments To': '輸送先', 'Shipments': '輸送', 'Shooting': '銃撃', 'Short Assessment': '簡易評価', 'Short Description': '概要', 'Show Checklist': 'チェックリストを表示', 'Show Details': '詳細を表示', 'Show Map': '地図の表示', 'Show Region in Menu?': '地域をメニューで表示しますか?', 'Show on map': '地図上に表示', 'Sign-up as a volunteer': 'ボランティアとして登録する', 'Sign-up for Account': 'アカウント登録', 'Sign-up succesful - you should hear from us soon!': '登録できました。すぐに連絡が送られます。', 'Sindhi': 'シンド語', 'Site Address': 'サイトの住所', 'Site Administration': 'このサイト自体の管理', 'Site Description': 'サイトの説明', 'Site Details': 'Siteの詳細', 'Site ID': 'サイトID', 'Site Location Description': 'サイト ロケーションの説明', 'Site Location Name': 'サイトロケーション名', 'Site Manager': 'Site 管理者', 'Site Name': 'Site の名前', 'Site added': 'サイトを追加しました', 'Site deleted': 'サイトを削除しました', 'Site updated': 'サイトを更新しました', 'Site': 'サイト', 'Site/Warehouse': 'サイト/倉庫', 'Sites': 'サイト', 'Situation Awareness & Geospatial Analysis': '広域情報の取得や、地理情報の分析を行ないます', 'Sketch': 'スケッチ', 'Skill Details': 'スキルの詳細', 'Skill Status': 'スキル状況', 'Skill Type Details': 'スキルタイプの詳細', 'Skill Type added': 'スキルタイプを追加しました', 'Skill Type deleted': 
'スキルタイプを削除しました', 'Skill Type updated': 'スキルタイプを更新しました', 'Skill Types': 'スキルタイプ', 'Skill added': 'スキルを追加しました', 'Skill deleted': 'スキルを削除しました', 'Skill updated': 'スキルを更新しました', 'Skill': 'スキル', 'Skills': 'スキル', 'Slope failure, debris': '斜面崩壊・崩壊堆積物', 'Small Trade': '小規模取引', 'Smoke': '煙', 'Snapshot Report': 'スナップショットレポート', 'Snapshot': 'スナップショット', 'Snow Fall': '降雪', 'Snow Squall': '豪雪', 'Soil bulging, liquefaction': '土壌隆起・液状化', 'Solid waste': '固形廃棄物', 'Solution Details': '解決案の詳細', 'Solution Item': '解決案項目', 'Solution added': '解決案を追加しました', 'Solution deleted': '解決案を削除しました', 'Solution updated': '解決案を更新しました', 'Solution': '解決案', 'Solutions': '解決案', 'Some': '散見', 'Sorry - the server has a problem, please try again later.': 'すみません、サーバーに問題が発生しています。時間を置いてやり直してください。', 'Sorry that location appears to be outside the area of the Parent.': 'このロケーションは親属性のエリアの外に表示されます。', 'Sorry that location appears to be outside the area supported by this deployment.': 'すいません、この位置は、このデプロイメントでサポートされている領域の外です。', 'Sorry, I could not understand your request': '残念ながら、リクエストが理解できませんでした。', 'Sorry, only users with the MapAdmin role are allowed to create location groups.': '申し訳ありませんが、 MapAdmin 権限を持つユーザだけがロケーションのグループを作れます', 'Sorry, only users with the MapAdmin role are allowed to edit these locations': '申し訳ありませんが、ロケーションの編集を行うにはMapAdmin権限を持ったユーザである必要があります。', 'Sorry, something went wrong.': 'すいません、何か問題が発生しています。', 'Sorry, that page is forbidden for some reason.': 'すいません、都合により、このページは閲覧禁止です。', 'Sorry, that service is temporary unavailable.': 'すいません、このサービスは一時的に利用不可となっています。', 'Sorry, there are no addresses to display': 'すいません、表示する住所がありません', 'Source ID': '情報元ID', 'Source Time': '情報ソース入手時刻', 'Source Type': '情報ソース種別', 'Source': '情報元', 'Sources of income': '収入源', 'Space Debris': '宇宙廃棄物', 'Spanish': 'スペイン語', 'Special Ice': '特別な氷', 'Special Marine': '特別海上', 'Special needs': '特別な要求', 'Specialized Hospital': '専門病院', 'Specific Area (e.g. 
Building/Room) within the Location that this Person/Group is seen.': 'ある人々やグループが見られるロケーションの中の特別な場所 (建物、部屋等)', 'Specific Location': '特定のロケーション', 'Specific locations need to have a parent of level': 'ロケーションを指定するには、そのロケーションの親属性指定が必要です', 'Specify a descriptive title for the image.': '画像の説明として一言タイトルをつけてください。', 'Specify the bed type of this unit.': 'この施設にある寝具の種別を指定してください', 'Specify the minimum sustainability in weeks or days.': '最短で何週間、あるいは何日以内に枯渇の可能性があるかを記載してください', 'Specify the number of available sets': '利用可能なセットの個数を入力してください', 'Specify the number of available units (adult doses)': '(成人が使用するとして)使用可能な個数を入力してください', 'Specify the number of available units (litres) of Ringer-Lactate or equivalent solutions': '使用可能な乳酸リンゲル液あるいは同等品のリッター数を入力してください', 'Specify the number of sets needed per 24h': '24時間ごとに必要なセットの数を指定する', 'Specify the number of units (adult doses) needed per 24h': '(成人が使用するとして)24時間ごとに必要な個数を入力してください', 'Specify the number of units (litres) of Ringer-Lactate or equivalent solutions needed per 24h': '24時間ごとに必要な乳酸リンゲル液あるいは同等品のリッター数を入力してください', 'Spherical Mercator?': '球面メルカトル?', 'Spreadsheet Importer': 'スプレッドシートの取り込み', 'Spreadsheet uploaded': 'スプレッドシートがアップロードされました', 'Spring': '湧き水', 'Squall': 'スコール', 'Staff 2': 'スタッフ 2', 'Staff Details': 'スタッフの詳細', 'Staff Type Details': 'スタッフタイプの詳細', 'Staff Type added': 'スタッフタイプを追加しました', 'Staff Type deleted': 'スタッフタイプを削除しました', 'Staff Type updated': 'スタッフタイプを更新しました', 'Staff Types': 'スタッフ分類', 'Staff added': 'スタッフを追加しました', 'Staff deleted': 'スタッフを削除しました', 'Staff present and caring for residents': '上記施設にスタッフが配置され、ケアを行っている', 'Staff updated': 'スタッフを更新しました', 'Staff': 'スタッフ', 'Staffing': 'スタッフ配備', 'Stairs': '階段', 'Start date and end date should have valid date values': '開始日と終了日は正しい値である必要があります', 'Start date': '開始日', 'Start of Period': '開始期間', 'Stationery': '文房具', 'Status Report': 'ステータスレポート', 'Status added': '状況が追加されました', 'Status deleted': 'ステータスを削除しました', 'Status of clinical operation of the facility.': '施設で行われている診療の状況を記載してください。', 'Status of general 
operation of the facility.': '施設の運用状況情報を記載してください。', 'Status of morgue capacity.': '死体安置所の収容状況です。', 'Status of operations of the emergency department of this hospital.': 'この病院の緊急手術室の状態です。', 'Status of security procedures/access restrictions in the hospital.': '病院のアクセス制限/セキュリティ手順の状態。', 'Status of the operating rooms of this hospital.': 'この病院の手術室の状態。', 'Status updated': '状況を更新しました', 'Status': 'ステータス', 'Steel frame': '鉄骨', 'Storage Bin Details': '物資保管場所の詳細', 'Storage Bin Number': 'Storage Bin番号', 'Storage Bin Type Details': '物資保管タイプの詳細', 'Storage Bin Type added': '物資保管タイプを追加しました', 'Storage Bin Type deleted': 'Storage Binタイプを削除しました', 'Storage Bin Type updated': 'Storage Binタイプを更新しました', 'Storage Bin Type': 'Storage Binタイプ', 'Storage Bin Types': '収納箱のタイプ', 'Storage Bin added': 'Storage Binを追加しました', 'Storage Bin deleted': 'Storage Bin を削除しました', 'Storage Bin updated': 'Storage Bin を更新しました', 'Storage Bin': '物資貯蔵容器', 'Storage Bins': '物資保管場所', 'Storage Location Details': '備蓄地点の詳細', 'Storage Location ID': '備蓄地点ID', 'Storage Location Name': '備蓄地点名称', 'Storage Location added': '備蓄地点を追加しました', 'Storage Location deleted': '備蓄地点を削除しました', 'Storage Location updated': '備蓄地点を更新しました', 'Storage Location': '備蓄地点', 'Storage Locations': '備蓄地点', 'Store spreadsheets in the Eden database': 'Edenのデータベースにスプレッドシートを格納', 'Storeys at and above ground level': '階層、あるいは地面より上部', 'Storm Force Wind': '嵐の風の強さ', 'Storm Surge': '高潮', 'Stowaway': '密航者', 'Street (continued)': '住所 (続き)', 'Street Address': '住所', 'Street': 'ストリート', 'Strong Wind': '強風', 'Structural Hazards': '構造破壊', 'Structural': '構造的な', 'Sub Category': 'サブカテゴリ', 'Sub-type': 'サブタイプ', 'Subject': '件名', 'Submission successful - please wait': '送信に成功しました。しばらくお待ちください', 'Submission successful - please wait...': '送信に成功しました。しばらくお待ちください', 'Submit New (full form)': '(完全なフォームで)新しく投稿する', 'Submit New (triage)': '新しい (トリアージ) を追加', 'Submit New': '新規登録', 'Submit a request for recovery': '遺体回収要請を作成する', 'Submit new Level 1 assessment (full form)': 
'レベル1のアセスメントを投稿する(完全なフォーム)', 'Submit new Level 1 assessment (triage)': '新しいレベル1アセスメント(トリアージ)を追加', 'Submit new Level 2 assessment': '新しいレベル2アセスメントの登録', 'Submit': '送信', 'Subscription Details': '寄付申し込みの詳細', 'Subscription added': '寄付申し込みを追加しました', 'Subscription deleted': '寄付申し込みを削除しました', 'Subscription updated': '寄付申し込みを更新しました', 'Subscriptions': '寄付申し込み', 'Subsistence Cost': '生存コスト', 'Suburb': '郊外', 'Sufficient care/assistance for chronically ill': '慢性疾患罹患者への十分なケア / 介護がある', 'Suggest not changing this field unless you know what you are doing.': 'よくわからない場合は、この項目を変更しないでください。', 'Summary by Administration Level': '管理レベルの概要', 'Summary': '要約', 'Sunday': '日曜', 'Supervisor': '管理権限を追加', 'Supplies': '支給品', 'Support Request': '支援要請', 'Support Requests': '支援の要請', 'Supports the decision making of large groups of Crisis Management Experts by helping the groups create ranked list.': '危機管理の専門グループの助言を取り入れることで、救援活動の優先順位を作成しやすくします。', 'Sure you want to delete this object?': 'このオブジェクトを削除してもよろしいですか?', 'Surgery': '外科', 'Survey Answer Details': '調査回答詳細', 'Survey Answer added': '調査の回答を追加しました', 'Survey Answer deleted': '調査の回答を削除しました', 'Survey Answer updated': '調査回答を更新しました', 'Survey Answer': '調査回答', 'Survey Module': '調査モジュール', 'Survey Name': 'Survey 名', 'Survey Question Details': '調査項目の詳細', 'Survey Question Display Name': 'フィードバックの質問の表示名', 'Survey Question added': '調査の質問を追加しました', 'Survey Question deleted': '調査の質問を削除しました', 'Survey Question updated': 'Survey Questionを更新しました', 'Survey Question': '調査の質問', 'Survey Section Details': 'フィードバック項目の詳細', 'Survey Section Display Name': '調査項目の表示名', 'Survey Section added': '調査項目を追加しました', 'Survey Section deleted': 'フィードバック項目を削除しました', 'Survey Section updated': 'サーベイセクションを更新しました', 'Survey Section': '調査項目', 'Survey Series Details': 'Survey Seriesの詳細', 'Survey Series Name': 'フィードバックシリーズ名', 'Survey Series added': '一連の調査を追加しました', 'Survey Series deleted': '一連の調査を削除しました', 'Survey Series updated': '連続調査を更新しました', 'Survey Series': '一連の調査', 'Survey Template Details': 
'調査テンプレートの詳細', 'Survey Template added': 'Surveyテンプレートを追加しました', 'Survey Template deleted': '調査テンプレートを削除しました', 'Survey Template updated': '調査のテンプレートを更新しました', 'Survey Template': '調査テンプレート', 'Survey Templates': '調査のテンプレート', 'Switch this on to use individual CSS/Javascript files for diagnostics during development.': '開発時にこのスイッチをONにすることで、CSS/Javascriptファイルの診断を行なえます。', 'Symbology': 'コード', 'Sync Conflicts': 'データ同期中に競合が発生しました', 'Sync History': 'データ同期履歴', 'Sync Now': 'データ同期中', 'Sync Partners are instances or peers (SahanaEden, SahanaAgasti, Ushahidi, etc.) that you want to sync information with. Click on the link on the right to go the page where you can add sync partners, search for sync partners and modify them.': 'データ同期先とは、情報の同期を行うインスタンスやピアのことを指します。(Sahana EdenやSahanaAgasti、Ushahidiなどと同期可能です) 同期先の登録や検索、登録情報の変更を行う際は、リンクをクリックしてページを表示してください。', 'Sync Partners': 'データ同期パートナー', 'Sync Pools': 'プールの同期', 'Sync Schedule': 'データ同期スケジュール', 'Sync Settings': 'データ同期設定', 'Sync process already started on ': 'データ同期プロセスは既に開始しています', 'Synchronisation History': 'データ同期履歴', 'Synchronisation': '同期', 'Synchronization Conflicts': '同期のコンフリクト', 'Synchronization Details': 'データ同期の詳細', 'Synchronization History': 'データ同期履歴', 'Synchronization Peers': 'データ同期先', 'Synchronization Settings': 'データ同期設定', 'Synchronization allows you to share data that you have with others and update your own database with latest data from other peers. 
This page provides you with information about how to use the synchronization features of Sahana Eden': 'データ同期を使用すると、他の端末とデータを共有し、自身のデータを最新の状態に更新することができます。このページには、SahanaEdenにおいてデータ同期を行う方法が記載されています。', 'Synchronization not configured.': 'データ同期が設定されていません', 'Synchronization settings updated': 'データ同期設定を更新しました', 'Synchronization': 'データ同期', 'Syncronisation History': 'データ同期履歴', 'Syncronisation Schedules': 'データ同期スケジュール', 'System allows the General Public to Report Incidents & have these Tracked.': 'システムを使うことで、一般市民によるインシデントの報告、および報告されたインシデントの追跡を行うことができます。', 'System allows the tracking & discovery of Items stored in Locations.': 'システムにより、物資がどこで保持されているかを追跡、明確化することができます。', 'System is a central online repository where all relief organizations, relief workers, government agents and camp sites for displaced personnel can coordinate the supply of aid with their demand. It allows users to allocate the available resources to fulfill the demands effectively and efficiently.': 'このシステムは、支援団体、個々の支援者、政府職員、そして避難所に移動した人々の間で、援助の需要と供給の調整を図るための、オンラインの中央データベースです。このシステムを用いて、利用可能な資源を、需要を満たすように、有効かつ効率的に割り当てることができます。', 'System keeps track of all Volunteers working in the disaster region. 
It captures not only the places where they are active, but also captures information on the range of services they are providing in each area.': 'この仕組みでは、災害地域の全てのボランティア情報を提供します。ボランティアの活動場所に加え、そこで提供する支援内容も提供します。', 'TMS Layers': 'TMSレイヤ', 'Table name': 'テーブル名', 'Tags': 'タグ', 'Take shelter in place or per <instruction>': '場所や<instruction>ごとに避難してください', 'Task Details': 'タスクの詳細', 'Task List': 'タスク一覧', 'Task Status': 'タスクの状況', 'Task added': 'タスクを追加しました', 'Task deleted': 'タスクを削除しました', 'Task status': 'タスク状況', 'Task updated': 'タスクを更新しました', 'Tasks': 'タスク', 'Team Description': 'チーム概要', 'Team Details': 'チームの詳細', 'Team Head': 'チーム代表者', 'Team Id': 'チームID', 'Team Leader': 'チームリーダー', 'Team Member added': 'チームメンバーを追加しました', 'Team Members': 'チームメンバー', 'Team Name': 'チーム名', 'Team Type': 'チームタイプ', 'Team added': 'チームを追加しました', 'Team deleted': 'チームを削除しました', 'Team updated': 'チームを更新しました', 'Team': 'チーム', 'Teams': 'チーム', 'Technical testing only, all recipients disregard': '技術検証のみで、すべての受取人は無視されます', 'Telecommunications': '通信・情報', 'Telephone': '電話', 'Telephony': '電話', 'Temp folder %s not writable - unable to apply theme!': '一時フォルダ%sが書き込み不可になっています。テーマを適用できません。', 'Template file %s not readable - unable to apply theme!': 'テンプレートファイル %s が読み込み不可になっています。テーマを適用できません。', 'Templates': 'テンプレート', 'Term for the fifth-level within-country administrative division (e.g. a voting or postcode subdivision). This level is not often used.': '国内における第五段階管理部門を示す用語(例: 郵便番号の下位部分)。このレベルは通常使われません。', 'Term for the fourth-level within-country administrative division (e.g. Village, Neighborhood or Precinct).': '国内で第4の行政区域を示す用語 (例えば村、地区)', 'Term for the primary within-country administrative division (e.g. State or Province).': '国内で最大の行政区域を示す用語 (例えば州や都道府県)', 'Term for the secondary within-country administrative division (e.g. District).': '国内で二番目の管理部門の用語 (例: 区)', 'Term for the third-level within-country administrative division (e.g. 
City or Town).': '国内で三番目の管理部門を示す用語 (例: 市や町)', 'Term for the top-level administrative division (typically Country).': '最上位の統制区域を示す用語 (一般的には国)', 'Territorial Authority': '地方機関', 'Terrorism': 'テロリズム', 'Tertiary Server (Optional)': '三番目のサーバ(オプション)', 'Test Results': 'テスト結果', 'Text Color for Text blocks': 'テキストブロックのテキスト色', 'Text before each Text Field (One per line)': 'テキストフィールドの前のテキスト (一行に一つ)', 'Text in Message': 'メッセージのテキスト', 'Text in Message: ': 'メッセージのテキスト: ', 'Text': 'テキスト', 'Thanks for your assistance': 'ご協力ありがとうございます', 'The "query" is a condition like "db.table1.field1==\'value\'". Something like "db.table1.field1 == db.table2.field2" results in a SQL JOIN.': '"query"は"db.table1.field1==\'value\'"のような条件です。SQL JOINの"db.table1.field1 == db.table2.field2"結果のようなものです。', 'The Area which this Site is located within.': 'このサイトが含まれる地域', 'The Assessments module allows field workers to send in assessments. 2 different options are provided here currently:': 'アセスメントモジュールは、被災現場で活動する人々による現状の査定報告を記録することができます。現在は、2種類のオプションが提供されています。', 'The Assessments module allows field workers to send in assessments.': 'アセスメントモジュールは、被災現場で活動する人々による現状の査定報告を記録することができます。', 'The Author of this Document (optional)': 'この文書の作成者氏名(オプション)', 'The Building Asssesments module allows building safety to be assessed, e.g. after an Earthquake.': 'ビルアセスメントモジュールではビルの安全性評価を行います (例:地震の後など)', 'The Current Location of the Person/Group, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'その人物/グループの現在地は報告用の概要レベルの情報あるいは地図上の表示のため正確な情報いずれの場合もあります。場所名の数文字を入力すると、登録済みの場所から検索できます。', 'The District for this Report.': 'このレポートが関連する地区。', 'The Email Address to which approval requests are sent (normally this would be a Group mail rather than an individual). 
If the field is blank then requests are approved automatically if the domain matches.': '承認依頼が送信されるメールアドレス(通常は個人のメールアドレスではなく、グループのメールアドレス)。この欄が空白の場合、ドメインが一致すれば依頼は自動的に承認されます', 'The Group whose members can edit data in this record.': 'このグループのメンバーは、レコード上のデータを修正することができます。', 'The Incident Reporting System allows the General Public to Report Incidents & have these Tracked.': '一般ユーザは、インシデント・レポートシステムからインシデントを報告し、その結果を表示させることができます。', 'The Location of this Site, which can be general (for Reporting) or precise (for displaying on a Map).': 'Siteのロケーション、(レポート用で)おおまかな場合と、(地図表示用で)正確な場合とがあります。', 'The Location the Person has come from, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'その人物がやって来たロケーションで、報告のためのだいたいの場所、あるいは地図で表示するための正確な緯度経度です。使用可能なロケーションを検索するには最初の数文字を入力してください', 'The Location the Person is going to, which can be general (for Reporting) or precise (for displaying on a Map). Enter a few characters to search from available locations.': 'その人物が向かう場所は報告用の概要レベルの情報あるいは地図上の表示のため正確な情報いずれの場合もあります。場所名の数文字を入力すると、登録済みの場所から検索できます。', 'The Media Library provides a catalog of digital media.': 'メディア・ライブラリーは、デジタル・メディアの一覧を提供します。', 'The Messaging Module is the main communications hub of the Sahana system. It is used to send alerts and/or messages using SMS & Email to various groups and individuals before, during and after a disaster.': 'メッセージング・モジュールは、SAHANAシステムのコミュニケーション中心となります。災害の前、災害中または災害の後に様々なグループや個人にSMSとeメールで警報やメッセージを送ります。', 'The Office this record is associated with.': 'このレコードに関連するオフィス', 'The Organization Registry keeps track of all the relief organizations working in the disaster region. 
It captures not only the places where they are active, but also captures information on the range of projects they are providing in each area.': '団体情報を登録することで、被災地域で活動するすべての団体の活動を追跡します。また、それぞれの地域において、彼らがどこで活動しているかという情報だけでなく、彼らが各地で提供しているプロジェクトの範囲についての情報も提供します。', 'The Organization this record is associated with.': 'このレコードに関連する団体', 'The Organization which is funding this Activity.': 'この支援活動に資金を提供する団体', 'The Person currently filling this Role.': '現在この役割に属している人物', 'The Project Tracking module allows the creation of Activities to meet Gaps in Needs Assessments.': 'プロジェクト追跡モジュールでは、支援活動(アクティビティ)を作成し、必要な物資 / サービスのギャップを満たすことを目的とします。', 'The Rapid Assessments Module stores structured reports done by Professional Organizations.': '被災地の現況アセスメントには、専門団体によって行われたレポートの結果が格納されます。', 'The Request this record is associated with.': 'このレコードに関連する支援要請', 'The Requests Management System is a central online repository where all relief organizations, relief workers, government agents and camp sites for displaced personnel can coordinate the supply of aid with their demand. It allows users to allocate the available resources to fulfill the demands effectively and efficiently.': '支援要請管理システムは、全ての支援団体、救援者、政府職員、および避難所に暮らす避難者たち自身が、要求に応じて援助の供給を調整できる中央のオンラインデータベースです。支援要請管理システムは効果的かつ効率的に要求を満たすことができる利用可能な資源の割り当てを可能にします。', 'The Role this person plays within this Office/Project.': 'オフィス/プロジェクトにおける役割', 'The Role this person plays within this hospital.': '病院内における役割', 'The Role to which this Role reports.': 'この権限の報告先となる権限', 'The Shelter Registry tracks all shelters and stores basic details regarding them. 
It collaborates with other modules to track people associated with a shelter, the services available etc.': '避難所登録は、避難所を追跡し、それらの詳細を蓄積します。避難所に関連付けられた人、利用可能なサービス等の他のモジュールと協業します。', 'The Shelter this Request is from (optional).': '要請を行った避難所(オプション)', 'The Shelter this person is checking into.': 'この人物がチェックインした避難所', 'The URL for the GetCapabilities of a WMS Service whose layers you want accessible via the Map.': '地図を用いてレイヤを利用できる WMS サービスの GetCapabilities の URL。', 'The URL of your web gateway without the post parameters': 'ポストパラメータを指定しないWebゲートウェイのURL', 'The URL to access the service.': 'サービスにアクセスするためのURL', 'The Unique Identifier (UUID) as assigned to this facility by the government.': '政府UUID|政府がこの施設に割り当てている汎用一意識別子(UUID)。', 'The area is ': 'この地域は', 'The attribute within the KML which is used for the title of popups.': 'このKML属性はポップアップのタイトルに使われます。', 'The attribute(s) within the KML which are used for the body of popups. (Use a space between attributes)': 'KMLで定義されている属性はポップアップの本文に使用されます。(各属性ごとに半角スペースで分割して記載してください)', 'The body height (crown to heel) in cm.': '頭頂からかかとまでの身長(単位はcm)', 'The category of the Item.': 'この救援物資のカテゴリです', 'The contact person for this organization.': '団体の代表窓口', 'The country the person usually lives in.': 'この人物が普段の生活を営む国', 'The default policy for data import from this peer.': 'このデータ同期先からデータをインポートする際のデフォルト設定。', 'The descriptive name of the peer.': 'データ同期先のわかりやすい名称', 'The duplicate record will be deleted': '重複したレコードは削除されます', 'The entered unit links to this unit. For e.g. 
if you are entering m for meter then choose kilometer(if it exists) and enter the value 0.001 as multiplicator.': '入力した単位をこのユニットにリンクします。例えば、mをメートルとする場合、(存在するなら) kilometer を選択して、乗数に値 0.001 を入力します。', 'The first or only name of the person (mandatory).': '人物の苗字(必須)。 外国籍の方等については避難所等での管理上の主たる表記/順に従ってください。', 'The following modules are available': '利用可能なモジュールは以下のとおりです。', 'The hospital this record is associated with.': 'このレコードに関連のある病院。', 'The item is designated to be sent for specific project, population, village or other earmarking of the donation such as a Grant Code.': 'ある特定のプロジェクトや、人々、市町村への物資または、交付コード等のついた特定区域への寄付等のは物資は、送付されることになっています。', 'The language to use for notifications.': '通知に使用する言語', 'The language you wish the site to be displayed in.': 'このサイトを表示するための言語', 'The last known location of the missing person before disappearance.': '行方不明者が最後に目撃された場所', 'The length is ': '長さは', 'The list of Brands are maintained by the Administrators.': '銘柄一覧の整備は、管理者によって可能です', 'The list of Item categories are maintained by the Administrators.': '供給物資カテゴリの一覧は、管理者によってメンテナンスされています。', 'The name to be used when calling for or directly addressing the person (optional).': '電話をかける際など、直接連絡をとりたい場合に使われる名前(オプション)', 'The next screen will allow you to detail the number of people here & their needs.': '次の画面では、人数および必要な物資/サービスの詳細を確認できます。', 'The next screen will allow you to enter a detailed list of items and quantities, if appropriate...': '次のスクリーンで、項目の詳細なリストと量を入力できる場合があります。', 'The number of Units of Measure of the Alternative Items which is equal to One Unit of Measure of the Item': '元の物資一つと同じだけの、代替品の測定単位での数量', 'The number of tiles around the visible map to download. 
Zero means that the 1st page loads faster, higher numbers mean subsequent panning is faster.': '表示している地図の周辺タイルをダウンロードする数。0は最初のページの読み込みがより早い事を意味し、数字を大きくすると視点をパンした際に表示がより早くなります。', 'The person at the location who is reporting this incident (optional)': '現地からこのインシデントを報告した人物(オプション)', 'The person reporting about the missing person.': '行方不明者情報の提供者。', 'The person reporting the missing person.': '行方不明者を報告した人', 'The post variable containing the phone number': '電話番号を含む post 変数', 'The post variable on the URL used for sending messages': 'メッセージ送信に使用するURLのPOST変数', 'The post variables other than the ones containing the message and the phone number': 'メッセージや電話番号以外を含むpost変数', 'The serial port at which the modem is connected - /dev/ttyUSB0, etc on linux and com1, com2, etc on Windows': 'モデムが接続されているシリアルポート - Linuxでは /dev/ttyUSB0 等、Windowsでは com1, com2 等', 'The server did not receive a timely response from another server that it was accessing to fill the request by the browser.': '要求を満たすためアクセスしていた別のサーバーからの応答がありませんでした。', 'The server received an incorrect response from another server that it was accessing to fill the request by the browser.': '要求を満たすためアクセスしていた別のサーバーから不正な応答が返ってきました。', 'The simple policy allows anonymous users to Read & registered users to Edit. 
The full security policy allows the administrator to set permissions on individual tables or records - see models/zzz.py.': 'シンプルポリシーでは、匿名ユーザーによるデータの閲覧、および、登録ユーザーによる編集が許可されます。完全版ポリシーでは、個々のテーブルやレコードに対して管理権限を設定することができます。詳細はmodels/zzz.pyを参照してください。', 'The subject event no longer poses a threat or concern and any follow on action is described in <instruction>': '件名のイベントはこれ以上の脅威や懸案事項を引き起こすことはありません。よって、<instruction>には、今後実施すべきアクションが記述されていません。', 'The time difference between UTC and your timezone, specify as +HHMM for eastern or -HHMM for western timezones.': 'あなたのタイムゾーンとUTCとの差を、東では+HHMMで、西では-HHMMで指定してください', 'The title of the WMS Browser panel in the Tools panel.': '[ツール]パネルのWMS Browserパネルのタイトル', 'The token associated with this application on': 'このアプリケーションが関連づけられているトークン', 'The unique identifier of the peer. Leave blank if the peer is no Sahana Eden instance, it will be auto-assigned in that case.': '一意のデータ同期先識別子です。データ同期先がSahana Edenシステムではない場合は、空白にしておくことで自動的に割り当てが行われます。', 'The unique identifier which identifies this instance to other instances.': 'このインスタンスを他のインスタンスと区別するための固有識別子', 'The way in which an item is normally distributed': '物資が配給される際の通常経路', 'The weight in kg.': '重量(単位:kg)', 'The': ' ', 'Theme Details': 'テーマの詳細', 'Theme added': 'テーマを追加しました', 'Theme deleted': 'テーマを削除しました', 'Theme updated': 'テーマを更新しました', 'Theme': 'テーマ', 'Themes': 'テーマ', 'There are errors': 'エラーが発生しました', 'There are multiple records at this location': 'このロケーションに複数のレコードが存在します', 'There are not sufficient items in the Inventory to send this shipment': 'この輸送を開始するために十分な量の物資が備蓄されていません', 'There is no address for this person yet. Add new address.': 'この人物の住所がまだありません。新しい住所を入力してください', 'There was a problem, sorry, please try again later.': '問題が発生しています。すみませんが、時間を置いてからやり直してください。', 'These are settings for Inbound Mail.': '電子メール受信箱の設定です', 'These are the Incident Categories visible to normal End-Users': '普通のユーザーが見ることができるインシデント一覧です', 'These are the default settings for all users. 
To change settings just for you, click ': 'これらは、全てのユーザーのデフォルト設定です。個人用の設定を変更するには、以下をクリックしてください。', 'These need to be added in Decimal Degrees.': 'これらは、十進角で追加する必要があります。', 'They': 'それら', 'This Group has no Members yet': 'メンバはまだ登録されていません', 'This Team has no Members yet': 'メンバはまだ登録されていません', 'This appears to be a duplicate of ': 'これは、以下のものと重複しているようです。', 'This can either be the postal address or a simpler description (such as `Next to the Fuel Station`).': '住所か、あるいは簡単な記述(ガソリンスタンドの隣、など)を記載しています。', 'This email address is already in use': 'このメールアドレスは使用されています', 'This file already exists on the server as': 'このファイルは別の名前でサーバに既に存在しています : ', 'This form allows the administrator to remove a duplicate location.': '管理者はこのフォームを使うことで、重複したロケーションデータを削除できます。', 'This is the way to transfer data between machines as it maintains referential integrity.': '参照整合性を保ちつつ、端末間でデータを転送する方法が記載されています。', 'This is the way to transfer data between machines as it maintains referential integrity...duplicate data should be removed manually 1st!': '参照整合性を保ちつつ、端末間でデータを転送する方法が記載されています。...重複したデータは最初に手動で削除する必要があります。', 'This might be due to a temporary overloading or maintenance of the server.': 'サーバーが一時的に過負荷状態になっているか、あるいはメンテナンスを行っています。', 'This page shows you logs of past syncs. Click on the link below to go to this page.': '過去に行ったデータ同期履歴を表示します。以下のリンクをクリックしてください。', 'This screen allows you to upload a collection of photos to the server.': 'この画面では、複数の画像をサーバーにアップロードすることができます。', 'This shipment has already been received.': '輸送が開始され、物資が受領されました', 'This shipment has already been sent.': '輸送が開始され、送付されました', 'This shipment has not been received - it has NOT been canceled because it can still be edited.': 'この輸送は受領されていません。 - まだ編集可能であり、キャンセルされてはいません', 'This shipment has not been sent - it has NOT been canceled because it can still be edited.': '輸送はまだ開始されていませんが、キャンセルされてはいません。編集可能です。', 'This shipment will be confirmed as received.': 'この輸送された物資は、受信済み扱いになります', 'This value adds a small mount of distance outside the points. 
Without this, the outermost points would be on the bounding box, and might not be visible.': 'この値はその地点の外側までの距離の小さなマウントを追加します。この値が無い場合は、一番外側の地点が境界ボックスになり、表示されない可能性があります。', 'This value gives a minimum width and height in degrees for the region shown. Without this, a map showing a single point would not show any extent around that point. After the map is displayed, it can be zoomed as desired.': 'この値はこの地域を表示する時に使う最小の幅と高さを示します。この値がない場合、ある単一の地点を表示するときにその周辺の範囲は表示されません。地図が表示された後では、好きな大きさに拡大・縮小できます。', 'Thunderstorm': '雷雨', 'Thursday': '木曜日', 'Ticket Details': 'チケットの詳細', 'Ticket ID': 'チケットID', 'Ticket added': 'チケットを追加しました', 'Ticket deleted': 'チケットを削除しました', 'Ticket updated': 'チケットを更新しました', 'Ticket': 'チケット', 'Ticketing Module': 'チケット発行モジュール', 'Tickets': 'チケット', 'Tilt-up concrete': 'ティルトアップ式コンクリート', 'Timber frame': '木造', 'Time needed to collect water': '水の確保に必要な時間', 'Time of Request': '要求発生時刻', 'Timeline Report': 'タイムラインレポート', 'Timeline': 'タイムライン', 'Timestamp': 'タイムスタンプ', 'Title': 'タイトル', 'To Location': '送付先ロケーション', 'To Organization': '送付先団体', 'To Person': '送付先人物情報', 'To Site': '送付先サイト', 'To begin the sync process, click the button on the right => ': '右のボタンを押すと、データ同期が開始されます。', 'To begin the sync process, click this button => ': 'このボタンを押すと、データ同期が開始されます。=>', 'To create a personal map configuration, click ': '個人用の地図設定を作成するにはクリックしてください', 'To delete': '削除する側', 'To edit OpenStreetMap, you need to edit the OpenStreetMap settings in models/000_config.py': 'OpenStreetMapを編集する際は、models/000_config.pyで定義されている設定を編集してください', 'To submit a new job, use the': 'jobを新規送信するには、以下を使用してください。', 'To variable': '変数に', 'To': ' ', 'Tools': 'ツール', 'Tornado': '竜巻', 'Total # of Beneficiaries Reached ': '支援が到達した受益者の合計数 ', 'Total # of Target Beneficiaries': '受益対象者の合計人数', 'Total # of households of site visited': '訪問した世帯数', 'Total Beds': '合計ベッド数', 'Total Beneficiaries': '受益者の総数', 'Total Cost per Megabyte': 'メガバイト毎の合計費用', 'Total Cost per Minute': '一分毎の合計費用', 'Total Households': '総世帯数', 'Total Monthly Cost': 
'月額総計', 'Total Monthly Cost: ': '月毎の費用の合計: ', 'Total Monthly': '月ごとの合計', 'Total One-time Costs': '1回毎の費用総計', 'Total Persons': '合計者数', 'Total Recurring Costs': '経常費用総計', 'Total Unit Cost': '単価合計', 'Total Unit Cost: ': '単価合計: ', 'Total Units': '総数', 'Total gross floor area (square meters)': '延面積(平方メートル)', 'Total number of beds in this hospital. Automatically updated from daily reports.': 'この病院のベッド数総計。日時レポートにより、自動的に更新されます。', 'Total number of houses in the area': 'この地域の家屋総数', 'Total number of schools in affected area': '被災地内の学校総数', 'Total population of site visited': '訪問地域の総人口数', 'Total': '合計数', 'Totals for Budget:': '予算の合計:', 'Totals for Bundle:': 'Bundleの合計:', 'Totals for Kit:': 'Kitの合計:', 'Tourist Group': '旅行者グループ', 'Town': '町', 'Traces internally displaced people (IDPs) and their needs': '国内の避難している人(IDP)と彼らの必要としている物資/サービスの追跡', 'Tracing': '履歴の追跡', 'Track Details': '追跡情報の詳細', 'Track deleted': '追跡情報を削除しました', 'Track updated': '追跡情報を更新しました', 'Track uploaded': '追跡情報をアップデートしました', 'Track': '追跡情報', 'Tracking of Projects, Activities and Tasks': 'プロジェクトや支援活動、タスクの追跡', 'Tracking of basic information on the location, facilities and size of the Shelters': '避難所の基本情報(場所、施設、規模等)を追跡', 'Tracks requests for aid and matches them against donors who have pledged aid': '支援要請を管理し、救援物資の提供者とマッチングします。', 'Tracks the location, distibution, capacity and breakdown of victims in Shelters': '避難所のロケーション、配置、収容能力と被災者の状態を追跡します。', 'Tracks': 'トラック', 'Traffic Report': 'トラフィックレポート', 'Transfer': '輸送', 'Transit Status': '輸送状態', 'Transit': '移動中の立ち寄り', 'Transit. 
Status': '輸送状態', 'Transition Effect': '推移への影響', 'Transparent?': '透明ですか?', 'Transportation assistance, Rank': '移動 / 輸送支援、ランク', 'Trauma Center': '心的外傷センター', 'Travel Cost': '移動費', 'Tree': '樹木', 'Tropical Storm': '熱帯低気圧', 'Tropo Messaging Token': 'Tropo メッセージのトークン', 'Tropo Settings': 'Tropo 設定', 'Tropo Voice Token': 'Tropo 音声トークン', 'Tropo settings updated': 'Tropo 設定を更新しました', 'Truck': 'トラック', 'Try checking the URL for errors, maybe it was mistyped.': '入力したURLに間違いがないか確認してください。', 'Try hitting refresh/reload button or trying the URL from the address bar again.': 'ページの再読み込みを行うか、あるいはアドレスバーに直接URLを入力してみてください。', 'Try refreshing the page or hitting the back button on your browser.': 'ページを再読込するか、ブラウザの[戻る]ボタンを押してください。', 'Tsunami': '津波', 'Tuesday': '火曜日', 'Twitter ID or #hashtag': 'Twitter ID あるいは #ハッシュタグ', 'Twitter Settings': 'Twitter設定', 'Type of Construction': '建物の種類', 'Type of cause': '原因のタイプ', 'Type of latrines': 'トイレの種類', 'Type of place for defecation': '排泄用地の種類', 'Type of water source before the disaster': '災害発生前の水の確保方法', 'Type': 'タイプ', 'Types of health services available': '利用可能な健康サービスの種別', 'Types of water storage containers available': '利用可能な水貯蔵容器の種別', 'UID': 'ユニークID', 'UN': '国連', 'UTC Offset': 'UTC(世界標準時刻)との差', 'Unable to parse CSV file!': 'CSVファイルをパースできません。', 'Understaffed': '人員不足', 'Unidentified': '詳細不明', 'Unit Bed Capacity': 'ベッド収容数', 'Unit Cost': '単価', 'Unit Details': '単位の詳細', 'Unit Name': '単位名', 'Unit Set': '単位の設定', 'Unit Short Code for e.g. 
m for meter.': '単位の略称、例えばメートルはmと表記。', 'Unit added': '単位を追加しました', 'Unit deleted': '単位を削除しました', 'Unit of Measure': '1個口の内訳', 'Unit updated': '単位を更新しました', 'Unit': '単位', 'Units of Measure': '測定単位', 'Units': '単位', 'Unknown Peer': '登録に無いデータ同期先', 'Unknown type of facility': '施設規模不明', 'Unknown': '不明', 'Unreinforced masonry': '補強されていない石造建築物', 'Unresolved Conflicts': '未解決のデータ競合', 'Unsafe': '危険な', 'Unselect to disable the modem': 'モデムを無効化するにはチェックを外す', 'Unsent': '未送信', 'Unsupported data format!': 'サポートされていないデータフォーマットです。', 'Unsupported method!': 'サポートされていないメソッドです。', 'Unsupported method': 'サポートされていないメソッドです', 'Update Activity Report': '支援活動レポートの更新', 'Update Cholera Treatment Capability Information': 'コレラ対策能力情報を更新', 'Update Import Job': 'Import Jobの更新', 'Update Request': '支援要請を更新', 'Update Service Profile': 'サービスプロファイルの更新', 'Update Task Status': 'タスク状況の更新', 'Update Unit': '単位の更新', 'Update if Master': 'マスターサイトなら更新する', 'Update if Newer': '新しいものがあれば更新する', 'Update your current ordered list': '現在の順序付きリストの更新', 'Update': '更新', 'Upload Photos': '写真のアップロード', 'Upload Spreadsheet': 'スプレッドシートのアップロード', 'Upload Track': '追跡情報のアップロード', 'Upload a Spreadsheet': 'スプレッドシートをアップロード', 'Upload an image file (bmp, gif, jpeg or png), max. 300x300 pixels!': '画像ファイルをアップロード(bmp,gif,jpeg,png) 最大300x300ピクセル', 'Upload an image file here.': '画像ファイルをここにアップロードしてください', 'Upload an image, such as a photo': '写真などのイメージをアップロードしてください', 'Upload': 'アップロード', 'Urban Fire': '都市火災', 'Urban area': '市街地', 'Urdu': 'ウルドゥー語', 'Urgent': '緊急', 'Use (...)&(...) for AND, (...)|(...) for OR, and ~(...) for NOT to build more complex queries.': '複雑なクエリを構築するには、ANDは (...)&(...) を、ORは (...)|(...) を、NOTは ~(...) 
を使用してください。', 'Use default': 'デフォルト値を使用', 'Use these links to download data that is currently in the database.': 'これらのリンクを使用して、現在データベースにあるデータをダウンロードします。', 'Use this space to add a description about the Bin Type.': 'Bin Typeに関する説明は、このスペースに記載してください。', 'Use this space to add a description about the site location.': 'このスペースを使って、サイトの位置の説明を追加してください。', 'Use this space to add a description about the warehouse/site.': '倉庫/Siteに関する説明は、このスペースに記載してください。', 'Use this space to add additional comments and notes about the Site/Warehouse.': 'Site/倉庫に関する追加情報を記載するには、このスペースを使用してください。', 'Used to import data from spreadsheets into the database': 'スプレッドシートからデータベースにデータをインポートするために使われます', 'User %(first_name)s %(last_name)s Approved': '%(first_name)s %(last_name)s のユーザー登録が承認されました', 'User %(id)s Logged-in': 'ユーザー %(id)s がログインしています', 'User %(id)s Logged-out': 'ユーザー %(id)s がログアウトしました', 'User %(id)s Profile updated': 'ユーザ %(id)s のプロファイルを更新しました', 'User %(id)s Registered': 'ユーザー%(id)sを登録しました', 'User Account has been Disabled': 'ユーザアカウントが無効になっています', 'User Details': 'ユーザーの詳細', 'User ID': 'ユーザーID', 'User Management': 'ユーザー管理', 'User Profile': 'ユーザープロファイル', 'User Requests': 'ユーザー要求', 'User Updated': 'ユーザーを更新しました', 'User added': 'ユーザーを追加しました', 'User already has this role': 'この権限のあるユーザー', 'User deleted': 'ユーザーを削除しました', 'User updated': 'ユーザーを更新しました', 'User': 'ユーザー', 'Username for authentication at the peer. Note that only HTTP Basic authentication is supported.': 'データ同期先との認証に使うユーザ名。HTTPベーシック認証のみサポートしています。', 'Username': 'ユーザー名', 'Users removed': 'ユーザーを削除しました', 'Users': 'ユーザー', 'Usual food sources in the area': 'この地域の普段の食料調達方法', 'Utilities': 'ユーティリティ', 'Utility, telecommunication, other non-transport infrastructure': 'ユーティリティ、通信、その他のインフラ設備(交通以外)', 'Vacancies': '欠員', 'Value': '値', 'Various Reporting functionalities': '多種多様な報告を行う機能', 'Vehicle Crime': '車両犯罪', 'Vehicle Types': '車両の種別', 'Vehicle': '車両', 'Vendor': 'ベンダー', 'Verification Email sent - please check your email to validate. 
If you do not receive this email please check you junk email or spam filters': 'メールアドレス確認用のメールを送信しました。メールに記載された確認用URLにアクセスしてください。もしメールが届かない場合迷惑メールフォルダに入ってしまっている可能性がありますのでご確認ください。', 'Verification Status': '認証ステータス', 'Verified': '認証済み', 'Verified?': '認証(ログイン)できません.メールアドレス・パスワードを確認してください.', 'Verify Password': 'パスワード再確認', 'Verify password': 'パスワードの確認', 'Version': 'バージョン', 'Very High': '非常に高い', 'View Alerts received using either Email or SMS': '電子メールまたはSMSで受信したアラートの閲覧', 'View Fullscreen Map': '地図をフルスクリーン表示', 'View Image': '画像の閲覧', 'View On Map': '地図上で閲覧', 'View Outbox': '送信箱の表示', 'View Picture': '写真の表示', 'View Requests for Aid': '援助要請を閲覧', 'View Settings': '設定の確認', 'View Tickets': 'チケットの閲覧', 'View and/or update their details': '詳細の閲覧および更新', 'View or update the status of a hospital.': '病院のステータスの閲覧と更新', 'View pending requests and pledge support.': '処理中の要求と寄付サポートの閲覧', 'View the hospitals on a map.': '病院の場所を地図上で表示します。', 'Village Leader': '村長', 'Village': '村落', 'Visible?': '表示しますか?', 'Visual Recognition': '画像認識', 'Volcanic Ash Cloud': '火山灰雲', 'Volcanic Event': '火山活動', 'Volume - Fluids': '流量 - 液状物', 'Volume - Solids': '流量 - 固形物', 'Volume Capacity': '容量', 'Volume/Dimensions': '容量/外形寸法', 'Volunteer Data': 'ボランティアデータ', 'Volunteer Details': 'ボランティアの詳細', 'Volunteer Management': 'ボランティアの管理', 'Volunteer Project': 'ボランティアプロジェクト', 'Volunteer Registration': 'ボランティア登録', 'Volunteer Registrations': 'ボランティア登録', 'Volunteer Request': 'ボランティア要請', 'Volunteer added': 'ボランティアを追加しました', 'Volunteer deleted': 'ボランティアを削除しました', 'Volunteer details updated': 'ボランティアの詳細を更新しました', 'Volunteer registration added': 'ボランティア登録を追加しました', 'Volunteer registration deleted': 'ボランティア登録を削除しました', 'Volunteer registration updated': 'ボランティア登録を更新しました', 'Volunteers were notified!': 'ボランティアに通知されました', 'Volunteers': 'ボランティア', 'Vote': '投票', 'Votes': '投票', 'WASH': '除染', 'WMS Browser Name': 'WMSブラウザ名', 'WMS Browser URL': 'WMSブラウザのURL', 'Walking Only': '徒歩のみ', 'Walking time to the health service': '医療サービス提供所までの徒歩時間', 'Wall or other 
structural damage': '壁やその他の構造の損傷', 'Warehouse Details': '倉庫の詳細', 'Warehouse Item Details': '倉庫物資の詳細', 'Warehouse Item added': '倉庫物資を追加しました', 'Warehouse Item deleted': '倉庫内物資を削除しました', 'Warehouse Item updated': '倉庫物資を更新しました', 'Warehouse Items': '倉庫に備蓄中の物資', 'Warehouse Management': '倉庫管理', 'Warehouse added': '倉庫を追加しました', 'Warehouse deleted': '倉庫を削除しました', 'Warehouse updated': '倉庫を更新しました', 'Warehouse': '倉庫', 'Warehouse/Sites Registry': '倉庫/Siteの登録', 'Warehouses': '倉庫', 'WatSan': '給水と衛生', 'Water Level still high?': '水位はまだ高いままですか?', 'Water Sanitation Hygiene': '水質衛生', 'Water collection': '給水', 'Water gallon': 'ガロン容器', 'Water storage containers available for HH': '世帯用の水貯蔵容器が利用可能である', 'Water storage containers in households': '世帯の水貯蔵容器', 'Water storage containers sufficient per HH': '世帯毎に1つ以上の水貯蔵容器が利用可能である', 'Water supply': '水の供給', 'Water': '水', 'Waterspout': '水上竜巻', 'Way Bill(s)': '移動費', 'We have tried': '私達は試行しました', 'Website': 'ウェブサイト', 'Wednesday': '水曜日', 'Weekly': '週次', 'Weight (kg)': '体重 (kg)', 'Weight': '体重', 'Welcome to the Sahana Eden Disaster Management Platform': 'Sahana Eden -災害情報管理プラットフォームへようこそ', 'Welcome to the Sahana Eden Disaster Management System': 'Sahana Eden -災害情報管理システムへようこそ', 'Welcome to the Sahana Portal at ': 'Sahana ポータルへようこそ: ', 'Welcome to the Sahana Portal at': 'Sahanaポータルにようこそ', 'Well-Known Text': '既知の文章', 'Were basic medical supplies available for health services prior to the disaster?': '災害前に、基本的な医療サービスが機能していたかどうかを記載してください', 'Were breast milk substitutes used prior to the disaster?': '災害前に利用していた母乳代用品の入手源を記載してください', 'Were there cases of malnutrition in this area prior to the disaster?': 'この地域で、災害前に栄養失調が発生していたかどうかを記載してください', 'Were there health services functioning for the community prior to the disaster?': '災害前、共同体でヘルスサービスが機能していたかどうかを記載してください', 'Were there reports or evidence of outbreaks of any micronutrient malnutrition disorders before the emergency?': '災害発生前から栄養失調の報告があった、あるいはその証跡があったかどうかを記載します', 'What are the factors affecting school 
attendance?': '生徒の就学状況に影響する要因を記載してください', 'What are your main sources of cash to restart your business?': 'ビジネス再開に必要な現金の、主な調達源を記載してください', 'What are your main sources of income now?': '現在の主な収入源を記載してください', 'What do you spend most of your income on now?': '現在の主な支出要因を記載してください', 'What food stocks exist? (main dishes)': '備蓄食料の種類(主皿)', 'What food stocks exist? (side dishes)': '備蓄食料の種類(副皿)', 'What is the estimated total number of people in all of these institutions?': '上記施設内の居住者を総計すると、おおよそどの程度になるかを記載してください', 'What is your major source of clean water for daily use (ex: washing, cooking, bathing)?': '洗濯、料理、入浴など、日常生活で必要となる清潔な水の、主な入手源を記載してください', 'What is your major source of drinking water?': '飲料水の主な入手源を記載してください', 'What type of latrines are available in the village/IDP centre/Camp?': '村落/IDPセンター/仮泊施設内で利用可能なトイレのタイプは?', 'What type of salvage material can be used from destroyed houses?': '全壊した家屋から回収した部材が流用可能な用途を記載します', 'What type of salvage material can be used from destroyed schools?': '倒壊した校舎において、再利用できる部材は何ですか?', 'What types of health problems do children currently have?': '小児が現在抱えている健康問題のタイプを記載してください', 'What types of health problems do people currently have?': '住人たちが現在抱えている健康問題のタイプを記載してください', 'What types of health services are still functioning in the affected area?': '現在、被災地で機能しているヘルスサービスの種類を選択してください', 'What types of household water storage containers are available?': '世帯で使っている水貯蔵容器のタイプを選択してください', 'What were your main sources of income before the disaster?': '災害発生前の主な収入源を選択してください', 'Wheat': '小麦', 'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points. This value adds a small mount of distance outside the points. 
Without this, the outermost points would be on the bounding box, and might not be visible.': '地図上に複数のポイントが表示されている場合、それらポイント全てを表示できる縮尺で地図が表示されます。この値は、それらポイントの外に余白を付与します。指定しない場合、表示領域とポイントが重なり、表示範囲から外れてしまう可能性があります。', 'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points. This value gives a minimum width and height in degrees for the region shown. Without this, a map showing a single point would not show any extent around that point. After the map is displayed, it can be zoomed as desired.': '地図上に複数のポイントが表示されている場合、それらポイント全てを表示できる縮尺で地図が表示されます。この値は、地域を表示する際の横幅と縦高の最小値となります。指定しない場合、対象の一点のみ表示され、その周辺は表示されません。一度表示された後であれば、縮尺の変更が可能です。', 'When a map is displayed that focuses on a collection of points, the map is zoomed to show just the region bounding the points.': '地点の集合にフォーカスを合わせた地図を表示すると、この地図はそれら地点の集合を表示できる範囲に拡大・縮小します', 'When reports were entered': 'いつ報告が入力されたか', 'Where are the alternative places for studying?': '学校以外で、学習が可能な施設の種類を選択してください', 'Where are the separated children originally from?': '保護者が居ない児童の住居地はどこですか?', 'Where do the majority of people defecate?': 'トイレはどこで済ませますか?', 'Where have the children been sent?': '疎開先の情報がある場合は記載してください', 'Where is solid waste disposed in the village/camp?': '村落/仮泊施設内での、固形廃棄物処理場所を記載してください', 'Whether this is a Sahana Eden, Sahana Agasti, Ushahidi or Other instance.': 'Sahana Eden, Sahana Agasti, Ushahidi あるいは他のシステムの場合も', 'Whiskers': 'ほおひげ', 'Who is doing what and where': '誰がどこで何をしているか', 'Who usually collects water for the family?': '日頃、家族のために水を採取しているのは誰か?', 'Width': '横幅', 'Wild Fire': '野火', 'Wind Chill': '風速冷却', 'Window frame': 'ウィンドウ枠', 'Winter Storm': '吹雪', 'Without mentioning any names or indicating anyone, do you know of any incidents of violence against women or girls occuring since the disaster?': '災害発生後、女性や少女に対する暴力事件が発生したかどうかを記載してください。具体的な人名や場所を記載する必要はありません', 'Women of Child Bearing Age': '出産年齢の女性', 'Women participating in coping activities': '女性が災害対応に従事', 'Women 
who are Pregnant or in Labour': '妊娠中、あるいは労働中の女性', 'Womens Focus Groups': '女性のフォーカスグループ(Womens Focus Groups)', 'Wooden plank': '木製板', 'Wooden poles': '木製の柱', 'Working hours end': '作業終了時刻', 'Working hours start': '作業開始時刻', 'Working or other to provide money/food': '金銭/食料調達のため就労、あるいは活動を実施', 'Would you like to display the photos on the map?': '地図上に写真を表示しますか?', 'X-Ray': 'X線', 'Year built': '建築年', 'Year of Manufacture': '製造年', 'Yellow': '黄色', 'Yes': 'はい', 'You are a recovery team?': 'あなたが遺体回収チームの場合', 'You are attempting to delete your own account - are you sure you want to proceed?': '自分のアカウントを削除しようとしています。本当に削除しますか?', 'You are currently reported missing!': 'あなたが行方不明者として登録されています!', 'You can change the configuration of synchronization module in the Settings section. This configuration includes your UUID (unique identification number), sync schedules, beacon service and so on. Click the following link to go to the Sync Settings page.': '同期に関する設定は、「設定」セクションで行うことができます。設定には、UUID(unique identification number)、同期スケジュール、ビーコンサービス等が含まれます。同期設定は以下のリンクから変更可能です。', 'You can click on the map below to select the Lat/Lon fields': '下の地図をクリックすることで、緯度経度情報を入力できます', 'You can click on the map below to select the Lat/Lon fields:': '緯度と経度の設定は、以下の地図をクリックすることでも可能です:', 'You can click on the map to select the Lat/Lon fields. Longitude is West - East (sideways). Latitude is North-South (Up-Down). Latitude is zero on the equator and positive in the northern hemisphere and negative in the southern hemisphere. Longitude is zero on the prime meridian (Greenwich Mean Time) and is positive to the east, across Europe and Asia. Longitude is negative to the west, across the Atlantic and the Americas. 
This needs to be added in Decimal Degrees.': '経度/緯度の項目は、地図を選択することでも登録可能です。経度は東西方向(横)の座標軸です。緯度は南北方向(上下)の座標軸です。赤道ではゼロ、北半球ではプラス、南半球ではマイナスとなります。経度は、子午線(グリニッジ標準時)をゼロとして、東(ヨーロッパ、アジア)がプラスとなります。西(大西洋、アメリカ)がマイナスです。10進法で記入してください。', 'You can select the Draw tool (': '選択可能な描画ツール (', 'You can select the Draw tool': 'ドローツールを選択できます', 'You can set the modem settings for SMS here.': 'SMS用モデムの設定をすることができます。', 'You can use the Conversion Tool to convert from either GPS coordinates or Degrees/Minutes/Seconds.': '変換ツールを使うことで、GPS、あるいはDegrees/Minutes/Seconds形式からデータを変換できます。', 'You do no have permission to cancel this received shipment.': '輸送の受け取りをキャンセルする権限がありません', 'You do no have permission to cancel this sent shipment.': '輸送の送付をキャンセルする権限がありません', 'You do no have permission to make this commitment.': 'このコミットを作成する権限がありません', 'You do no have permission to receive this shipment.': 'この輸送を受け取る権限がありません', 'You do no have permission to send this shipment.': 'この輸送を開始する権限がありません', 'You do not have permission for any site to add an inventory item.': 'あなたには他の場所から在庫アイテムを追加する権限はありません', 'You do not have permission for any site to make a commitment.': 'どの場所にも受け入れを示す権限が有りません。', 'You do not have permission for any site to make a request.': '支援要請を作成する権限がありません', 'You do not have permission for any site to perform this action.': 'この操作をするための権限がありません', 'You do not have permission for any site to receive a shipment.': '物資の輸送を受け取る権限がありません', 'You do not have permission for any site to send a shipment.': '物資の輸送をする権限がありません', 'You do not have permission to send a shipment from this site.': 'あなたはこのサイトから物資を送る権限はありません', 'You have a personal map configuration. 
To change your personal configuration, click ': '個人用地図設定があります。あなたの個人用地図設定を編集するにはクリックしてください', 'You have found a dead body?': '遺体を発見しましたか?', 'You must be logged in to register volunteers.': 'ボランティアとして登録するには、ログインする必要があります', 'You must be logged in to report persons missing or found.': '行方不明者の発見状況を登録するには、ログインする必要があります。', 'You must provide a series id to proceed.': '処理を行うにはシリーズIDを指定する必要があります。', 'You should edit OpenStreetMap settings in models/000_config.py': 'OpenStreetMapの設定を変更するには、models/000_config.pyを編集してください', 'You should edit Twitter settings in models/000_config.py': 'Twitter設定を変更するには、models/000_config.pyを編集してください。', 'Your Account is Approved - you can now login\n %s%s/': '利用者登録が完了しました。リンク先のログインページで あなたが登録したユーザー名とパスワードを入力してログインしてください。\n %s%s/', 'Your Account is Approved': '利用者登録が完了しました', 'Your action is required. Please approve user %s asap: ': 'あなたの行動が要求されています。ただちにユーザー %s を承認してください。', 'Your action is required. Please approve user': 'ユーザーから承認の依頼が届いています。承諾お願いします', 'Your current ordered list of solution items is shown below. You can change it by voting again.': '解決項目の順番付きリストは以下です。再度投票することによって変更可能です。', 'Your post was added successfully.': '投稿が成功しました', 'Your system has been assigned a unique identification (UUID), which other computers around you can use to identify you. To view your UUID, you may go to Synchronization -> Sync Settings. 
You can also see other settings on that page.': 'あなたがお使いのシステムには、ユニークID (UUID) が割り当てられており、このIDを用いて他のコンピュータがあなたのシステムを同定します。あなたの UUID を閲覧するには、同期 -> 同期設定と進んでください。そのページでは、他の設定を閲覧することもできます。', 'ZIP/Postcode': '郵便番号', 'Zinc roof': 'トタン屋根', 'Zoom In: click in the map or use the left mouse button and drag to create a rectangle': 'ズームイン: マップをクリックするか、拡大したい場所をドラッグで選択してください', 'Zoom Levels': 'ズームレベル', 'Zoom Out: click in the map or use the left mouse button and drag to create a rectangle': 'ズームアウト: マップをクリックするか、拡大したい地点をマウスの左ボタンでドラッグしてください', 'Zoom to Current Location': '現在の場所を拡大', 'Zoom to maximum map extent': 'マップの最大範囲までズーム', 'Zoom': 'ズーム', 'act': '活動', 'active': 'アクティブ', 'added': '追加しました', 'all records': '全てのレコード', 'allows a budget to be developed based on staff & equipment costs, including any admin overheads.': 'では、スタッフや設備、それらの管理コストまで含めた予算編成を行ないます。', 'allows for creation and management of surveys to assess the damage following a natural disaster.': '自然災害による被災影響調査の作成、および管理を許可する', 'an individual/team to do in 1-2 days': '個人やチーム単位で、1-2日中に実施するべき事柄をさします。', 'approved': '承認された', 'assigned': '担当者・部門が確定', 'average': '平均的', 'black': '黒', 'blond': 'ブロンド', 'blue': '青', 'brown': '茶色', 'business_damaged': 'ビジネスへの損害', 'by': ' ', 'c/o Name': 'c/o 名前', 'can be used to extract data from spreadsheets and put them into database tables.': 'スプレッドシートからデータを抽出して、データベーステーブルに挿入できます。', 'can use this to identify the Location': 'ここからロケーションの特定が可能です', 'caucasoid': '白人', 'check all': '全てチェック', 'click for more details': '詳細はクリック', 'collateral event': '付帯イベント', 'completed': '完了', 'confirmed': '確認済', 'consider': '考慮', 'criminal intent': '犯罪目的', 'crud': '性病', 'curly': '縮れ毛', 'currently registered': '登録済み', 'daily': '日次', 'dark': '濃い', 'data uploaded': 'データがアップロードされました', 'database %s select': 'データベース%sの選択', 'database': 'データベース', 'db': 'データベース', 'delete all checked': 'チェックされた項目を全て削除', 'deleted': '削除されました', 'denied': '拒否されました', 'description': '説明', 'design': 'デザイン', 'diseased': '罹患中', 'displaced': '避難中', 'divorced': 
'離別', 'done!': '完了!', 'duplicate': '重複', 'edit': '編集', 'editor': '編集者', 'eg. gas, electricity, water': 'ガス、電気、水道など', 'embedded': '埋め込まれた', 'enclosed area': '専用地', 'export as csv file': 'csvファイルとしてエクスポート', 'fat': '肥満', 'feedback': '現地からの要望', 'female': '女性', 'final report': '最終報告書', 'flush latrine with septic tank': '浄化槽つき水洗トイレ', 'follow-up assessment': 'アセスメントのフォローアップ', 'food_sources': '食糧供給源', 'forehead': 'ひたい', 'form data': 'フォームデータ', 'from Twitter': 'Twitter経由', 'full': '完全', 'getting': '取得中', 'green': '緑', 'grey': '灰色', 'here': 'ここ', 'high': '高い', 'hourly': '1時間毎', 'households': '世帯情報', 'human error': 'ヒューマンエラー', 'identified': '身元確認済み', 'ignore': '無視する', 'immediately': '即応', 'in Deg Min Sec format': 'Deg Min Sec フォーマットで', 'in GPS format': 'GPS フォーマットで', 'in Inv.': '個', 'inactive': '休止中', 'initial assessment': '初期アセスメント', 'injured': '負傷中', 'insert new %s': '%sの新規挿入', 'insert new': '新規挿入', 'invalid request': '無効な要求', 'invalid': '無効', 'is a central online repository where information on all the disaster victims and families, especially identified casualties, evacuees and displaced people can be stored. Information like name, age, contact number, identity card number, displaced location, and other details are captured. Picture and finger print details of the people can be uploaded to the system. People can also be captured by group for efficiency and convenience.': 'は、災害犠牲者とその家族、特に身元の判明した遺体、避難者、難民など、全ての情報を集約可能な中央オンラインレポジトリです。名前、年齢、連絡先番号、IDカード番号、避難した場所、その他の詳細が記録されます。人物の写真や指紋をアップロードすることができます。効率性と利便性のため、人物をグループ分けすることができます。', 'is envisioned to be composed of several sub-modules that work together to provide complex functionality for the management of relief and project items by an organization. 
This includes an intake system, a warehouse management system, commodity tracking, supply chain management, fleet management, procurement, financial tracking and other asset and resource management capabilities': 'は、支援団体による救援活動や復興プロジェクトの作業を管理するために、複数のサブモジュールを組み合わせて高度な機能を実現しようと考えており、物資の受け入れ、貯蔵設備の管理、必要な物資の記録、サプライチェーン・マネジメント、輸送管理、調達、財務記録、その他様々な資産やリソースの管理といった機能を備えています', 'keeps track of all incoming tickets allowing them to be categorised & routed to the appropriate place for actioning.': '全ての入荷伝票を追跡することで、カテゴリー分けや適切な実行場所への配分を行う', 'kilogram': 'キログラム', 'kit': 'キット', 'latrines': 'トイレ', 'leave empty to detach account': 'アカウントを取り外すには空欄のままにしてください', 'legend URL': '凡例の URL', 'light': '淡い', 'liter': 'リットル', 'locations': 'ロケーション', 'login': 'ログイン', 'long': '長い', 'long>12cm': '12cm以上', 'low': '低い', 'male': '男性', 'manual': 'マニュアル', 'married': '既婚', 'max': '最大', 'maxExtent': '最大範囲', 'maxResolution': '最高分解能', 'medium': '中', 'medium<12cm': '12cm未満', 'menu item': 'メニューアイテム', 'message_id': 'メッセージID', 'meter cubed': '立方メートル', 'meter': 'メートル', 'meters': 'メートル', 'min': '最小', 'module allows the an inspector to fill information for buildings.': 'モジュールでは、建築物の調査情報を記録できます。', 'module allows the site administrator to configure various options.': 'モジュールを使うことで、サイト管理者が様々な項目を設定する際の手間を省くことができます。', 'module helps monitoring the status of hospitals.': 'モジュールでは、病院の状態をモニタできます。', 'module provides a mechanism to collaboratively provide an overview of the developing disaster, using online mapping (GIS).': 'モジュールでは、オンラインマッピング(GIS)を使用して、現在の災害地域の状態を俯瞰することができます。', 'mongoloid': '黄色人種', 'more': 'その他の項目  ', 'n/a': 'データなし', 'natural hazard': '自然災害', 'negroid': '黒人', 'never': 'まだ', 'new ACL': '新規ACL', 'new record inserted': '新規レコードを挿入しました', 'new': '新規登録', 'next 100 rows': '次の100行', 'no': ' ', 'none': 'なし', 'normal': '通常', 'not accessible - no cached version available!': 'アクセスできません - キャッシュされたバージョンがありません!', 'not accessible - using cached version from': 'アクセス不可 - キャッシュ版を使用しています', 'not specified': '未指定', 'num Zoom Levels': 
'ズーム倍率', 'obsolete': '孤立中', 'on': ' ', 'once': '一度', 'open defecation': '野外', 'operational intent': '運用目的', 'or import from csv file': 'またはcsvファイルからインポート', 'other': 'その他', 'over one hour': '1時間以上', 'pack of 10': '10のパック', 'people': '居住者情報', 'piece': 'ピース(単位)', 'pit latrine': '穴掘りトイレ', 'pit': '堀穴', 'postponed': '実施を延期', 'preliminary template or draft, not actionable in its current form': '現行フォーム内で実用的でない予備テンプレートまたはドラフト', 'previous 100 rows': '前の100行', 'primary incident': '優先すべきインシデント', 'problem connecting to twitter.com - please refresh': 'twitter.comへの接続に問題が発生しました。再読込を行ってください', 'provides a catalogue of digital media.': 'デジタルメディアのカタログを提供します', 'record does not exist': 'レコードが存在しません', 'record id': 'レコードID', 'records deleted': 'レコードを削除しました', 'red': '赤い', 'reported': '報告済み', 'reports successfully imported.': 'レポートは正しくインポートできました', 'representation of the Polygon/Line.': 'Polygon/Lineの表現', 'retired': '終了', 'retry': '再試行', 'river': '河川', 'sack 20kg': '袋 20kg', 'sack 50kg': '袋 50kg', 'secondary effect': '副次効果', 'see comment': 'コメント参照', 'selected': '選択された', 'separated from family': '家族とはぐれた', 'separated': '別居', 'shaved': '坊主', 'shift_start': 'シフト開始', 'short': '小柄', 'short<6cm': '6cm未満', 'sides': '側面', 'sign-up now': '今すぐ登録', 'simple': '単純な', 'single': '独身', 'slim': 'やせ型', 'specify': '明記してください', 'staff': 'スタッフ', 'state location': 'ステートロケーション', 'state': '状態', 'straight': '直毛', 'suffered financial losses': '経済的損失', 'table': 'テーブル', 'table_name': 'テーブル名', 'tall': '大柄', 'technical failure': '技術的な原因', 'this': 'この', 'times and it is still not working. We give in. Sorry.': '回繰り返しましたが、処理を完了できません。ご迷惑をおかけしますが、処理を中止します。', 'to access the system': 'してシステムにアクセスしてください', 'ton': 'トン', 'tonsure': '剃髪', 'total': '合計', 'tracks all shelters and stores basic details regarding them. 
It collaborates with other modules to track people associated with a shelter, the services available etc.': '避難所を追跡し、それらの詳細を蓄積します。避難所に関連付けられた人、利用可能なサービス等の他のモジュールと協業します。', 'tweepy module not available within the running Python - this needs installing for non-Tropo Twitter support!': '実行中のPythonで tweepyモジュールが利用できません。Tropo以外でのTwitter機能利用で必要です', 'unable to parse csv file': 'csvファイルをパースできません。', 'unapproved': '承認されていない', 'uncheck all': 'チェックをすべて外す', 'unidentified': '詳細不明', 'uninhabitable = foundation and structure destroyed': '利用不可能 = 基礎構造や土台部分の破壊など', 'unknown': '不明', 'unspecified': 'その他', 'unverified': '未検証', 'updated': '更新しました', 'updates only': '更新のみ', 'urgent': '緊急', 'using default': '標準値を使用', 'verified': '確認済み', 'volunteer': 'ボランティア', 'wavy': '波状', 'weekly': '週次', 'white': '白', 'wider area, longer term, usually contain multiple Activities': '活動範囲が広く、長期的目標を有しており、複数の支援活動を包括します。', 'widowed': '死別', 'window': '窓', 'windows broken, cracks in walls, roof slightly damaged': '窓破損、壁にひび割れ、屋根の一部損傷', 'within human habitat': '人間の居住地域内', 'xlwt module not available within the running Python - this needs installing for XLS output!': '実行中のPythonでxlwtモジュールが利用できません。XLS出力に必要です。', 'yes': 'はい', }<|fim▁end|>
'Received Shipment canceled': '受け取った輸送をキャンセルしました', 'Received Shipment updated': '受領済みの配送物の情報が更新されました', 'Received Shipments': '受諾した輸送物資',
<|file_name|>sites.py<|end_file_name|><|fim▁begin|>from functools import update_wrapper from django.http import Http404, HttpResponseRedirect from django.contrib.admin import ModelAdmin, actions from django.contrib.admin.forms import AdminAuthenticationForm from django.contrib.auth import logout as auth_logout, REDIRECT_FIELD_NAME from django.contrib.contenttypes import views as contenttype_views from django.views.decorators.csrf import csrf_protect from django.db.models.base import ModelBase from django.core.exceptions import ImproperlyConfigured from django.core.urlresolvers import reverse, NoReverseMatch from django.template.response import TemplateResponse from django.utils import six from django.utils.text import capfirst from django.utils.translation import ugettext as _ from django.views.decorators.cache import never_cache from django.conf import settings LOGIN_FORM_KEY = 'this_is_the_login_form' class AlreadyRegistered(Exception): pass class NotRegistered(Exception): pass class AdminSite(object): """ An AdminSite object encapsulates an instance of the Django admin application, ready to be hooked in to your URLconf. Models are registered with the AdminSite using the register() method, and the get_urls() method can then be used to access Django view functions that present a full admin interface for the collection of registered models. """ login_form = None index_template = None app_index_template = None login_template = None logout_template = None password_change_template = None password_change_done_template = None def __init__(self, name='admin', app_name='admin'): self._registry = {} # model_class class -> admin_class instance self.name = name self.app_name = app_name self._actions = {'delete_selected': actions.delete_selected} self._global_actions = self._actions.copy() def register(self, model_or_iterable, admin_class=None, **options): """ Registers the given model(s) with the given admin class. The model(s) should be Model classes, not instances. 
If an admin class isn't given, it will use ModelAdmin (the default admin options). If keyword arguments are given -- e.g., list_display -- they'll be applied as options to the admin class. If a model is already registered, this will raise AlreadyRegistered. If a model is abstract, this will raise ImproperlyConfigured. """ if not admin_class: admin_class = ModelAdmin if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model._meta.abstract: raise ImproperlyConfigured('The model %s is abstract, so it ' 'cannot be registered with admin.' % model.__name__) if model in self._registry: raise AlreadyRegistered('The model %s is already registered' % model.__name__) # Ignore the registration if the model has been # swapped out. if not model._meta.swapped: # If we got **options then dynamically construct a subclass of # admin_class with those **options. if options: # For reasons I don't quite understand, without a __module__ # the created class appears to "live" in the wrong place, # which causes issues later on. options['__module__'] = __name__ admin_class = type("%sAdmin" % model.__name__, (admin_class,), options) if admin_class is not ModelAdmin and settings.DEBUG: admin_class.validate(model) # Instantiate the admin class to save in the registry self._registry[model] = admin_class(model, self) def unregister(self, model_or_iterable): """ Unregisters the given model(s). If a model isn't already registered, this will raise NotRegistered. """ if isinstance(model_or_iterable, ModelBase): model_or_iterable = [model_or_iterable] for model in model_or_iterable: if model not in self._registry: raise NotRegistered('The model %s is not registered' % model.__name__) del self._registry[model] def add_action(self, action, name=None): """ Register an action to be available globally. 
""" name = name or action.__name__ self._actions[name] = action self._global_actions[name] = action def disable_action(self, name): """ Disable a globally-registered action. Raises KeyError for invalid names. """ del self._actions[name] def get_action(self, name): """ Explicitly get a registered global action whether it's enabled or not. Raises KeyError for invalid names. """ return self._global_actions[name] @property def actions(self): """ Get all the enabled actions as an iterable of (name, func). """ return six.iteritems(self._actions) def has_permission(self, request): """ Returns True if the given HttpRequest has permission to view *at least one* page in the admin site. """ return request.user.is_active and request.user.is_staff def check_dependencies(self): """ Check that all things needed to run the admin have been correctly installed. The default implementation checks that LogEntry, ContentType and the auth context processor are installed. """ from django.contrib.admin.models import LogEntry from django.contrib.contenttypes.models import ContentType if not LogEntry._meta.installed: raise ImproperlyConfigured("Put 'django.contrib.admin' in your " "INSTALLED_APPS setting in order to use the admin application.") if not ContentType._meta.installed: raise ImproperlyConfigured("Put 'django.contrib.contenttypes' in " "your INSTALLED_APPS setting in order to use the admin application.") if not ('django.contrib.auth.context_processors.auth' in settings.TEMPLATE_CONTEXT_PROCESSORS or 'django.core.context_processors.auth' in settings.TEMPLATE_CONTEXT_PROCESSORS): raise ImproperlyConfigured("Put 'django.contrib.auth.context_processors.auth' " "in your TEMPLATE_CONTEXT_PROCESSORS setting in order to use the admin application.") def admin_view(self, view, cacheable=False): """ Decorator to create an admin view attached to this ``AdminSite``. 
This wraps the view and provides permission checking by calling ``self.has_permission``.<|fim▁hole|> class MyAdminSite(AdminSite): def get_urls(self): from django.conf.urls import patterns, url urls = super(MyAdminSite, self).get_urls() urls += patterns('', url(r'^my_view/$', self.admin_view(some_view)) ) return urls By default, admin_views are marked non-cacheable using the ``never_cache`` decorator. If the view can be safely cached, set cacheable=True. """ def inner(request, *args, **kwargs): if LOGIN_FORM_KEY in request.POST and request.user.is_authenticated(): auth_logout(request) if not self.has_permission(request): if request.path == reverse('admin:logout', current_app=self.name): index_path = reverse('admin:index', current_app=self.name) return HttpResponseRedirect(index_path) return self.login(request) return view(request, *args, **kwargs) if not cacheable: inner = never_cache(inner) # We add csrf_protect here so this function can be used as a utility # function for any view, without having to repeat 'csrf_protect'. if not getattr(view, 'csrf_exempt', False): inner = csrf_protect(inner) return update_wrapper(inner, view) def get_urls(self): from django.conf.urls import patterns, url, include if settings.DEBUG: self.check_dependencies() def wrap(view, cacheable=False): def wrapper(*args, **kwargs): return self.admin_view(view, cacheable)(*args, **kwargs) return update_wrapper(wrapper, view) # Admin-site-wide views. 
urlpatterns = patterns('', url(r'^$', wrap(self.index), name='index'), url(r'^logout/$', wrap(self.logout), name='logout'), url(r'^password_change/$', wrap(self.password_change, cacheable=True), name='password_change'), url(r'^password_change/done/$', wrap(self.password_change_done, cacheable=True), name='password_change_done'), url(r'^jsi18n/$', wrap(self.i18n_javascript, cacheable=True), name='jsi18n'), url(r'^r/(?P<content_type_id>\d+)/(?P<object_id>.+)/$', wrap(contenttype_views.shortcut), name='view_on_site'), url(r'^(?P<app_label>\w+)/$', wrap(self.app_index), name='app_list') ) # Add in each model's views. for model, model_admin in six.iteritems(self._registry): urlpatterns += patterns('', url(r'^%s/%s/' % (model._meta.app_label, model._meta.model_name), include(model_admin.urls)) ) return urlpatterns @property def urls(self): return self.get_urls(), self.app_name, self.name def password_change(self, request): """ Handles the "change password" task -- both form display and validation. """ from django.contrib.auth.views import password_change url = reverse('admin:password_change_done', current_app=self.name) defaults = { 'current_app': self.name, 'post_change_redirect': url } if self.password_change_template is not None: defaults['template_name'] = self.password_change_template return password_change(request, **defaults) def password_change_done(self, request, extra_context=None): """ Displays the "success" page after a password change. """ from django.contrib.auth.views import password_change_done defaults = { 'current_app': self.name, 'extra_context': extra_context or {}, } if self.password_change_done_template is not None: defaults['template_name'] = self.password_change_done_template return password_change_done(request, **defaults) def i18n_javascript(self, request): """ Displays the i18n JavaScript that the Django admin requires. This takes into account the USE_I18N setting. If it's set to False, the generated JavaScript will be leaner and faster. 
""" if settings.USE_I18N: from django.views.i18n import javascript_catalog else: from django.views.i18n import null_javascript_catalog as javascript_catalog return javascript_catalog(request, packages=['django.conf', 'django.contrib.admin']) @never_cache def logout(self, request, extra_context=None): """ Logs out the user for the given HttpRequest. This should *not* assume the user is already logged in. """ from django.contrib.auth.views import logout defaults = { 'current_app': self.name, 'extra_context': extra_context or {}, } if self.logout_template is not None: defaults['template_name'] = self.logout_template return logout(request, **defaults) @never_cache def login(self, request, extra_context=None): """ Displays the login form for the given HttpRequest. """ from django.contrib.auth.views import login context = { 'title': _('Log in'), 'app_path': request.get_full_path(), REDIRECT_FIELD_NAME: request.get_full_path(), } context.update(extra_context or {}) defaults = { 'extra_context': context, 'current_app': self.name, 'authentication_form': self.login_form or AdminAuthenticationForm, 'template_name': self.login_template or 'admin/login.html', } return login(request, **defaults) @never_cache def index(self, request, extra_context=None): """ Displays the main admin index page, which lists all of the installed apps that have been registered in this site. """ app_dict = {} user = request.user for model, model_admin in self._registry.items(): app_label = model._meta.app_label has_module_perms = user.has_module_perms(app_label) if has_module_perms: perms = model_admin.get_model_perms(request) # Check whether user has any perm for this module. # If so, add the module to the model_list. 
if True in perms.values(): info = (app_label, model._meta.model_name) model_dict = { 'name': capfirst(model._meta.verbose_name_plural), 'object_name': model._meta.object_name, 'perms': perms, } if perms.get('change', False): try: model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name) except NoReverseMatch: pass if perms.get('add', False): try: model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name) except NoReverseMatch: pass if app_label in app_dict: app_dict[app_label]['models'].append(model_dict) else: app_dict[app_label] = { 'name': app_label.title(), 'app_label': app_label, 'app_url': reverse('admin:app_list', kwargs={'app_label': app_label}, current_app=self.name), 'has_module_perms': has_module_perms, 'models': [model_dict], } # Sort the apps alphabetically. app_list = list(six.itervalues(app_dict)) app_list.sort(key=lambda x: x['name']) # Sort the models alphabetically within each app. for app in app_list: app['models'].sort(key=lambda x: x['name']) context = { 'title': _('Site administration'), 'app_list': app_list, } context.update(extra_context or {}) return TemplateResponse(request,self.index_template or 'admin/index.html', context, current_app=self.name) def app_index(self, request, app_label, extra_context=None): user = request.user has_module_perms = user.has_module_perms(app_label) app_dict = {} for model, model_admin in self._registry.items(): if app_label == model._meta.app_label: if has_module_perms: perms = model_admin.get_model_perms(request) # Check whether user has any perm for this module. # If so, add the module to the model_list. 
if True in perms.values(): info = (app_label, model._meta.model_name) model_dict = { 'name': capfirst(model._meta.verbose_name_plural), 'object_name': model._meta.object_name, 'perms': perms, } if perms.get('change', False): try: model_dict['admin_url'] = reverse('admin:%s_%s_changelist' % info, current_app=self.name) except NoReverseMatch: pass if perms.get('add', False): try: model_dict['add_url'] = reverse('admin:%s_%s_add' % info, current_app=self.name) except NoReverseMatch: pass if app_dict: app_dict['models'].append(model_dict), else: # First time around, now that we know there's # something to display, add in the necessary meta # information. app_dict = { 'name': app_label.title(), 'app_label': app_label, 'app_url': '', 'has_module_perms': has_module_perms, 'models': [model_dict], } if not app_dict: raise Http404('The requested admin page does not exist.') # Sort the models alphabetically within each app. app_dict['models'].sort(key=lambda x: x['name']) context = { 'title': _('%s administration') % capfirst(app_label), 'app_list': [app_dict], } context.update(extra_context or {}) return TemplateResponse(request, self.app_index_template or [ 'admin/%s/app_index.html' % app_label, 'admin/app_index.html' ], context, current_app=self.name) # This global object represents the default admin site, for the common case. # You can instantiate AdminSite in your own code to create a custom admin site. site = AdminSite()<|fim▁end|>
You'll want to use this from within ``AdminSite.get_urls()``:
<|file_name|>main_design.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'main.ui' # # Created by: PyQt4 UI code generator 4.12.1 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui try: _fromUtf8 = QtCore.QString.fromUtf8 except AttributeError: def _fromUtf8(s): return s try: _encoding = QtGui.QApplication.UnicodeUTF8 def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig, _encoding) except AttributeError: def _translate(context, text, disambig): return QtGui.QApplication.translate(context, text, disambig) class Ui_MainWindow(object): def setupUi(self, MainWindow): MainWindow.setObjectName(_fromUtf8("MainWindow")) MainWindow.resize(764, 593) MainWindow.setMinimumSize(QtCore.QSize(650, 500)) self.centralwidget = QtGui.QWidget(MainWindow) self.centralwidget.setObjectName(_fromUtf8("centralwidget")) self.mediaView = QtGui.QFrame(self.centralwidget) self.mediaView.setGeometry(QtCore.QRect(0, 0, 461, 231)) self.mediaView.setStyleSheet(_fromUtf8("")) self.mediaView.setFrameShape(QtGui.QFrame.StyledPanel) self.mediaView.setFrameShadow(QtGui.QFrame.Raised) self.mediaView.setObjectName(_fromUtf8("mediaView")) self.subtitle = QtGui.QLabel(self.centralwidget) self.subtitle.setGeometry(QtCore.QRect(250, 240, 261, 17)) font = QtGui.QFont() font.setPointSize(12) self.subtitle.setFont(font) self.subtitle.setStyleSheet(_fromUtf8("color:white;")) self.subtitle.setText(_fromUtf8("")) self.subtitle.setObjectName(_fromUtf8("subtitle")) self.controlView = QtGui.QWidget(self.centralwidget) self.controlView.setGeometry(QtCore.QRect(30, 270, 661, 130)) self.controlView.setMinimumSize(QtCore.QSize(510, 130)) self.controlView.setMaximumSize(QtCore.QSize(16777215, 130)) self.controlView.setObjectName(_fromUtf8("controlView")) self.verticalLayout = QtGui.QVBoxLayout(self.controlView) self.verticalLayout.setMargin(0) 
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout")) self.gridLayout_8 = QtGui.QGridLayout() self.gridLayout_8.setMargin(1) self.gridLayout_8.setObjectName(_fromUtf8("gridLayout_8")) self.timeDone = QtGui.QLabel(self.controlView) self.timeDone.setMinimumSize(QtCore.QSize(60, 0)) self.timeDone.setMaximumSize(QtCore.QSize(60, 16777215)) self.timeDone.setAlignment(QtCore.Qt.AlignCenter) self.timeDone.setObjectName(_fromUtf8("timeDone")) self.gridLayout_8.addWidget(self.timeDone, 0, 0, 1, 1) self.seekBar = QtGui.QSlider(self.controlView) self.seekBar.setMinimumSize(QtCore.QSize(365, 18)) self.seekBar.setMaximumSize(QtCore.QSize(16777215, 18)) self.seekBar.setOrientation(QtCore.Qt.Horizontal) self.seekBar.setObjectName(_fromUtf8("seekBar")) self.gridLayout_8.addWidget(self.seekBar, 0, 1, 1, 1) self.timeLeft = QtGui.QLabel(self.controlView) self.timeLeft.setMinimumSize(QtCore.QSize(60, 18)) self.timeLeft.setMaximumSize(QtCore.QSize(60, 18)) self.timeLeft.setAlignment(QtCore.Qt.AlignCenter) self.timeLeft.setObjectName(_fromUtf8("timeLeft")) self.gridLayout_8.addWidget(self.timeLeft, 0, 2, 1, 1) self.verticalLayout.addLayout(self.gridLayout_8) self.gridLayout_4 = QtGui.QGridLayout() self.gridLayout_4.setMargin(1) self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4")) self.muteButton = QtGui.QPushButton(self.controlView) self.muteButton.setMinimumSize(QtCore.QSize(30, 30)) self.muteButton.setMaximumSize(QtCore.QSize(30, 30)) self.muteButton.setText(_fromUtf8("")) self.muteButton.setObjectName(_fromUtf8("muteButton")) self.gridLayout_4.addWidget(self.muteButton, 0, 4, 1, 1) self.expansionWidget_3 = QtGui.QWidget(self.controlView) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.expansionWidget_3.sizePolicy().hasHeightForWidth()) self.expansionWidget_3.setSizePolicy(sizePolicy) 
self.expansionWidget_3.setObjectName(_fromUtf8("expansionWidget_3")) self.gridLayout_7 = QtGui.QGridLayout(self.expansionWidget_3) self.gridLayout_7.setMargin(0)<|fim▁hole|> self.gridLayout_4.addWidget(self.expansionWidget_3, 0, 1, 1, 1) self.volumeBar = QtGui.QSlider(self.controlView) self.volumeBar.setMinimumSize(QtCore.QSize(175, 0)) self.volumeBar.setMaximumSize(QtCore.QSize(100, 16777215)) self.volumeBar.setOrientation(QtCore.Qt.Horizontal) self.volumeBar.setObjectName(_fromUtf8("volumeBar")) self.gridLayout_4.addWidget(self.volumeBar, 0, 5, 1, 1) self.mediaSettingsWidget = QtGui.QWidget(self.controlView) self.mediaSettingsWidget.setMinimumSize(QtCore.QSize(140, 60)) self.mediaSettingsWidget.setMaximumSize(QtCore.QSize(140, 60)) self.mediaSettingsWidget.setObjectName(_fromUtf8("mediaSettingsWidget")) self.horizontalLayout_6 = QtGui.QHBoxLayout(self.mediaSettingsWidget) self.horizontalLayout_6.setMargin(0) self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6")) self.fullscreenButton = QtGui.QPushButton(self.mediaSettingsWidget) self.fullscreenButton.setMinimumSize(QtCore.QSize(30, 30)) self.fullscreenButton.setMaximumSize(QtCore.QSize(30, 30)) self.fullscreenButton.setText(_fromUtf8("")) self.fullscreenButton.setObjectName(_fromUtf8("fullscreenButton")) self.horizontalLayout_6.addWidget(self.fullscreenButton) self.playlistButton = QtGui.QPushButton(self.mediaSettingsWidget) self.playlistButton.setMinimumSize(QtCore.QSize(30, 30)) self.playlistButton.setMaximumSize(QtCore.QSize(30, 30)) self.playlistButton.setText(_fromUtf8("")) self.playlistButton.setObjectName(_fromUtf8("playlistButton")) self.horizontalLayout_6.addWidget(self.playlistButton) self.stopButton = QtGui.QPushButton(self.mediaSettingsWidget) self.stopButton.setMinimumSize(QtCore.QSize(30, 30)) self.stopButton.setMaximumSize(QtCore.QSize(30, 30)) self.stopButton.setText(_fromUtf8("")) self.stopButton.setObjectName(_fromUtf8("stopButton")) 
self.horizontalLayout_6.addWidget(self.stopButton) self.gridLayout_4.addWidget(self.mediaSettingsWidget, 0, 0, 1, 1) self.mediaControlWidget = QtGui.QWidget(self.controlView) self.mediaControlWidget.setMinimumSize(QtCore.QSize(225, 70)) self.mediaControlWidget.setMaximumSize(QtCore.QSize(225, 70)) self.mediaControlWidget.setObjectName(_fromUtf8("mediaControlWidget")) self.horizontalLayout_7 = QtGui.QHBoxLayout(self.mediaControlWidget) self.horizontalLayout_7.setMargin(0) self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7")) self.previous = QtGui.QPushButton(self.mediaControlWidget) self.previous.setMinimumSize(QtCore.QSize(40, 40)) self.previous.setMaximumSize(QtCore.QSize(40, 40)) self.previous.setText(_fromUtf8("")) self.previous.setObjectName(_fromUtf8("previous")) self.horizontalLayout_7.addWidget(self.previous) self.playState = QtGui.QPushButton(self.mediaControlWidget) self.playState.setMinimumSize(QtCore.QSize(50, 50)) self.playState.setMaximumSize(QtCore.QSize(50, 50)) self.playState.setText(_fromUtf8("")) icon = QtGui.QIcon.fromTheme(_fromUtf8("play-2.svg")) self.playState.setIcon(icon) self.playState.setObjectName(_fromUtf8("playState")) self.horizontalLayout_7.addWidget(self.playState) self.next = QtGui.QPushButton(self.mediaControlWidget) self.next.setMinimumSize(QtCore.QSize(40, 40)) self.next.setMaximumSize(QtCore.QSize(40, 40)) self.next.setText(_fromUtf8("")) self.next.setObjectName(_fromUtf8("next")) self.horizontalLayout_7.addWidget(self.next) self.gridLayout_4.addWidget(self.mediaControlWidget, 0, 2, 1, 1) self.expansionWidget_4 = QtGui.QWidget(self.controlView) sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Preferred) sizePolicy.setHorizontalStretch(0) sizePolicy.setVerticalStretch(0) sizePolicy.setHeightForWidth(self.expansionWidget_4.sizePolicy().hasHeightForWidth()) self.expansionWidget_4.setSizePolicy(sizePolicy) self.expansionWidget_4.setObjectName(_fromUtf8("expansionWidget_4")) 
self.gridLayout_4.addWidget(self.expansionWidget_4, 0, 3, 1, 1) self.verticalLayout.addLayout(self.gridLayout_4) MainWindow.setCentralWidget(self.centralwidget) self.menubar = QtGui.QMenuBar(MainWindow) self.menubar.setGeometry(QtCore.QRect(0, 0, 764, 29)) self.menubar.setObjectName(_fromUtf8("menubar")) self.menuFile = QtGui.QMenu(self.menubar) self.menuFile.setObjectName(_fromUtf8("menuFile")) self.menuPlayback = QtGui.QMenu(self.menubar) self.menuPlayback.setObjectName(_fromUtf8("menuPlayback")) self.menuSpeed = QtGui.QMenu(self.menuPlayback) self.menuSpeed.setObjectName(_fromUtf8("menuSpeed")) self.menu_Subtitles = QtGui.QMenu(self.menubar) self.menu_Subtitles.setObjectName(_fromUtf8("menu_Subtitles")) self.menu_Audio = QtGui.QMenu(self.menubar) self.menu_Audio.setObjectName(_fromUtf8("menu_Audio")) self.menu_Video = QtGui.QMenu(self.menubar) self.menu_Video.setObjectName(_fromUtf8("menu_Video")) MainWindow.setMenuBar(self.menubar) self.actionOpen_File = QtGui.QAction(MainWindow) self.actionOpen_File.setShortcutContext(QtCore.Qt.WindowShortcut) self.actionOpen_File.setObjectName(_fromUtf8("actionOpen_File")) self.actionExit = QtGui.QAction(MainWindow) self.actionExit.setObjectName(_fromUtf8("actionExit")) self.actionOpen_Multiple_Files = QtGui.QAction(MainWindow) self.actionOpen_Multiple_Files.setObjectName(_fromUtf8("actionOpen_Multiple_Files")) self.actionAdd_Subtitle_File = QtGui.QAction(MainWindow) self.actionAdd_Subtitle_File.setObjectName(_fromUtf8("actionAdd_Subtitle_File")) self.actionJump_Forward = QtGui.QAction(MainWindow) self.actionJump_Forward.setObjectName(_fromUtf8("actionJump_Forward")) self.actionJump_Backward = QtGui.QAction(MainWindow) self.actionJump_Backward.setObjectName(_fromUtf8("actionJump_Backward")) self.actionX0_5 = QtGui.QAction(MainWindow) self.actionX0_5.setObjectName(_fromUtf8("actionX0_5")) self.actionX_1 = QtGui.QAction(MainWindow) self.actionX_1.setObjectName(_fromUtf8("actionX_1")) self.actionX_2 = QtGui.QAction(MainWindow) 
self.actionX_2.setObjectName(_fromUtf8("actionX_2")) self.actionX_4 = QtGui.QAction(MainWindow) self.actionX_4.setObjectName(_fromUtf8("actionX_4")) self.actionX_8 = QtGui.QAction(MainWindow) self.actionX_8.setObjectName(_fromUtf8("actionX_8")) self.actionAdd_Subtitle_Track = QtGui.QAction(MainWindow) self.actionAdd_Subtitle_Track.setObjectName(_fromUtf8("actionAdd_Subtitle_Track")) self.actionPlay = QtGui.QAction(MainWindow) self.actionPlay.setObjectName(_fromUtf8("actionPlay")) self.actionPause = QtGui.QAction(MainWindow) self.actionPause.setObjectName(_fromUtf8("actionPause")) self.actionStop = QtGui.QAction(MainWindow) self.actionStop.setObjectName(_fromUtf8("actionStop")) self.actionPrevious = QtGui.QAction(MainWindow) self.actionPrevious.setObjectName(_fromUtf8("actionPrevious")) self.actionNext = QtGui.QAction(MainWindow) self.actionNext.setObjectName(_fromUtf8("actionNext")) self.actionJump_to_specific_time = QtGui.QAction(MainWindow) self.actionJump_to_specific_time.setObjectName(_fromUtf8("actionJump_to_specific_time")) self.actionIncrease_Volume = QtGui.QAction(MainWindow) self.actionIncrease_Volume.setObjectName(_fromUtf8("actionIncrease_Volume")) self.actionDecrease_Volume = QtGui.QAction(MainWindow) self.actionDecrease_Volume.setObjectName(_fromUtf8("actionDecrease_Volume")) self.actionMute = QtGui.QAction(MainWindow) self.actionMute.setObjectName(_fromUtf8("actionMute")) self.actionFullscreen = QtGui.QAction(MainWindow) self.actionFullscreen.setCheckable(False) self.actionFullscreen.setObjectName(_fromUtf8("actionFullscreen")) self.actionShift_forward_by_1_second = QtGui.QAction(MainWindow) self.actionShift_forward_by_1_second.setObjectName(_fromUtf8("actionShift_forward_by_1_second")) self.actionShift_backward_by_1_second = QtGui.QAction(MainWindow) self.actionShift_backward_by_1_second.setObjectName(_fromUtf8("actionShift_backward_by_1_second")) self.menuFile.addAction(self.actionOpen_File) self.menuFile.addAction(self.actionOpen_Multiple_Files) 
self.menuFile.addSeparator() self.menuFile.addAction(self.actionExit) self.menuSpeed.addAction(self.actionX0_5) self.menuSpeed.addAction(self.actionX_1) self.menuSpeed.addAction(self.actionX_2) self.menuSpeed.addAction(self.actionX_4) self.menuSpeed.addAction(self.actionX_8) self.menuPlayback.addAction(self.actionJump_Forward) self.menuPlayback.addAction(self.actionJump_Backward) self.menuPlayback.addAction(self.menuSpeed.menuAction()) self.menuPlayback.addSeparator() self.menuPlayback.addAction(self.actionPlay) self.menuPlayback.addAction(self.actionStop) self.menuPlayback.addSeparator() self.menuPlayback.addAction(self.actionPrevious) self.menuPlayback.addAction(self.actionNext) self.menuPlayback.addSeparator() self.menuPlayback.addAction(self.actionJump_to_specific_time) self.menu_Subtitles.addAction(self.actionAdd_Subtitle_Track) self.menu_Subtitles.addSeparator() self.menu_Subtitles.addAction(self.actionShift_forward_by_1_second) self.menu_Subtitles.addAction(self.actionShift_backward_by_1_second) self.menu_Audio.addAction(self.actionIncrease_Volume) self.menu_Audio.addAction(self.actionDecrease_Volume) self.menu_Audio.addAction(self.actionMute) self.menu_Audio.addSeparator() self.menu_Video.addAction(self.actionFullscreen) self.menubar.addAction(self.menuFile.menuAction()) self.menubar.addAction(self.menuPlayback.menuAction()) self.menubar.addAction(self.menu_Subtitles.menuAction()) self.menubar.addAction(self.menu_Audio.menuAction()) self.menubar.addAction(self.menu_Video.menuAction()) self.retranslateUi(MainWindow) QtCore.QMetaObject.connectSlotsByName(MainWindow) def retranslateUi(self, MainWindow): MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow", None)) self.timeDone.setText(_translate("MainWindow", "00:00:00", None)) self.timeLeft.setText(_translate("MainWindow", "00:00:00", None)) self.muteButton.setToolTip(_translate("MainWindow", "volume", None)) self.fullscreenButton.setToolTip(_translate("MainWindow", "Fullscreen", None)) 
self.playlistButton.setToolTip(_translate("MainWindow", "Playlist", None)) self.stopButton.setToolTip(_translate("MainWindow", "Stop", None)) self.previous.setToolTip(_translate("MainWindow", "Previous", None)) self.playState.setToolTip(_translate("MainWindow", "Play/Pause", None)) self.next.setToolTip(_translate("MainWindow", "Next", None)) self.menuFile.setTitle(_translate("MainWindow", "&Media", None)) self.menuPlayback.setTitle(_translate("MainWindow", "P&layback", None)) self.menuSpeed.setTitle(_translate("MainWindow", "&Speed", None)) self.menu_Subtitles.setTitle(_translate("MainWindow", "&Subtitles", None)) self.menu_Audio.setTitle(_translate("MainWindow", "&Audio ", None)) self.menu_Video.setTitle(_translate("MainWindow", "&Video", None)) self.actionOpen_File.setText(_translate("MainWindow", "&Open File", None)) self.actionOpen_File.setShortcut(_translate("MainWindow", "Ctrl+O", None)) self.actionExit.setText(_translate("MainWindow", "&Exit", None)) self.actionExit.setShortcut(_translate("MainWindow", "Ctrl+Q", None)) self.actionOpen_Multiple_Files.setText(_translate("MainWindow", "Open &Multiple Files", None)) self.actionOpen_Multiple_Files.setShortcut(_translate("MainWindow", "Ctrl+Shift+O", None)) self.actionAdd_Subtitle_File.setText(_translate("MainWindow", "&Add Subtitle File", None)) self.actionJump_Forward.setText(_translate("MainWindow", "&Jump Forward", None)) self.actionJump_Forward.setShortcut(_translate("MainWindow", "Ctrl+Shift++", None)) self.actionJump_Backward.setText(_translate("MainWindow", "Jump &Backward", None)) self.actionJump_Backward.setShortcut(_translate("MainWindow", "Ctrl+Shift+-", None)) self.actionX0_5.setText(_translate("MainWindow", "&x 0.5", None)) self.actionX_1.setText(_translate("MainWindow", "&Normal Speed", None)) self.actionX_2.setText(_translate("MainWindow", "x &2", None)) self.actionX_4.setText(_translate("MainWindow", "x &4", None)) self.actionX_8.setText(_translate("MainWindow", "x &8", None)) 
self.actionAdd_Subtitle_Track.setText(_translate("MainWindow", "&Add Subtitle Track", None)) self.actionPlay.setText(_translate("MainWindow", "&Play/Pause", None)) self.actionPlay.setShortcut(_translate("MainWindow", "Space", None)) self.actionPause.setText(_translate("MainWindow", "Pause", None)) self.actionPause.setShortcut(_translate("MainWindow", "Space", None)) self.actionStop.setText(_translate("MainWindow", "St&op", None)) self.actionStop.setShortcut(_translate("MainWindow", "Ctrl+Shift+S", None)) self.actionPrevious.setText(_translate("MainWindow", "P&revious", None)) self.actionPrevious.setShortcut(_translate("MainWindow", "Ctrl+Shift+Left", None)) self.actionNext.setText(_translate("MainWindow", "&Next", None)) self.actionNext.setShortcut(_translate("MainWindow", "Ctrl+Shift+Right", None)) self.actionJump_to_specific_time.setText(_translate("MainWindow", "J&ump to specific time", None)) self.actionJump_to_specific_time.setShortcut(_translate("MainWindow", "Ctrl+T", None)) self.actionIncrease_Volume.setText(_translate("MainWindow", "&Increase Volume", None)) self.actionIncrease_Volume.setShortcut(_translate("MainWindow", "Ctrl+Up", None)) self.actionDecrease_Volume.setText(_translate("MainWindow", "&Decrease Volume", None)) self.actionDecrease_Volume.setShortcut(_translate("MainWindow", "Ctrl+Down", None)) self.actionMute.setText(_translate("MainWindow", "&Mute", None)) self.actionMute.setShortcut(_translate("MainWindow", "M", None)) self.actionFullscreen.setText(_translate("MainWindow", "&Fullscreen", None)) self.actionFullscreen.setShortcut(_translate("MainWindow", "F", None)) self.actionShift_forward_by_1_second.setText(_translate("MainWindow", "&Shift Forward By 1 Second", None)) self.actionShift_forward_by_1_second.setShortcut(_translate("MainWindow", "H", None)) self.actionShift_backward_by_1_second.setText(_translate("MainWindow", "Shift &Backward By 1 Second", None)) self.actionShift_backward_by_1_second.setShortcut(_translate("MainWindow", "G", 
None))<|fim▁end|>
self.gridLayout_7.setObjectName(_fromUtf8("gridLayout_7"))
<|file_name|>snow.js<|end_file_name|><|fim▁begin|>(function () { var g = void 0, k = !0, m = null, o = !1, p, q = this, r = function (a) { var b = typeof a; if ("object" == b) if (a) { if (a instanceof Array) return "array"; if (a instanceof Object) return b; var c = Object.prototype.toString.call(a); if ("[object Window]" == c) return "object"; if ("[object Array]" == c || "number" == typeof a.length && "undefined" != typeof a.splice && "undefined" != typeof a.propertyIsEnumerable && !a.propertyIsEnumerable("splice")) return "array"; if ("[object Function]" == c || "undefined" != typeof a.call && "undefined" != typeof a.propertyIsEnumerable && !a.propertyIsEnumerable("call")) return "function" } else return "null"; else if ("function" == b && "undefined" == typeof a.call) return "object"; return b }, aa = function (a) { var b = r(a); return "array" == b || "object" == b && "number" == typeof a.length }, u = function (a) { return "string" == typeof a }, ba = function (a) { a = r(a); return "object" == a || "array" == a || "function" == a }, ca = function (a, b, c) { return a.call.apply(a.bind, arguments) }, da = function (a, b, c) { if (!a) throw Error(); if (2 < arguments.length) { var d = Array.prototype.slice.call(arguments, 2); return function () { var c = Array.prototype.slice.call(arguments); Array.prototype.unshift.apply(c, d); return a.apply(b, c) } } return function () { return a.apply(b, arguments) } }, v = function (a, b, c) { v = Function.prototype.bind && -1 != Function.prototype.bind.toString().indexOf("native code") ? 
ca : da; return v.apply(m, arguments) }, ea = function (a, b) { function c() {} c.prototype = b.prototype; a.Sa = b.prototype; a.prototype = new c }; Function.prototype.bind = Function.prototype.bind || function (a, b) { if (1 < arguments.length) { var c = Array.prototype.slice.call(arguments, 1); c.unshift(this, a); return v.apply(m, c) } return v(this, a) }; var w = function (a) { this.stack = Error().stack || ""; if (a) this.message = "" + a }; ea(w, Error); w.prototype.name = "CustomError"; var fa = function (a, b) { for (var c = 1; c < arguments.length; c++) var d = ("" + arguments[c]).replace(/\$/g, "$$$$"), a = a.replace(/\%s/, d); return a }, ma = function (a) { if (!ga.test(a)) return a; - 1 != a.indexOf("&") && (a = a.replace(ha, "&amp;")); - 1 != a.indexOf("<") && (a = a.replace(ia, "&lt;")); - 1 != a.indexOf(">") && (a = a.replace(ka, "&gt;")); - 1 != a.indexOf('"') && (a = a.replace(la, "&quot;")); return a }, ha = /&/g, ia = /</g, ka = />/g, la = /\"/g, ga = /[&<>\"]/; var x = function (a, b) { b.unshift(a); w.call(this, fa.apply(m, b)); b.shift(); this.Ra = a }; ea(x, w); x.prototype.name = "AssertionError"; var y = function (a, b, c) { if (!a) { var d = Array.prototype.slice.call(arguments, 2), f = "Assertion failed"; if (b) var f = f + (": " + b), e = d; throw new x("" + f, e || []); } }; var z = Array.prototype, na = z.indexOf ? function (a, b, c) { y(a.length != m); return z.indexOf.call(a, b, c) } : function (a, b, c) { c = c == m ? 0 : 0 > c ? Math.max(0, a.length + c) : c; if (u(a)) return !u(b) || 1 != b.length ? -1 : a.indexOf(b, c); for (; c < a.length; c++) if (c in a && a[c] === b) return c; return -1 }, oa = z.forEach ? function (a, b, c) { y(a.length != m); z.forEach.call(a, b, c) } : function (a, b, c) { for (var d = a.length, f = u(a) ? 
a.split("") : a, e = 0; e < d; e++) e in f && b.call(c, f[e], e, a) }, pa = function (a) { return z.concat.apply(z, arguments) }, qa = function (a) { if ("array" == r(a)) return pa(a); for (var b = [], c = 0, d = a.length; c < d; c++) b[c] = a[c]; return b }, ra = function (a, b, c) { y(a.length != m); return 2 >= arguments.length ? z.slice.call(a, b) : z.slice.call(a, b, c) }; var A = function (a, b) { this.x = a !== g ? a : 0; this.y = b !== g ? b : 0 }; A.prototype.toString = function () { return "(" + this.x + ", " + this.y + ")" }; var sa = function (a, b) { for (var c in a) b.call(g, a[c], c, a) }, ta = "constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf".split(","), ua = function (a, b) { for (var c, d, f = 1; f < arguments.length; f++) { d = arguments[f]; for (c in d) a[c] = d[c]; for (var e = 0; e < ta.length; e++) c = ta[e], Object.prototype.hasOwnProperty.call(d, c) && (a[c] = d[c]) } }; var B, va, C, wa, xa = function () { return q.navigator ? q.navigator.userAgent : m }; wa = C = va = B = o; var D; if (D = xa()) { var ya = q.navigator; B = 0 == D.indexOf("Opera"); va = !B && -1 != D.indexOf("MSIE"); C = !B && -1 != D.indexOf("WebKit"); wa = !B && !C && "Gecko" == ya.product } var za = B, E = va, F = wa, G = C, Aa; a: { var H = "", I; if (za && q.opera) var Ba = q.opera.version, H = "function" == typeof Ba ? Ba() : Ba; else if (F ? I = /rv\:([^\);]+)(\)|;)/ : E ? I = /MSIE\s+([^\);]+)(\)|;)/ : G && (I = /WebKit\/(\S+)/), I) var Ca = I.exec(xa()), H = Ca ? Ca[1] : ""; if (E) { var Da, Ea = q.document; Da = Ea ? 
Ea.documentMode : g; if (Da > parseFloat(H)) { Aa = "" + Da; break a } } Aa = H } var Fa = Aa, Ga = {}, Ha = function (a) { var b; if (!(b = Ga[a])) { b = 0; for (var c = ("" + Fa).replace(/^[\s\xa0]+|[\s\xa0]+$/g, "").split("."), d = ("" + a).replace(/^[\s\xa0]+|[\s\xa0]+$/g, "").split("."), f = Math.max(c.length, d.length), e = 0; 0 == b && e < f; e++) { var h = c[e] || "", n = d[e] || "", j = RegExp("(\\d*)(\\D*)", "g"), t = RegExp("(\\d*)(\\D*)", "g"); do { var i = j.exec(h) || ["", "", ""], l = t.exec(n) || ["", "", ""]; if (0 == i[0].length && 0 == l[0].length) break; b = ((0 == i[1].length ? 0 : parseInt(i[1], 10)) < (0 == l[1].length ? 0 : parseInt(l[1], 10)) ? -1 : (0 == i[1].length ? 0 : parseInt(i[1], 10)) > (0 == l[1].length ? 0 : parseInt(l[1], 10)) ? 1 : 0) || ((0 == i[2].length) < (0 == l[2].length) ? -1 : (0 == i[2].length) > (0 == l[2].length) ? 1 : 0) || (i[2] < l[2] ? -1 : i[2] > l[2] ? 1 : 0) } while (0 == b) } b = Ga[a] = 0 <= b } return b }, Ia = {}, J = function (a) { return Ia[a] || (Ia[a] = E && document.documentMode && document.documentMode >= a) }; var Ja, Ka = !E || J(9); !F && !E || E && J(9) || F && Ha("1.9.1"); E && Ha("9"); var La = function (a, b) { var c; c = (c = a.className) && "function" == typeof c.split ? c.split(/\s+/) : []; var d = ra(arguments, 1), f; f = c; for (var e = 0, h = 0; h < d.length; h++) 0 <= na(f, d[h]) || (f.push(d[h]), e++); f = e == d.length; a.className = c.join(" "); return f }; var Ma = function (a) { return a ? new K(L(a)) : Ja || (Ja = new K) }, Na = function (a, b) { var c = b && "*" != b ? b.toUpperCase() : ""; return a.querySelectorAll && a.querySelector && (!G || "CSS1Compat" == document.compatMode || Ha("528")) && c ? a.querySelectorAll(c + "") : a.getElementsByTagName(c || "*") }, Pa = function (a, b) { sa(b, function (b, d) { "style" == d ? a.style.cssText = b : "class" == d ? a.className = b : "for" == d ? a.htmlFor = b : d in Oa ? a.setAttribute(Oa[d], b) : 0 == d.lastIndexOf("aria-", 0) ? 
a.setAttribute(d, b) : a[d] = b }) }, Oa = { cellpadding: "cellPadding", cellspacing: "cellSpacing", colspan: "colSpan", rowspan: "rowSpan", valign: "vAlign", height: "height", width: "width", usemap: "useMap", frameborder: "frameBorder", maxlength: "maxLength", type: "type" }, Qa = function (a, b, c) { function d(c) { c && b.appendChild(u(c) ? a.createTextNode(c) : c) } for (var f = 2; f < c.length; f++) { var e = c[f]; if (aa(e) && !(ba(e) && 0 < e.nodeType)) { var h; a: { if (e && "number" == typeof e.length) { if (ba(e)) { h = "function" == typeof e.item || "string" == typeof e.item; break a } if ("function" == r(e)) { h = "function" == typeof e.item; break a } } h = o } oa(h ? qa(e) : e, d) } else d(e) } }, L = function (a) { return 9 == a.nodeType ? a : a.ownerDocument || a.document }, K = function (a) { this.C = a || q.document || document }; K.prototype.za = function (a, b, c) { var d = this.C, f = arguments, e = f[0], h = f[1]; if (!Ka && h && (h.name || h.type)) { e = ["<", e]; h.name && e.push(' name="', ma(h.name), '"'); if (h.type) { e.push(' type="', ma(h.type), '"'); var n = {}; ua(n, h); h = n; delete h.type } e.push(">"); e = e.join("") } e = d.createElement(e); if (h) u(h) ? e.className = h : "array" == r(h) ? La.apply(m, [e].concat(h)) : Pa(e, h); 2 < f.length && Qa(d, e, f); return e }; K.prototype.createElement = function (a) { return this.C.createElement(a) }; K.prototype.createTextNode = function (a) { return this.C.createTextNode(a) }; K.prototype.appendChild = function (a, b) { a.appendChild(b) }; var M = function (a) { var b; a: { b = L(a); if (b.defaultView && b.defaultView.getComputedStyle && (b = b.defaultView.getComputedStyle(a, m))) { b = b.position || b.getPropertyValue("position"); break a } b = "" } return b || (a.currentStyle ? 
a.currentStyle.position : m) || a.style && a.style.position }, Ra = function (a) { if (E && !J(8)) return a.offsetParent; for (var b = L(a), c = M(a), d = "fixed" == c || "absolute" == c, a = a.parentNode; a && a != b; a = a.parentNode) if (c = M(a), d = d && "static" == c && a != b.documentElement && a != b.body, !d && (a.scrollWidth > a.clientWidth || a.scrollHeight > a.clientHeight || "fixed" == c || "absolute" == c || "relative" == c)) return a; return m }, N = function (a) { var b, c = L(a), d = M(a), f = F && c.getBoxObjectFor && !a.getBoundingClientRect && "absolute" == d && (b = c.getBoxObjectFor(a)) && (0 > b.screenX || 0 > b.screenY), e = new A(0, 0), h; b = c ? 9 == c.nodeType ? c : L(c) : document; if (h = E) if (h = !J(9)) h = "CSS1Compat" != Ma(b).C.compatMode; h = h ? b.body : b.documentElement; if (a == h) return e; if (a.getBoundingClientRect) { b = a.getBoundingClientRect(); if (E) a = a.ownerDocument, b.left -= a.documentElement.clientLeft + a.body.clientLeft, b.top -= a.documentElement.clientTop + a.body.clientTop; a = Ma(c).C; c = !G && "CSS1Compat" == a.compatMode ? 
a.documentElement : a.body; a = a.parentWindow || a.defaultView; c = new A(a.pageXOffset || c.scrollLeft, a.pageYOffset || c.scrollTop); e.x = b.left + c.x; e.y = b.top + c.y } else if (c.getBoxObjectFor && !f) b = c.getBoxObjectFor(a), c = c.getBoxObjectFor(h), e.x = b.screenX - c.screenX, e.y = b.screenY - c.screenY; else { b = a; do { e.x += b.offsetLeft; e.y += b.offsetTop; b != a && (e.x += b.clientLeft || 0, e.y += b.clientTop || 0); if (G && "fixed" == M(b)) { e.x += c.body.scrollLeft; e.y += c.body.scrollTop; break } b = b.offsetParent } while (b && b != a); if (za || G && "absolute" == d) e.y -= c.body.offsetTop; for (b = a; (b = Ra(b)) && b != c.body && b != h;) if (e.x -= b.scrollLeft, !za || "TR" != b.tagName) e.y -= b.scrollTop } return e }, Ta = function () { var a = Ma(g), b = m; if (E) b = a.C.createStyleSheet(), Sa(b); else { var c = Na(a.C, "head")[0]; c || (b = Na(a.C, "body")[0], c = a.za("head"), b.parentNode.insertBefore(c, b)); b = a.za("style"); Sa(b); a.appendChild(c, b) } }, Sa = function (a) { E ? a.cssText = "canvas:active{cursor:pointer}" : a[G ? "innerText" : "innerHTML"] = "canvas:active{cursor:pointer}" }; var O = function (a, b) { var c = {}, d; for (d in b) c[d] = a.style[d], a.style[d] = b[d]; return c }, P = function (a, b) { this.M = a || m; this.fa = m; this.ya = b || function () {}; this.Q = 0; this.ta = 0.05 }, Ua = function (a, b) { a.ya = b }; P.prototype.s = function () { if (this.M && this.ta) { this.Q += this.ta; if (1 < this.Q) this.Q = 1, this.ta = 0, this.ya(); var a = "0 0 2px rgba(255,0,0," + this.Q + ")", a = O(this.M, { boxShadow: a, MozBoxShadow: a, webkitBoxShadow: a, oBoxShadow: a, msBoxShadow: a, opacity: this.Q }); if (!this.fa) this.fa = a } }; P.prototype.restore = function () { this.M && this.fa && O(this.M, this.fa) }; var Va = function () { this.H = [] }; Va.prototype.k = function (a, b, c) { if (a) { this.H.push(arguments); var d = a, f = b, e = c; d.addEventListener ? 
d.addEventListener(f, e, o) : d.attachEvent("on" + f, e) } }; var Wa = function (a, b, c) { a && (a.removeEventListener ? a.removeEventListener(b, c, o) : a.detachEvent("on" + b, c)) }; var Xa = Math.PI / 2, Q = function (a, b, c) { this.ca = a; this.P = document.createElement("div"); this.P.style.position = "absolute"; var a = Math.floor(3 * Math.random() + 0), d = "\u2744"; 1 < a ? d = "\u2745" : 2 < a && (d = "\u2746"); this.P.innerHTML = d; this.ca.appendChild(this.P); this.Y = c; this.X = b; this.reset() }; Q.prototype.reset = function () { this.x = Math.random() * this.X; this.ea = 4.5 * Math.random() + 1; this.y = -this.ea; this.B = 2 * Math.random() + -1; this.xa = this.ea; var a = Math.floor(255 * (0.4 * Math.random() + 0.5)).toString(16); O(this.P, { fontSize: 2.5 * this.ea + "px", left: this.x + "px", top: this.y + "px", color: "#" + a + a + a }) }; Q.prototype.move = function (a, b) { this.y += this.B * b + this.xa * a; this.B += 0.2 * Math.random() + -0.1; if (-1 > this.B) this.B = -1; else if (1 < this.B) this.B = 1; this.x += this.B * a + this.xa * b; this.y > this.Y + this.ea && this.reset() }; Q.prototype.s = function () { this.P.style.left = this.x + "px"; this.P.style.top = this.y + "px" }; var R = function (a) { this.ca = a; this.X = a.offsetWidth; this.Y = a.offsetHeight; this.da = []; this.ra = 1; this.sa = 0; this.Ma = !! navigator.userAgent.match(/(iPod|iPhone|iPad)/) }; R.prototype.s = function () { 200 > this.da.length && 0.5 > Math.random() && this.da.push(new Q(this.ca, this.X, this.Y)); for (var a = 0, b; b = this.da[a]; a++) b.move(this.ra, this.sa), b.s() }; R.prototype.Ca = function (a) { if (this.Ma) { var b = window.orientation & 2, c = b ? a.beta : a.gamma / 2, a = b ? 0 > a.gamma ? 1 : -1 : 0 > a.beta ? 
-1 : 1; if (c && 45 > Math.abs(c)) c = a * Xa * (c / 45), this.ra = Math.cos(c), this.sa = Math.sin(c) } else { if (!a.gamma && a.x) a.gamma = -(a.x * (180 / Math.PI)); if (a.gamma && 90 > Math.abs(a.gamma)) c = Xa * (a.gamma / 90), this.ra = Math.cos(c), this.sa = Math.sin(c) } }; R.prototype.N = function (a, b) { O(this.ca, { width: a + "px", height: b + "px" }); this.X = a; this.Y = b; for (var c = 0, d; d = this.da[c]; c++) { var f = b; d.X = a; d.Y = f } }; var Ya = function (a, b, c, d) { this.oa = b; this.g = a; this.x = c; this.y = d; this.width = b.width; this.height = b.height; this.la = (this.width + 120) / 88; this.Ea = (this.height + 120) / 66; this.ga = 0; this.$ = o; this.w = []; this.G = []; for (a = 0; 66 > a; a++) { this.w[a] = []; this.G[a] = []; b = Math.min(a, 65 - a); for (c = 0; 88 > c; c++) d = Math.min(c, 87 - c), 8 > d || 8 > b ? (d = 1 - Math.min(d, b) / 8, this.G[a].push(4 * Math.random() * d * d)) : this.G[a].push(0), this.w[a].push(0) } this.S = 0; this.L = []; this.v = m; this.ka = []; this.W = this.aa = 0; this.I = m; this.ma = o }, Za = function (a, b, c) { b = (b + 60) / a.la | 0; c = (c + 60) / a.Ea | 0; return [b, c] }, $a = function (a, b, c) { for (var d = 0, f = c - 1; f <= c + 1; f++) for (var e = b - 1; e <= b + 1; e++) var h = a, n = e, j = f, n = Math.max(0, Math.min(87, n)), j = Math.max(0, Math.min(65, j)), d = d + h.w[j][n]; return d / 9 }, ab = function (a, b) { a.$ = k; b.fillStyle = "rgba(240,246,246,0.8)"; b.fillRect(0, 0, b.canvas.width, b.canvas.height) }; Ya.prototype.s = function (a) { if (!(this.$ || 88 < this.ga || 5808 < this.S)) { a.fillStyle = "rgba(240,246,246,0.08)"; for (var b = 0; 200 > b; b++) { var c = Math.random() * (this.width + 120) - 60, d = Math.random() * (this.height + 120) - 60, f = Za(this, c, d), e = this.w[f[1]][f[0]]; e >= 4 * (1 + Math.random()) / 2 && (c |= 0, d |= 0, a.beginPath(), a.arc(c, d, e / 4 * this.la | 0, 0, 2 * Math.PI, k), a.fill(), a.closePath(), this.S++) } for (b = 0; 200 
> b; b++) if (c = Math.random() * (this.width + 120) - 60, d = Math.random() * (this.height + 120) - 60, f = Za(this, c, d), e = this.w[f[1]][f[0]], f = this.G[f[1]][f[0]], e = 2 > e ? Math.max(e, f) : f, e >= Math.random()) e = 3 * Math.min(1, e) * this.la | 0, c |= 0, d |= 0, f = a.createRadialGradient(c, d, 0, c, d, e), f.addColorStop(0, "rgba(240,246,246,0.16)"), f.addColorStop(1, "rgba(240,246,246,0)"), a.fillStyle = f, a.fillRect(c - e + 1, d - e + 1, 2 * e - 1, 2 * e - 1), this.S++ } bb(this); bb(this); for (b = 0; b < this.L.length; b++) this.L[b].s(a) }; var cb = function (a, b) { a.ka = b; a.aa = 0; a.W = 0; a.I = m }, bb = function (a) { if (!(a.v || a.aa >= a.ka.length)) { var b = a.ka[a.aa].O; if (!a.I) a.I = new S, a.L.push(a.I); a.I.ba.apply(a.I, b[a.W]); a.W++; if (a.W >= b.length) a.W = 0, a.aa++, a.I = m } }, T = function (a, b) { a.v && a.v.ba(b[0], b[1]) }, db = function (a) { a.g.k(window, "touchmove", v(a.Ka, a)); a.g.k(window, "touchstart", v(a.La, a)); a.g.k(window, "touchend", v(a.Ja, a)); a.g.k(window, "mousemove", v(a.Ha, a)); a.g.k(window, "mousedown", v(a.Ga, a)); a.g.k(window, "mouseup", v(a.Ia, a)) }; p = Ya.prototype; p.Ha = function (a) { this.v && !this.ma && T(this, [a.clientX - this.x | 0, a.clientY - this.y | 0]) }; p.Ga = function (a) { if ((a.target || a.srcElement) == this.oa && (0 == a.button || 1 == a.button)) { var b = [a.clientX - this.x | 0, a.clientY - this.y | 0]; this.v = new S; this.L.push(this.v); T(this, b); a.preventDefault(); return o } }; p.Ia = function () { this.v = m }; p.La = function (a) { this.ma = k; a = a.touches.item(0); a = [a.clientX - this.x | 0, a.clientY - this.y | 0]; this.v = new S; this.L.push(this.v); T(this, a) }; p.Ja = function (a) { this.ma = o; this.v = m; a.preventDefault(); return o }; p.Ka = function (a) { var b = a.touches.item(0); T(this, [b.clientX - this.x | 0, b.clientY - this.y | 0]); a.preventDefault(); return o }; p.N = function (a, b, c) { this.oa.width = a; this.oa.height = b; 
this.width = a; this.height = b; ab(this, c) }; var S = function () { this.Qa = this.Oa = 30; this.O = [] }; S.prototype.ba = function (a, b) { this.O.push([a, b]) }; S.prototype.s = function (a) { if (0 != this.O.length) { var b = a.globalCompositeOperation; a.globalCompositeOperation = "destination-out"; a.lineWidth = this.Oa; a.lineCap = "round"; a.lineJoin = "round"; a.beginPath(); var c = this.O[0]; a.moveTo(c[0], c[1] - 1); for (var d = 0; c = this.O[d]; d++) a.lineTo(c[0], c[1]); a.stroke(); a.globalCompositeOperation = b } }; var U = function (a) { this.n = this.o = m; this.g = a; this.D = new P }, eb = function (a) { if (!a.o) return m; var b = document.createElement("button"), c = N(a.o), d = a.o.offsetWidth, a = a.o.offsetHeight; navigator.userAgent.match(/iPad/) && (d = 86, a = 40); document.getElementById("skb") ? (b.className = "lsbb", O(b, { fontSize: "15px", background: "url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAAmCAYAAAAFvPEHAAAABGdBTUEAALGPC/xhBQAAAAFzUkdCAK7OHOkAAAAgY0hSTQAAeiYAAICEAAD6AAAAgOgAAHUwAADqYAAAOpgAABdwnLpRPAAAAAZiS0dEAAAAAAAA+UO7fwAAAAlwSFlzAAALEwAACxMBAJqcGAAAAAl2cEFnAAAAJgAAACYAB/nYBgAAADFJREFUCNd9jDEKACAQw0L//17BKW4iR3ErbVL20ihE4EkgdVAIo7swBe6av7+pWYcD6Xg4BFIWHrsAAAAldEVYdGRhdGU6Y3JlYXRlADIwMTEtMTItMTNUMTA6MTE6MjctMDg6MDD1wN6AAAAAJXRFWHRkYXRlOm1vZGlmeQAyMDExLTEyLTEzVDEwOjExOjI3LTA4OjAwhJ1mPAAAAABJRU5ErkJggg==') repeat-x", color: "#374A82" }), b.innerHTML = "\u2652", c.y -= 1) : (b.innerHTML = "Defrost", O(b, { backgroundColor: "#4d90fe", backgroundImage: "-webkit-,-moz-,-ms-,-o-,,".split(",").join("linear-gradient(top,#4d90fe,#4787ed);"), filter: "progid:DXImageTransform.Microsoft.gradient(startColorStr='#4d90fe',EndColorStr='#4787ed')", border: "1px solid #3079ed", borderRadius: "2px", webkitBorderRadius: "2px", mozBorderRadius: "2px", color: "white", fontSize: "11px", fontWeight: "bold", textAlign: "center", position: "fixed", top: c.y + "px", left: c.x + "px", width: d + "px", height: a + "px", padding: "0 8px", 
zIndex: 1201, opacity: 0 }), 30 < a && O(b, { fontSize: "15px" })); return b }; U.prototype.qa = function (a) { var b = this.o = document.getElementById("gbqfb") || document.getElementById("sblsbb") || document.getElementsByName("btnG")[0]; if (this.o) { this.n = eb(this); this.D.M = this.n; Ua(this.D, function () { b.style.visibility = "hidden" }); var c = this.o.parentNode; if (c && "sbds" == c.id) c.style.width = c.offsetWidth + "px"; this.g.k(this.n, "click", a); document.body.insertBefore(this.n, document.body.firstChild) } }; U.prototype.detach = function () { if (this.o && this.n) this.n.parentNode.removeChild(this.n), this.n = m, this.o.style.visibility = "visible" }; U.prototype.pa = function () { if (this.o && this.n) { var a = N(this.o); this.n.style.top = a.y + "px"; this.n.style.left = a.x + "px" } }; var V = function (a, b) { this.i = b; this.g = a; this.U = this.V = this.a = m; this.va = {}; this.ua = {}; this.p = m; this.D = new P; this.m = m }, fb = function (a) { function b(a) { return d.charAt(a >> 6 & 63) + d.charAt(a & 63) } function c(a) { var c = 0; 0 > a && (c = 32, a = -a); return b(c | a & 31).charAt(1) } for (var d = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_", a = a.i.L, f = [], e = 0, h; h = a[e]; ++e) { var n = []; h = h.O; for (var j = m, t = 0, i; i = h[t]; t++) j && 32 > Math.abs(i[0] - j[0]) && 32 > Math.abs(i[1] - j[1]) ? (j = [i[0] - j[0], i[1] - j[1]], n.push(c(j[0]) + c(j[1]))) : n.push((0 == t ? "" : ";") + (b(i[0]) + b(i[1]))), j = i; f.push(n.join("")) } return "1;" + f.join("!") }, gb = function (a) { function b(a) { var b = String.fromCharCode(a); return "A" <= b && "Z" >= b ? a - 65 : "a" <= b && "z" >= b ? a - 97 + 26 : "0" <= b && "9" >= b ? a - 48 + 52 : "-" == b ? 62 : "_" == b ? 63 : m } function c(a, c) { var d = b(a.charCodeAt(c)), e = b(a.charCodeAt(c + 1)); return d === m || e === m ? 
m : d << 6 | e } function d(a, c) { var d = b(a.charCodeAt(c)); if (d === m) return m; d & 32 && (d = -(d & 31)); return d } var f = /&frostwriting=([A-Za-z0-9-_!;]+)/.exec(window.location.href); if (!f) return o; var f = f[1].split("!"), e = [], h = 0; 0 < f.length && "1;" == f[0].substr(0, 2) && (f[0] = f[0].substr(2), h = 1); for (var n = 0, j; j = f[n]; ++n) { for (var t = new S, i = m, l = 0; l < j.length;) { var s = o; if (";" == j.charAt(l)) { if (0 == h) return o; l++; s = k } if (0 == h || !i || s) { if (l + 3 >= j.length) return o; i = c(j, l); s = c(j, l + 2); if (i === m || s === m) return o; t.ba(i, s); i = [i, s]; l += 4 } else { if (l + 1 >= j.length) return o; var s = d(j, l), ja = d(j, l + 1); if (s === m || ja === m) return o; t.ba(i[0] + s, i[1] + ja); i = [i[0] + s, i[1] + ja]; l += 2 } } e.push(t) } cb(a.i, e); return k }, hb = function () { return -1 == window.location.hash.indexOf("&fp=") ? window.location.href : window.location.protocol + "//" + window.location.host + "/search?" + window.location.hash.substr(1).replace(/&fp=[0-9a-z]+/, "") }; V.prototype.wa = function (a) { a.stopPropagation(); a.preventDefault(); return o }; V.prototype.Fa = function (a) { var b = fb(this), c = hb() + "&frostwriting=" + b; if ("1;" == b) W(this, hb(), "Draw something on your window. #letitsnow"); else if (480 < c.length) { if (this.m !== m) clearTimeout(this.m), this.m = m; google.letitsnowGCO = v(this.Na, this); ib(c); this.m = setTimeout(v(function () { W(this, window.location.href, "My drawing is too complex to share, but you should try this out and have fun, anyway. #letitsnow"); this.m = m }, this), 5E3) } else W(this, c, "Check out what I drew. 
#letitsnow"); return this.wa(a) }; var W = function (a, b, c) { if (a.m !== m) clearTimeout(a.m), a.m = m; gbar.asmc(function () { return { items: [{ properties: { url: [b], name: ["Google - Let it snow!"], image: ["http://www.google.com/images/logos/google_logo_41.png"], description: [c] } }] } }); if (document.createEvent) { var d = document.createEvent("MouseEvents"); d.initEvent("click", k, o); a.V.dispatchEvent(d) } else a.V.fireEvent && a.V.fireEvent("onclick") }; V.prototype.qa = function () { this.a = document.getElementById("gbgs3"); this.U = document.getElementById("gbwc"); this.V = document.getElementById("gbg3"); if (this.a && this.U && this.V) { this.p = document.createElement("div"); O(this.p, { height: this.a.offsetHeight + "px", width: this.a.offsetWidth + "px" }); this.a.parentNode.insertBefore(this.p, this.a); var a = N(this.a); this.va = O(this.a, { font: "13px/27px Arial,sans-serif", left: a.x + "px", position: "fixed", top: a.y - this.a.offsetHeight + "px", zIndex: 1201 }); this.ua = O(this.U, { background: "#fff", zIndex: 1201 }); this.a.parentNode.removeChild(this.a); document.body.appendChild(this.a); "SPAN" == this.a.tagName ? this.a.style.lineHeight = "20px" : this.D.M = this.a; this.g.k(this.a, "click", v(this.Fa, this)); this.g.k(this.a, "mousedown", v(this.wa, this)) } }; V.prototype.detach = function () { if (this.a && this.U) { this.m !== m && clearTimeout(this.m); O(this.a, this.va); O(this.U, this.ua); this.D.restore(); for (var a = this.g, b = this.a, c = 0; c < a.H.length; ++c) { var d = a.H[c]; d && d[0] == b && (Wa.apply(m, d), a.H[c] = m) } this.a.parentNode.removeChild(this.a); this.p.parentNode.insertBefore(this.a, this.p); this.p.parentNode.removeChild(this.p); this.p = this.a = m } }; V.prototype.pa = function () { if (this.a && this.p) this.a.style.left = N(this.p).x + "px" }; V.prototype.Na = function (a) { a && "OK" == a.status && !a.error && a.id && (clearTimeout(this.m), W(this, a.id, "Check out what I drew. 
#letitsnow")) }; var ib = function (a) { var a = a.replace(window.location.host, "www.google.com"), b = document.createElement("script"); b.src = "//google-doodles.appspot.com/?callback=google.letitsnowGCO&url=" + encodeURIComponent(a); document.body.appendChild(b) }; var jb = Math.floor(60), kb = Math.floor(300), X = function () { this.J = this.i = this.K = this.h = this.A = m; this.Z = this.ia = o; this.g = this.F = m; this.ha = o; this.T = 0; this.ja = this.R = m; this.Da = window.opera || navigator.userAgent.match(/MSIE/) ? jb : kb }, Y = "goog.egg.snowyfog.Snowyfog".split("."), Z = q; !(Y[0] in Z) && Z.execScript && Z.execScript("var " + Y[0]); for (var $; Y.length && ($ = Y.shift());)!Y.length && X !== g ? Z[$] = X : Z = Z[$] ? Z[$] : Z[$] = {}; X.prototype.init = function () { var a = this, b = function () { document.getElementById("snfloader_script") && (!document.getElementById("foot") && !document.getElementById("bfoot") ? window.setTimeout(b, 50) : (google.rein && google.dstr && (google.rein.push(v(a.Aa, a)), google.dstr.push(v(a.Pa, a))), a.Aa())) }; b() }; X.prototype.init = X.prototype.init; X.prototype.Aa = function () { if (!google || !google.snowyfogInited) { google.snowyfogInited = k; var a = document.createElement("canvas"); document.body.insertBefore(a, document.body.firstChild); this.h = a; O(this.h, { pointerEvents: "none", position: "fixed", top: "0", left: "0", zIndex: 1200 }); this.h.width = window.innerWidth; this.h.height = window.innerHeight; this.T = 0; this.ha = this.Z = this.ia = o; this.g = new Va; this.A = document.createElement("div"); a = window.opera || navigator.userAgent.match(/MSIE/) ? 
0 : 800; O(this.A, { pointerEvents: "none", position: "absolute", zIndex: a, width: document.body.clientWidth + "px", height: Math.max(window.innerHeight, document.body.clientHeight) + "px", overflow: "hidden" }); document.body.insertBefore(this.A, document.body.firstChild); this.K = new R(this.A); this.i = new Ya(this.g, this.h, 0, 0); this.F = new V(this.g, this.i);<|fim▁hole|> this.J = new U(this.g); a = v(this.K.Ca, this.K); this.g.k(window, "resize", v(this.N, this)); this.g.k(window, "deviceorientation", a); this.g.k(window, "MozOrientation", a); this.R = this.h.getContext("2d"); gb(this.F) && (ab(this.i, this.R), lb(this)); this.ja = v(this.Ba, this); window.setTimeout(this.ja, 50) } }; X.prototype.Pa = function () { this.ha = k; for (var a = this.g, b = 0; b < a.H.length; ++b) { var c = a.H[b]; c && (Wa.apply(m, c), a.H[b] = m) } this.J.detach(); this.F.detach(); if (this.h) this.h.parentNode.removeChild(this.h), this.h = m; if (this.A) this.A.parentNode.removeChild(this.A), this.A = m }; var lb = function (a) { if (!a.ia) a.ia = k, a.h.style.pointerEvents = "auto", Ta(), db(a.i), a.F.qa(), a.J.qa(v(function (a) { this.Z = o; this.i = m; this.h && this.h.parentNode.removeChild(this.h); this.h = m; this.J.detach(); this.F.detach(); a.stopPropagation() }, a)) }, mb = function (a) { a.K.s(); if (a.Z) { var b = a.i; if (!(580.8 > b.S) && !(b.$ || 88 < b.ga)) { for (var c = b.S = 0, d = 0; 66 > d; d++) for (var f = 0; 88 > f; f++) b.w[d][f] += b.G[d][f], 3.5 <= b.w[d][f] && c++; b.ga++; if (c >= 70.4 * 66) b.$ = k; else for (d = 0; 66 > d; d++) for (f = 0; 88 > f; f++) if (c = 4 - b.w[d][f], c > 4 * Math.random() && 0.7 < Math.random()) { var e = Math.min(1, 3 * $a(b, f, d)) * Math.random(); b.G[d][f] = c * e } else b.G[d][f] = 0 } a.i.s(a.R); a.J.D.s(); a.F.D.s() } }; X.prototype.Ba = function () { if (!this.ha) { window.setTimeout(this.ja, 50); this.T++; if (this.T == jb) this.Z = k; this.T == this.Da && lb(this); mb(this) } }; X.prototype.N = function () { 
this.K && this.K.N(document.body.offsetWidth, Math.max(document.body.offsetHeight, window.innerHeight)); this.i && (!navigator.userAgent.match(/iPad/) || this.T > jb) && this.i.N(window.innerWidth, window.innerHeight, this.R); lb(this); this.J.pa(); this.F.pa(); this.A && this.R && mb(this) }; X.prototype.resize = X.prototype.N; })();<|fim▁end|>
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>use ::NailResult; use regex::Regex; use ssh::KeyExchangeInit; use std::fmt; pub enum CipherBlockSize { Eight, Sixteen, } impl From<u8> for CipherBlockSize { fn from(v: u8) -> CipherBlockSize { match v { 16 => CipherBlockSize::Sixteen, _ => CipherBlockSize::Eight, } } } impl From<CipherBlockSize> for u8 { fn from(cbs: CipherBlockSize) -> u8 { match cbs { CipherBlockSize::Eight => 8, CipherBlockSize::Sixteen => 16, } } } #[derive(PartialEq)] pub enum KeyExchangeAlgorithm { Curve25519Sha256, ExchangeSha1, ExchangeSha256, FourteenSha1, OneSha1, } use self::KeyExchangeAlgorithm::{Curve25519Sha256, ExchangeSha1, ExchangeSha256, FourteenSha1, OneSha1}; impl fmt::Display for KeyExchangeAlgorithm { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let kex_str = match *self { Curve25519Sha256 => "[email protected]", ExchangeSha1 => "diffie-hellman-group-exchange-sha1", ExchangeSha256 => "diffie-hellman-group-exchange-sha256", FourteenSha1 => "diffie-hellman-group14-sha1", OneSha1 => "diffie-hellman-group1-sha1", }; write!(f, "{}", kex_str) } } #[derive(PartialEq)] pub enum ServerHostKeyAlgorithm { PgpSignDss, PgpSignRsa, SshDss, SshEd25519, SshRsa, } use self::ServerHostKeyAlgorithm::{PgpSignDss, PgpSignRsa, SshDss, SshEd25519, SshRsa}; impl fmt::Display for ServerHostKeyAlgorithm { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let shk_str = match *self { PgpSignDss => "pgp-sign-dss", PgpSignRsa => "pgp-sign-rsa", SshDss => "ssh-dss", SshRsa => "ssh-rsa", SshEd25519 => "ssh-ed25519", }; write!(f, "{}", shk_str) } } #[derive(PartialEq)] pub enum EncryptionAlgorithm { TripleDesCbc, BlowfishCbc, Twofish256Cbc, Twofish192Cbc, Twofish128Cbc, Aes256Cbc, Aes192Cbc, Aes128Cbc, Serpent256Cbc, Serpent192Cbc, Serpent128Cbc, Arcfour, IdeaCbc, Cast128Cbc, NoEnc, } use self::EncryptionAlgorithm::{TripleDesCbc, BlowfishCbc, Twofish256Cbc, Twofish192Cbc, Twofish128Cbc, Aes256Cbc, Aes192Cbc, Aes128Cbc, Serpent256Cbc, 
Serpent192Cbc, Serpent128Cbc, Arcfour, IdeaCbc, Cast128Cbc, NoEnc}; impl fmt::Display for EncryptionAlgorithm { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let enc_str = match *self { TripleDesCbc => "3des-cbc", BlowfishCbc => "blowfish-cbc", Twofish256Cbc => "twofish256-cbc", Twofish192Cbc => "twofish192-cbc", Twofish128Cbc => "twofish128-cbc", Aes256Cbc => "aes256-cbc", Aes192Cbc => "aes192-cbc", Aes128Cbc => "aes128-cbc", Serpent256Cbc => "serpent256-cbc", Serpent192Cbc => "serpent192-cbc", Serpent128Cbc => "serpent128-cbc", Arcfour => "arcfour", IdeaCbc => "idea-cbc", Cast128Cbc => "cast128-cbc", NoEnc => "none", }; write!(f, "{}", enc_str) } } #[derive(PartialEq)] pub enum MacAlgorithm { HmacSha1, HmacSha1NinetySix, HmacMd5, HmacMd5NinetySix, NoMac, } use self::MacAlgorithm::{HmacSha1, HmacSha1NinetySix, HmacMd5, HmacMd5NinetySix, NoMac}; impl fmt::Display for MacAlgorithm { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let mac_str = match *self { HmacSha1 => "hmac-sha1", HmacSha1NinetySix => "hmac-sha1-96", HmacMd5 => "hmac-md5", HmacMd5NinetySix => "hmac-md5-96", NoMac => "none", }; write!(f, "{}", mac_str) } } #[derive(PartialEq)] pub enum CompressionAlgorithm { Xz, Zlib, NoComp, } use self::CompressionAlgorithm::{Xz, Zlib, NoComp}; impl fmt::Display for CompressionAlgorithm { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { let comp_str = match *self { Xz => "xz", Zlib => "zlib", NoComp => "none", }; write!(f, "{}", comp_str) } } pub const VERSION: Option<&'static str> = option_env!("CARGO_PKG_VERSION"); lazy_static! 
{ pub static ref VER_EXCH_PROTO: Regex = Regex::new(r#"^SSH-2.0-([\x21-\x2C\x2E-\x7E]+)([ ][\x21-\x2C\x2E-\x7E]+)?\r\n$"#).unwrap(); } #[derive(Default)] pub struct VersionExchange { server_version: String, client_version: String, } impl VersionExchange { #[allow(dead_code)] pub fn client_version(&self) -> &String { &self.client_version } #[allow(dead_code)] pub fn server_version(&self) -> &String { &self.server_version } pub fn set_client_version(&mut self, client_version: String) -> &mut VersionExchange { self.client_version = client_version; self } pub fn set_server_version(&mut self, server_version: String) -> &mut VersionExchange { self.server_version = server_version; self } } impl fmt::Display for VersionExchange { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Server: {}Client: {}", self.server_version, self.client_version) } } #[derive(Default)] pub struct KeyExchange { version_exchange: VersionExchange, server_key_exchange_init: Option<KeyExchangeInit>, client_key_exchange_init: Option<KeyExchangeInit>,<|fim▁hole|> key_exchange_algorithm: Option<KeyExchangeAlgorithm>, server_host_key_algorithm: Option<ServerHostKeyAlgorithm>, s2c_encryption_algorithm: Option<EncryptionAlgorithm>, c2s_encryption_algorithm: Option<EncryptionAlgorithm>, s2c_mac_algorithm: Option<MacAlgorithm>, c2s_mac_algorithm: Option<MacAlgorithm>, s2c_compression_algorithm: Option<CompressionAlgorithm>, c2s_compression_algorithm: Option<CompressionAlgorithm>, } impl KeyExchange { pub fn new(version_exchange: VersionExchange) -> KeyExchange { KeyExchange { version_exchange: version_exchange, ..Default::default() } } pub fn setup(&mut self, ckex: KeyExchangeInit, skex: KeyExchangeInit) -> NailResult<&mut KeyExchange> { { let skexa = skex.kex_algorithms(); let ckexa = ckex.kex_algorithms(); } self.client_key_exchange_init = Some(ckex); self.server_key_exchange_init = Some(skex); Ok(self) } } #[cfg(test)] mod test { use super::VER_EXCH_PROTO; #[test] fn 
version_exchange_regex() { assert!(VER_EXCH_PROTO.is_match("SSH-2.0-testclient_1.0\r\n")); assert!(VER_EXCH_PROTO.is_match("SSH-2.0-nailed_0.0.1 Prototype\r\n")); assert!(VER_EXCH_PROTO.is_match("SSH-2.0-nailed_0.0.1 Prototype_@Cool\r\n")); assert!(!VER_EXCH_PROTO.is_match("SSH-1.0-someold\r\n")); assert!(!VER_EXCH_PROTO.is_match("SSH-2.0-No-dashes-allowed\r\n")); assert!(!VER_EXCH_PROTO.is_match("SSH-2.0-nailed_0.0.1 or spaces\r\n")); assert!(!VER_EXCH_PROTO.is_match("SSH-2.0-or\ttabs\r\n")); assert!(!VER_EXCH_PROTO.is_match("SSH-2.0-nailed_1.0.0 Or-dashes-here\r\n")); assert!(!VER_EXCH_PROTO.is_match("SSH-2.0-nailed_1.0.0 Or spaces\r\n")); assert!(!VER_EXCH_PROTO.is_match("SSH-2.0-nailed_1.0.0")); assert!(!VER_EXCH_PROTO.is_match("SSH-2.0-nailed_1.0.0\n")); assert!(!VER_EXCH_PROTO.is_match("SSH-2.0-nailed_1.0.0\r")); } }<|fim▁end|>
<|file_name|>docserver.py<|end_file_name|><|fim▁begin|>from __future__ import print_function import flask import os import threading import time import webbrowser from tornado.wsgi import WSGIContainer from tornado.httpserver import HTTPServer from tornado.ioloop import IOLoop _basedir = os.path.join("..", os.path.dirname(__file__)) <|fim▁hole|>PORT=5009 http_server = HTTPServer(WSGIContainer(app)) """this is a simple server to facilitate developing the docs. by serving up static files from this server, we avoid the need to use a symlink. """ @app.route('/') def welcome(): return """ <h1>Welcome to the Bokeh documentation server</h1> You probably want to go to <a href="/en/latest/index.html"> Index</a> """ @app.route('/en/latest/<path:filename>') def send_pic(filename): return flask.send_from_directory( os.path.join(_basedir,"sphinx/_build/html/"), filename) def open_browser(): # Child process time.sleep(0.5) webbrowser.open("http://localhost:%d/en/latest/index.html" % PORT, new="tab") def serve_http(): http_server.listen(PORT) IOLoop.instance().start() def shutdown_server(): ioloop = IOLoop.instance() ioloop.add_callback(ioloop.stop) print("Asked Server to shut down.") def ui(): time.sleep(0.5) input("Press <ENTER> to exit...\n") if __name__ == "__main__": print("\nStarting Bokeh plot server on port %d..." % PORT) print("Visit http://localhost:%d/en/latest/index.html to see plots\n" % PORT) t_server = threading.Thread(target=serve_http) t_server.start() t_browser = threading.Thread(target=open_browser) t_browser.start() ui() shutdown_server() t_server.join() t_browser.join() print("Server shut down.")<|fim▁end|>
app = flask.Flask(__name__, static_path="/unused")
<|file_name|>org-edit.js<|end_file_name|><|fim▁begin|>'use strict'; angular.module('BaubleApp') .controller('OrgEditCtrl', ['$scope', '$location', 'Alert', 'User', 'Organization', function ($scope, $location, Alert, User, Organization) {<|fim▁hole|> Organization.save(org) .success(function(data, status, headers, config) { $scope.user = User.local(); $scope.user.organization_id = data.id; User.local($scope.user); // TODO: we should probably return to where we came from $location.path('/'); }) .error(function(data, status, headers, config) { var defaultMessage = "Could not save organization."; Alert.onErrorResponse(data, defaultMessage); }) .finally(function() { $scope.working = false; }); }; }]);<|fim▁end|>
$scope.save = function(org){ $scope.working = true;
<|file_name|>saveRetrospectiveSurveyResponses.test.js<|end_file_name|><|fim▁begin|>/* eslint-env mocha */ /* global expect, testContext */ /* eslint-disable prefer-arrow-callback, no-unused-expressions */ import factory from 'src/test/factories' import {resetDB, runGraphQLMutation, useFixture} from 'src/test/helpers' import {Cycle} from 'src/server/services/dataService' import {COMPLETE, PRACTICE} from 'src/common/models/cycle' import fields from '../index' describe(testContext(__filename), function () { useFixture.buildOneQuestionSurvey() useFixture.buildSurvey() beforeEach(resetDB) beforeEach(async function () { await this.buildSurvey() this.user = await factory.build('user', {id: this.project.memberIds[0]}) this.respondentId = this.project.memberIds[0] this.invokeAPI = function () { const responses = this.project.memberIds.slice(1).map(memberId => ({ values: [{subjectId: memberId, value: 'foo'}], questionId: this.surveyQuestion.id, surveyId: this.survey.id, respondentId: this.respondentId, })) return runGraphQLMutation( `mutation($responses: [SurveyResponseInput]!) { saveRetrospectiveSurveyResponses(responses: $responses) { createdIds } }`, fields, {responses}, {currentUser: this.user}, ) } }) it('returns new response ids for all responses created in REFLECTION state', function () { return this.invokeAPI() .then(result => result.data.saveRetrospectiveSurveyResponses.createdIds) .then(createdIds => expect(createdIds).have.length(this.project.memberIds.length - 1)) }) <|fim▁hole|> .then(result => result.data.saveRetrospectiveSurveyResponses.createdIds) .then(createdIds => expect(createdIds).have.length(this.project.memberIds.length - 1)) }) it('returns an error when in PRACTICE state', async function () { await Cycle.get(this.cycleId).updateWithTimestamp({state: PRACTICE}) return expect(this.invokeAPI()) .to.be.rejectedWith(/cycle is in the PRACTICE state/) }) })<|fim▁end|>
it('returns new response ids for all responses created in COMPLETE state', async function () { await Cycle.get(this.cycleId).updateWithTimestamp({state: COMPLETE}) return this.invokeAPI()
<|file_name|>iconhead_plugin.js<|end_file_name|><|fim▁begin|>var iconhead={ title:"Icon Heading Shortcode", id :'oscitas-form-iconhead', pluginName: 'iconhead', setRowColors:false }; (function() { _create_tinyMCE_options(iconhead,800); })(); function create_oscitas_iconhead(pluginObj){ if(jQuery(pluginObj.hashId).length){ jQuery(pluginObj.hashId).remove(); } // creates a form to be displayed everytime the button is clicked // you should achieve this using AJAX instead of direct html code like this var iconhead_fa=''; /*if(ebs.ebs_fa_inclusion==1){ iconhead_fa='<h4>Font Awesome</h4><ul name="oscitas-heading-icon_servicebox" class="oscitas-heading-icon">'+ebsfaicons+'</ul>'; }*/ iconhead_fa='<h4>Font Awesome</h4><ul name="oscitas-heading-icon_servicebox" class="oscitas-heading-icon">'+ebsfaicons+'</ul>'; // creates a form to be displayed everytime the button is clicked // you should achieve this using AJAX instead of direct html code like this var form = jQuery('<div id="'+pluginObj.id+'" class="oscitas-container" title="'+pluginObj.title+'"><table id="gallery-table" class="form-table">\ <tr>\ <th><label for="oscitas-heading-icon">Select Icon:</label></th>\ <td><div id="click_icon_list" class="oscitas-icon-div"><span id="osc_show_icon"></span><span class="show-drop"></span></div><input type="hidden" id="oscitas-iconhead-icon" value=""><input type="hidden" id="oscitas-iconhead-icontype" value="">\ <div id="osc_show_iconlist" class="oscitas-icon" style="display:none;width:100%"><h4>Glyphicons</h4><ul name="oscitas-heading-icon_servicebox" class="oscitas-heading-icon">'+ebsicons+'</ul>'+iconhead_fa+'</div>\ </td>\ </tr>\ <tr>\ <th><label for="oscitas-iconhead-iconcolor">Icon Color:</label></th>\ <td><input type="text" name="label" id="oscitas-iconhead-iconcolor" class="color" value="" /><br />\ </td>\ </tr>\ <tr>\ <th><label for="oscitas-iconhead-headingtype">Heading Type:</label></th>\ <td><select name="oscitas-iconhead-headingtype" id="oscitas-iconhead-headingtype">\ 
<option value="h1">H1</option>\ <option value="h2">H2</option>\ <option value="h3">H3</option>\ <option value="h4">H4</option>\ <option value="h5">H5</option>\ <option value="h6">H6</option>\ </select><br />\ </td>\ </tr>\<|fim▁hole|> <td><input type="text" name="oscitas-iconhead-heading" id="oscitas-iconhead-heading" value="Heading"/><br />\ </td>\ </tr>\ <tr>\ <th><label for="oscitas-iconhead-class">Custom Class:</label></th>\ <td><input type="text" name="line" id="oscitas-iconhead-class" value=""/><br />\ </td>\ </tr>\ </table>\ <p class="submit">\ <input type="button" id="oscitas-iconhead-submit" class="button-primary" value="Insert Icon Heading" name="submit" />\ </p>\ </div>'); var table = form.find('table'); jQuery('.glyphicon').css('display','inline'); form.appendTo('body').hide(); form.find('.color').wpColorPicker(); table.find('#click_icon_list').click(function(){ if(!jQuery(this).hasClass('osc_icon_showing')){ jQuery(this).addClass('osc_icon_showing') table.find('#osc_show_iconlist').show(); } else{ jQuery(this).removeClass('osc_icon_showing') table.find('#osc_show_iconlist').hide(); } }); table.find('.oscitas-heading-icon li').click(function(){ var val=jQuery(this).attr('data-value'); var type=jQuery(this).attr('type'); table.find('.oscitas-heading-icon li').removeClass('osc_icon_selected'); jQuery(this).addClass('osc_icon_selected'); table.find('#click_icon_list').removeClass('osc_icon_showing'); table.find('#osc_show_iconlist').hide(); table.find('#osc_show_icon').removeClass().addClass(type).addClass(val); table.find('#oscitas-iconhead-icon').val(val); table.find('#oscitas-iconhead-icontype').val(type); }); // // handles the click event of the submit button form.find('#oscitas-iconhead-submit').click(function() { // defines the options and their default values // again, this is not the most elegant way to do this // but well, this gets the job done nonetheless var type=jQuery('#oscitas-iconhead-headingtype').val(); var cusclass='',style=''; 
if(table.find('#oscitas-iconhead-icon').val()!=''){ style=' style="' + table.find('#oscitas-iconhead-icon').val()+'"' ; } if(table.find('#oscitas-iconhead-icontype').val()!=''){ style+=' icontype="' + table.find('#oscitas-iconhead-icontype').val()+'"' ; } if(table.find('#oscitas-iconhead-iconcolor').val()!=''){ cusclass+= ' color="'+table.find('#oscitas-iconhead-iconcolor').val()+'"'; } if(table.find('#oscitas-iconhead-class').val()!=''){ cusclass+= ' class="'+table.find('#oscitas-iconhead-class').val()+'"'; } var shortcode = '[iconheading type="'+type+'"'; shortcode += style+cusclass ; shortcode += ']'+table.find('#oscitas-iconhead-heading').val()+'[/iconheading]' ; // inserts the shortcode into the active editor tinyMCE.activeEditor.execCommand('mceInsertContent',0 , shortcode); // closes Dialoguebox close_dialogue(pluginObj.hashId); }); }<|fim▁end|>
<tr>\ <th><label for="oscitas-iconhead-heading">Heading:</label></th>\
<|file_name|>access_q_fail.cpp<|end_file_name|><|fim▁begin|>//Copyright (c) 2008-2010 Emil Dotchevski and Reverge Studios, Inc. //Distributed under the Boost Software License, Version 1.0. (See accompanying //file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt) #include <boost/qvm/q_access.hpp> <|fim▁hole|> namespace boost { namespace qvm { template <> struct q_traits<my_quat> { typedef int scalar_type; template <int I> static int r( my_quat const & ); template <int I> static int & w( my_quat & ); }; } } int main() { using namespace boost::qvm; my_quat const q=my_quat(); q%V%A<3>(); return 1; }<|fim▁end|>
#include <boost/qvm/v_access.hpp> struct my_quat { };
<|file_name|>839_similar-string-groups.py<|end_file_name|><|fim▁begin|>import collections class Solution: def numSimilarGroups(self, A): UF = {} for i in range(len(A)): UF[i] = i def find(x): if x != UF[x]: UF[x] = find(UF[x]) return UF[x] def union(x, y): UF.setdefault(x, x) UF.setdefault(y, y) UF[find(x)] = find(y) def match(s1, s2): i = 0 j = -1<|fim▁hole|> i += 1 return s1[i+1:] == s2[i+1:] N, W = len(A), len(A[0]) if N < W*W: for i in range(len(A)): UF[i] = i for i in range(len(A)): for j in range(i+1, len(A)): if match(A[i], A[j]): union(i, j) else: d = collections.defaultdict(set) for idx, w in enumerate(A): lw = list(w) for i in range(W): for j in range(i+1, W): lw[i], lw[j] = lw[j], lw[i] d["".join(lw)].add(idx) lw[i], lw[j] = lw[j], lw[i] for i, w in enumerate(A): for j in d[w]: union(i, j) return len({find(x) for x in UF}) print(Solution().numSimilarGroups(["tars","rats","arts","star"]))<|fim▁end|>
while i<len(s1): if s1[i] != s2[i]: if j == -1: j = i else: break
<|file_name|>main.cpp<|end_file_name|><|fim▁begin|>#include "version.h" #include <boost/filesystem/operations.hpp> #include <iostream> #define PCRE2_CODE_UNIT_WIDTH 8 #include <pcre2.h> #include "file.h" #include "options.h" #include "parser.h" using namespace boost::filesystem; using namespace std; int errorcode; PCRE2_SIZE erroffset; inline unsigned int match(Parser* parser, Options* options) { static const string color_filename {"\x1B[34;1m\x1B[K"}; // 32=blue, 1=bold static const string color_match {"\x1B[31;1m\x1B[K"}; // 31=red, 1=bold static const string color_lineno {"\x1B[32;1m\x1B[K"}; // 33=green, 1=bold static const string color_normal {"\x1B[0m\x1B[K"}; // Reset/normal (all attributes off). PCRE2_SIZE *ovector; auto tmp = strndupa(parser->recordBegin(), parser->recordLength()); auto printLine = false; for(auto const& linePattern: options->linePatterns()) {<|fim▁hole|> while ((strlen(tmp) > 0) && ((rc = pcre2_jit_match(linePattern, reinterpret_cast<PCRE2_SPTR>(tmp), strlen(tmp), 0, 0, match_data, nullptr)) > 0)) { if (!printLine && options->fileName()) { cerr << color_filename; cout << parser->fileName() << ":"; cerr << color_normal; } if (!printLine && options->lineNumber()) { cerr << color_lineno; cout << parser->recordNumber() << ":"; cerr << color_normal; } printLine = true; ovector = pcre2_get_ovector_pointer(match_data); for (int i = 0; i < rc; i++) { cout << string(tmp, ovector[2 * i]); cerr << color_match; cout << string(tmp + ovector[2 * i], ovector[2 * i + 1] - ovector[2 * i]); cerr << color_normal; tmp = tmp + ovector[2 * i + 1]; } } pcre2_match_data_free(match_data); } if (printLine) { cout << tmp << endl; return 1; } return 0; } int main(int argc, const char **argv) { Options options; try { options.run(argc, argv); } catch (exception &e) { cerr << e.what() << endl; return 2; } if (options.version()) { cout << "techlog "<< VERSION_MAJOR << "." 
<< VERSION_MINOR << endl; cout << LICENSE << endl; cout << WRITTENBY << endl; return 1; } if (options.linePatterns().empty() && options.propertyPatterns().empty() && options.events().empty()) { cout << "usage: techlog [-ifl] [-s num] [-p pattern] [--event] [--property=pattern] [pattern]" << endl; if (!options.help() && !options.helpEvents()) { return 1; } } if (options.help()) { cout << options.visibleOptions() << endl; return 1; } if (options.helpEvents()) { cout << options.eventOptions() << endl; return 1; } bool isRegularFile; long unsigned linesSelected = 0; boost::system::error_code ec; pcre2_code *fileNamePattern = pcre2_compile(reinterpret_cast<PCRE2_SPTR>("^\\d{8}\\.log$"), PCRE2_ZERO_TERMINATED, PCRE2_CASELESS, &errorcode, &erroffset, nullptr); pcre2_jit_compile(fileNamePattern, PCRE2_JIT_COMPLETE); pcre2_match_data *fileNameMatchData = pcre2_match_data_create_from_pattern(fileNamePattern, nullptr); for (recursive_directory_iterator it("./"); it != recursive_directory_iterator();) { try { isRegularFile = is_regular_file(it->path()); } catch (const filesystem_error& ex) { isRegularFile = false; } if (isRegularFile && (pcre2_jit_match(fileNamePattern, reinterpret_cast<PCRE2_SPTR>(it->path().filename().c_str()), it->path().filename().size(), 0, 0, fileNameMatchData, nullptr) > 0)) { File file(it->path().c_str()); Parser parser(&file); while (parser.next()) { linesSelected += match(&parser, &options); if (options.stopAfter() > 0 && linesSelected == options.stopAfter()) { break; } } if (options.stopAfter() > 0 && linesSelected == options.stopAfter()) { break; } } it.increment(ec); } pcre2_match_data_free(fileNameMatchData); pcre2_code_free(fileNamePattern); return linesSelected > 0 ? 0 : 1; }<|fim▁end|>
pcre2_match_data *match_data = pcre2_match_data_create_from_pattern(linePattern, nullptr); auto rc = 0;
<|file_name|>individual.py<|end_file_name|><|fim▁begin|>import logging from ..report.individual import IndividualReport <|fim▁hole|> def __init__(self, test): self.test = test async def generate(self, parent): test_group = None try: test_group = self.test(parent.filename) except OSError as e: parent.report.valid = False parent.report.reports.append(IndividualReport("FileValid", 0, {'error': str(e)})) return for test in test_group._tests_: self.logger.info("Starting Test: {}".format(test)) try: result, status = getattr(test_group, test)() parent.report.reports.append(IndividualReport(test, status, result)) # TODO: Figure out what to do next except Exception as e: self.logger.warning("failed test") parent.report.valid = False parent.report.reports.append(IndividualReport(test, 0, {'error': str(e)}))<|fim▁end|>
class IndividualGenerator(object): logger = logging.getLogger("ddvt.rep_gen.ind")
<|file_name|>transparent.py<|end_file_name|><|fim▁begin|>#!/usr/local/bin/python3 # This work is licensed under the Creative Commons Attribution 3.0 United # States License. To view a copy of this license, visit # http://creativecommons.org/licenses/by/3.0/us/ or send a letter to Creative # Commons, 171 Second Street, Suite 300, San Francisco, California, 94105, USA. # from http://oranlooney.com/make-css-sprites-python-image-library/ # Orignial Author Oran Looney <[email protected]> #mods by Josh Gourneau <[email protected]> to make one big horizontal sprite JPG with no spaces between images import os<|fim▁hole|>from PIL import Image import glob start_dir = "images/full_sprites/opaque/kanto/" end_dir = "images/full_sprites/transparent/kanto/" #get your images using glob iconmap = os.listdir(start_dir) #iconMap = sorted(iconMap) print(len(iconmap)) for filename in iconmap: image = Image.open(start_dir+filename) image_width, image_height = image.size print( "the image will by %d by %d" % (image_width, image_height)) print( "creating image...") master = Image.new( mode='RGBA', size=(image_width, image_height), color=(0,0,0,0)) # fully transparent master.paste(image,(0,0)) data = master.getdata() newdata = [] for item in data: if item[0] == 255 and item[1] == 255 and item[2] == 255: newdata.append((255,255,255,0)) else: newdata.append(item) master.putdata(newdata) print( "saving master.jpg...") master.save(end_dir+filename) print( "saved!")<|fim▁end|>
<|file_name|>discovery_tests.py<|end_file_name|><|fim▁begin|>from proboscis import test @test(groups=['benchmark.discovery']) class BenchmarkDiscoveryTests(object): def __init__(self):<|fim▁hole|><|fim▁end|>
pass
<|file_name|>Rgaa30Rule120401.java<|end_file_name|><|fim▁begin|>/* * Asqatasun - Automated webpage assessment * Copyright (C) 2008-2019 Asqatasun.org * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. * * Contact us by mail: asqatasun AT asqatasun DOT org */ package org.asqatasun.rules.rgaa30;<|fim▁hole|> /** * Implementation of the rule 12.4.1 of the referential Rgaa 3.0. * * For more details about the implementation, refer to <a href="http://doc.asqatasun.org/en/90_Rules/rgaa3.0/12.Navigation/Rule-12-4-1.html">the rule 12.4.1 design page.</a> * @see <a href="http://references.modernisation.gouv.fr/referentiel-technique-0#test-12-4-1"> 12.4.1 rule specification</a> */ public class Rgaa30Rule120401 extends AbstractNotTestedRuleImplementation { /** * Default constructor */ public Rgaa30Rule120401 () { super(); } }<|fim▁end|>
import org.asqatasun.ruleimplementation.AbstractNotTestedRuleImplementation;
<|file_name|>testMixin.js<|end_file_name|><|fim▁begin|>'use strict'; function A() { this._va = 0; console.log('A'); } A.prototype = { va: 1, fa: function() { console.log('A->fa()'); } }; function B() { this._vb = 0; console.log('B'); } B.prototype = { vb: 1, fb: function() { console.log('B->fb()'); } }; function C() { this._vc = 0; console.log('C');<|fim▁hole|> fc: function() { console.log('C->fc()'); } }; function D(){ this._vd = 0; console.log('D'); } D.prototype = { vd: 1, fd: function() { this.fa(); this.fb(); this.fc(); console.log('D->fd()'); } }; var mixin = require('../mixin'); D = mixin(D, A); D = mixin(D, B); D = mixin(D, C); var d = new D(); console.log(d); console.log(d.constructor.name); d.fd(); var a = new A(); console.log(a); console.log(a.__proto__); console.log(a.va);<|fim▁end|>
} C.prototype = { vc: 1,
<|file_name|>helpers.js<|end_file_name|><|fim▁begin|>'use strict'; // Load modules const i18n = require('../services/i18n'); const fs = require('fs'); var dateFormat = require('dateformat'); const Remarkable = require('remarkable'); const path = require('path'); const logger = require('../services/logger'); const config = require('../config/config'); const _ = require('lodash'); <|fim▁hole|> exports.toId = (input) => { return input.replace(/ /g, ''); }; exports.clientConfig = () => { return JSON.stringify({ baseUrl: process.env.REACT_BASEURL || config.react.baseUrl, clientSecret: process.env.REACT_CLIENT_SECRET || config.react.clientSecret, clientId: process.env.REACT_CLIENT_ID || config.react.clientId }); }; exports.cloudinaryConfig = () => JSON.stringify({ cloud_name: config.cloudinary.cloud_name, api_key: config.cloudinary.api_key }); exports.transform = function (parameters, url) { if (!url) { return; } let newUrl; if (!url.includes('cloudinary')) { // cannot transform images that are not from cloudinary var myRegexp = /w_(\d+)/g; var match = myRegexp.exec(parameters); newUrl = 'https://images.break-out.org/' + match[1] + 'x,q80/' + url; return newUrl; } else { newUrl = url.replace(/image\/upload\/.*\//, `image/upload/${parameters}/`); return newUrl; } }; exports.transformVideo = function (parameters, url) { if (!url) { return; } let newUrl; if (!url.includes('cloudinary')) { // cannot transform images that are not from cloudinary return url; } else { newUrl = url.replace(/video\/upload\/.*\//, `video/upload/${parameters}/`); return newUrl; } }; exports.stringify = (obj) => { if (!obj) { return false; } return JSON.stringify(obj); }; exports.thumbnail = (videoUrl, ctx) => { try { if (videoUrl.includes('breakoutmedia.blob.core.windows.net')) { // this video is served from our old azure blob storage where // we can't just change the extension to get a thumbnail // Instead we do nothing and have a black "thumbnail" return ''; } // replace the ending of the video 
with .png. This will use cloudinary // to automatically generate a thumbnail based on the video url for us return videoUrl.substr(0, videoUrl.lastIndexOf('.')) + '.png'; } catch (err) { logger.error(`Error parsing thumbnail url for url '${videoUrl}'`); } }; function changeExtension(videoUrl, newExtension) { try { if (videoUrl.includes('breakoutmedia.blob.core.windows.net')) { // this video is served from our old azure blob storage where return videoUrl; } // replace the ending of the video with .png. This will use cloudinary // to automatically generate a thumbnail based on the video url for us return videoUrl.substr(0, videoUrl.lastIndexOf('.')) + '.' + newExtension; } catch (err) { logger.error(`Error changing extension to ${newExtension} for url '${videoUrl}'`); } } exports.changeExtension = changeExtension; exports.transformVideoAndExtension = (format, extension, videoUrl, ctx) => { let newUrl = changeExtension(videoUrl, extension); return exports.transformVideo(format, newUrl, ctx); }; /** * Concatenates first and second. * @param first * @param second */ exports.concat = (first, second) => first + second; /** * Returns true if v1 == v2. 
* @param v1 * @param v2 * @param options * @returns {*} */ exports.ifCond = function (v1, v2, options) { if (v1 === v2) { return options.fn(this); } return options.inverse(this); }; exports.weakIfCond = function (v1, v2, options) { if (v1 == v2) { return options.fn(this); } return options.inverse(this); }; exports.fixed = function (v1, options) { let number = new Number(options.fn(this)); return number.toFixed(v1); }; exports.isEven = function (context) { if ((context.data.index % 2) === 0) { return context.fn(this); } else { return context.inverse(this); } }; exports.isOdd = function (context) { if ((context.data.index % 2) !== 0) { return context.fn(this); } else { return context.inverse(this); } }; exports.isLast = function (context) { if (context.data.last) { return context.fn(this); } else { return context.inverse(this); } }; /** * Render markdown from content/mdFileName to html * @param mdFileName * @returns rendered html */ exports.markdown = function renderMarkdown(mdFileName, context) { const rawMd = loadFileContent(mdFileName); const html = md.render(rawMd); return html; }; exports.date = function makeDate(timestamp, context) { return new Date(timestamp); }; exports.beautifuldate = function makeDate(timestamp, context) { if (timestamp == null) { return 'No Date Available'; } let date = new Date(timestamp); let beautifuldate = dateFormat(date, 'dS mmmm, h:MM TT'); return beautifuldate; }; exports.md = function renderMarkdown(rawMd, context) { const html = md.render(rawMd); return html; }; exports.contentfulImage = function (imageObject, clazz, id, context) { clazz = (clazz != null) ? clazz : ''; id = (id != null) ? id : ''; const url = imageObject.fields.file.url; const alt = (imageObject.fields.description != null) ? 
imageObject.fields.description : ''; return `<img src="${url}" alt="${alt}" class="${clazz}" id="${id}"/>`; }; function loadFileContent(mdFileName) { const path = getFilepath(mdFileName); return fs.readFileSync(path, 'utf-8'); } function getFilepath(mdFileName) { const contentFolderPath = path.resolve('content/'); return `${contentFolderPath}/${mdFileName}.md`; } /** * Tries to find the matching translation for the language the browser sent us. * @param text * @param options * @private */ exports.__ = (text, options) => { if (!options.data.root.language) { throw 'You did not pass the language to handlebars!'; } const view = options.data.exphbs.view; let viewArr = []; if (view) { if (view.indexOf('\\') > -1) { viewArr = view.split('\\'); } else { viewArr = view.split('/'); } } if (text.indexOf('.') > -1) { viewArr = text.split('.'); text = viewArr.pop(); } else if (!view) { logger.error(`Could not parse view in ${options.data.exphbs}`); } return i18n.translate(viewArr[viewArr.length - 1].toUpperCase(), text.toUpperCase(), options.data.root.language); }; exports.ifOr = function (v1, v2, options) { if (v1 || v2) { return options.fn(this); } return options.inverse(this); }; /* eslint-disable no-console */ exports.debug = function (optionalValue) { console.log('Current Context'); console.log('===================='); console.log(this); if (optionalValue) { console.log('Value'); console.log('===================='); console.log(optionalValue); } }; /*eslint-enable no-console */ exports.json = function (context) { return JSON.stringify(context); }; exports.getAtIndex = function (array, index) { return array[index]; }; /** * Calculate a 32 bit FNV-1a hash * Found here: https://gist.github.com/vaiorabbit/5657561 * Ref.: http://isthe.com/chongo/tech/comp/fnv/ * * @param {string} str the input value * @param {boolean} [asString=false] set to true to return the hash value as * 8-digit hex string instead of an integer * @param {integer} [seed] optionally pass the hash of the 
previous chunk * @returns {integer | string} */ exports.hash = (str, seed) => { /*jshint bitwise:false */ var i, l, hval = (seed === undefined) ? 0x811c9dc5 : seed; for (i = 0, l = str.length; i < l; i++) { hval ^= str.charCodeAt(i); hval += (hval << 1) + (hval << 4) + (hval << 7) + (hval << 8) + (hval << 24); } // Convert to 8 digit hex string return ('0000000' + (hval >>> 0).toString(16)).substr(-8); }; exports.relativeTime = function (timestamp) { function leftPad(zahlen) { let string = '00' + zahlen; return string.substring(string.length - 2); } const MONTHS = ['Januar', 'Februar', 'März', 'April', 'Mai', 'Juni', 'Juli', 'August', 'September', 'Oktober', 'November', 'Dezember']; let dO = new Date((timestamp + (60 * 60 * 2)) * 1000); // TODO: Hack for timezone! Fix this in 2018 let now = new Date(Date.now() + 60 * 60 * 2 * 1000); let difference = now - dO.getTime(); if (difference < 60 * 1000) { return 'Gerade eben'; } else if (difference < 60 * 60 * 1000) { return `vor ${Math.floor(difference / 60 / 1000)} Minuten`; } else if (difference < 60 * 60 * 24 * 1000) { return `vor ${Math.floor(difference / 60 / 60 / 1000)} Stunden`; } else { return `am ${dO.getDate()}. ${MONTHS[dO.getMonth()]} um ${dO.getHours()}:${leftPad(dO.getMinutes())}`; } }; exports.length = function (array) { return array.length; }; exports.strColor = (str) => { if (!str) { return 'gray'; } var hash = 0; for (let i = 0; i < str.length; i++) { hash = str.charCodeAt(i) + ((hash << 5) - hash); } var colour = '#'; for (let i = 0; i < 3; i++) { var value = (hash >> (i * 8)) & 0xFF; colour += ('00' + value.toString(16)).substr(-2); } return colour; }; exports.round = (amount) => { return Math.round(parseFloat(amount)).toString().replace(/\B(?=(\d{3})+(?!\d))/g, '.'); }; exports.roundWithoutFormat = (amount) => { return Math.round(parseFloat(amount)); }; exports.addOne = (amount) => { return (parseFloat(amount) + 1); }; exports.prettyLocation = (location) => { //Check if it exists. 
if (!location) return ''; var locString = ''; //Check for best Level if (location.hasOwnProperty('LOCALITY')) { locString = location.LOCALITY; } else if (location.hasOwnProperty('ADMINISTRATIVE_AREA_LEVEL_3')) { locString = location.ADMINISTRATIVE_AREA_LEVEL_3; } else if (location.hasOwnProperty('ADMINISTRATIVE_AREA_LEVEL_2')) { locString = location.ADMINISTRATIVE_AREA_LEVEL_2; } else if (location.hasOwnProperty('ADMINISTRATIVE_AREA_LEVEL_1')) { locString = location.ADMINISTRATIVE_AREA_LEVEL_1; } if (location.hasOwnProperty('COUNTRY')) { if (locString !== '') { locString += ', '; } locString += location.COUNTRY; } if (locString !== '') { locString = ' in ' + locString; } return locString; }; exports.challengeHasProof = (status) => { return status === 'WITH_PROOF'; }; exports.isOlderTenMinutes = (date, context) => { if ((new Date(date * 1000 + 10 * 60 * 1000).getTime() < new Date().getTime())) { return context.fn(this); } else { return context.inverse(this); } }; exports.isNewerTenMinutes = (date, context) => { if ((new Date(date * 1000 + 10 * 60 * 1000).getTime() > new Date().getTime())) { return context.fn(this); } else { return context.inverse(this); } }; exports.displayCurrency = (amount, context) => Number(amount).toFixed(2); /** * This handlebars helper will render a block in the handlerbars file if the config value is true * * Example: * * {{#isEnabled 'printHeadline.enabled'}} <h1>I am a headline </h1> {{/isEnabled}} * * @param key The config key to look up * @param context Handlebars context * @returns {string} Either the block to be rendered or an empty string */ exports.isEnabled = function (key, context) { const value = _.get(config, key); if (value === true) { return context.fn(this); } else if (value === false) { return ''; } else { throw new Error(`Found value '${value}' for config key '${key}'. 
Expected boolean value`); } }; /** * Returns the value for a key from the config file * * Example: {{config 'backend.accessToken'}} will render the accessToken into the handlebars file * * @param key The config key to look up * @param context Handlebars context * @returns {*} The value found for the given key */ exports.config = function (key, context) { const value = _.get(config, key); if (value) { return value; } else { throw new Error(`Couldn't find value for key '${key}' in configuration`); } };<|fim▁end|>
// Setup const md = new Remarkable({ html: true });
<|file_name|>BaseBlockNode.js<|end_file_name|><|fim▁begin|>import BaseStatementNode from './BaseStatementNode'; import * as symbols from '../symbols'; export default class BaseBlockNode extends BaseStatementNode { packBlock(bitstr, propName) { var prop = this[propName] if (!prop) { bitstr.writebits(0, 1);<|fim▁hole|> } bitstr.writebits(1, 1); bitstr.writebits(prop.length, 32); prop.forEach(p => p.pack(bitstr)); } [symbols.FMAKEHLIRBLOCK](builder, arr, expectedType) { return arr.map(a => a[symbols.FMAKEHLIR](builder, expectedType)) .map(a => Array.isArray(a) ? a : [a]) .reduce((a, b) => a.concat(b), []); } };<|fim▁end|>
return;
<|file_name|>test_rfxtrx.py<|end_file_name|><|fim▁begin|>"""Th tests for the Rfxtrx component.""" # pylint: disable=too-many-public-methods,protected-access import unittest import time from homeassistant.bootstrap import _setup_component from homeassistant.components import rfxtrx as rfxtrx from tests.common import get_test_home_assistant class TestRFXTRX(unittest.TestCase): """Test the Rfxtrx component.""" def setUp(self): """Setup things to be run when tests are started.""" self.hass = get_test_home_assistant(0) def tearDown(self): """Stop everything that was started.""" rfxtrx.RECEIVED_EVT_SUBSCRIBERS = [] rfxtrx.RFX_DEVICES = {} if rfxtrx.RFXOBJECT: rfxtrx.RFXOBJECT.close_connection() self.hass.stop() def test_default_config(self): """Test configuration.""" self.assertTrue(_setup_component(self.hass, 'rfxtrx', { 'rfxtrx': { 'device': '/dev/serial/by-id/usb' + '-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0', 'dummy': True} })) self.assertTrue(_setup_component(self.hass, 'sensor', { 'sensor': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {}}})) <|fim▁hole|> time.sleep(0.1) self.assertEqual(len(rfxtrx.RFXOBJECT.sensors()), 1) def test_valid_config(self): """Test configuration.""" self.assertTrue(_setup_component(self.hass, 'rfxtrx', { 'rfxtrx': { 'device': '/dev/serial/by-id/usb' + '-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0', 'dummy': True}})) self.assertTrue(_setup_component(self.hass, 'rfxtrx', { 'rfxtrx': { 'device': '/dev/serial/by-id/usb' + '-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0', 'dummy': True, 'debug': True}})) def test_invalid_config(self): """Test configuration.""" self.assertFalse(_setup_component(self.hass, 'rfxtrx', { 'rfxtrx': {} })) self.assertFalse(_setup_component(self.hass, 'rfxtrx', { 'rfxtrx': { 'device': '/dev/serial/by-id/usb' + '-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0', 'invalid_key': True}})) def test_fire_event(self): """Test fire event.""" self.assertTrue(_setup_component(self.hass, 'rfxtrx', { 'rfxtrx': { 'device': '/dev/serial/by-id/usb' + 
'-RFXCOM_RFXtrx433_A1Y0NJGR-if00-port0', 'dummy': True} })) self.assertTrue(_setup_component(self.hass, 'switch', { 'switch': {'platform': 'rfxtrx', 'automatic_add': True, 'devices': {'0b1100cd0213c7f210010f51': { 'name': 'Test', rfxtrx.ATTR_FIREEVENT: True} }}})) calls = [] def record_event(event): """Add recorded event to set.""" calls.append(event) self.hass.bus.listen(rfxtrx.EVENT_BUTTON_PRESSED, record_event) entity = rfxtrx.RFX_DEVICES['213c7f216'] self.assertEqual('Test', entity.name) self.assertEqual('off', entity.state) self.assertTrue(entity.should_fire_event) event = rfxtrx.get_rfx_object('0b1100cd0213c7f210010f51') event.data = bytearray([0x0b, 0x11, 0x00, 0x10, 0x01, 0x18, 0xcd, 0xea, 0x01, 0x01, 0x0f, 0x70]) rfxtrx.RECEIVED_EVT_SUBSCRIBERS[0](event) self.hass.pool.block_till_done() self.assertEqual(event.values['Command'], "On") self.assertEqual('on', entity.state) self.assertEqual(1, len(rfxtrx.RFX_DEVICES)) self.assertEqual(1, len(calls)) self.assertEqual(calls[0].data, {'entity_id': 'switch.test', 'state': 'on'})<|fim▁end|>
while len(rfxtrx.RFX_DEVICES) < 1:
<|file_name|>SingleUrlResponseCache.java<|end_file_name|><|fim▁begin|>package org.fakekoji.xmlrpc.server.expensiveobjectscache; import org.fakekoji.xmlrpc.server.xmlrpcrequestparams.XmlRpcRequestParams; import java.io.BufferedWriter; import java.io.IOException; import java.lang.reflect.Array; import java.lang.reflect.InvocationTargetException; import java.net.URL; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; import edu.umd.cs.findbugs.annotations.*; public class SingleUrlResponseCache { private final URL id; private final Map<XmlRpcRequestParams, ResultWithTimeStamp> cache = Collections.synchronizedMap(new HashMap<>()); public SingleUrlResponseCache(final URL u) { this.id = u; } public ResultWithTimeStamp get(final XmlRpcRequestParams params) { return cache.get(params); } public void put(final Object result, XmlRpcRequestParams params) { cache.put(params, new ResultWithTimeStamp(result)); } public void remove(XmlRpcRequestParams key) { cache.remove(key); } public URL getId() { return id; }<|fim▁hole|> return " (" + (l / 1000 / 600) + "min)"; } private static final Map<Class<?>, Class<?>> WRAPPER_TYPE_MAP; static { WRAPPER_TYPE_MAP = new HashMap<Class<?>, Class<?>>(20); WRAPPER_TYPE_MAP.put(Integer.class, int.class); WRAPPER_TYPE_MAP.put(Byte.class, byte.class); WRAPPER_TYPE_MAP.put(Character.class, char.class); WRAPPER_TYPE_MAP.put(Boolean.class, boolean.class); WRAPPER_TYPE_MAP.put(Double.class, double.class); WRAPPER_TYPE_MAP.put(Float.class, float.class); WRAPPER_TYPE_MAP.put(Long.class, long.class); WRAPPER_TYPE_MAP.put(Short.class, short.class); WRAPPER_TYPE_MAP.put(Void.class, void.class); WRAPPER_TYPE_MAP.put(String.class, String.class); } public synchronized void dump(String preffix, BufferedWriter bw, RemoteRequestsCache validator) throws IOException { List<Map.Entry<XmlRpcRequestParams, 
ResultWithTimeStamp>> entries = new ArrayList(cache.entrySet()); entries.sort((o1, o2) -> o1.getKey().getMethodName().compareTo(o2.getKey().getMethodName())); for (Map.Entry<XmlRpcRequestParams, ResultWithTimeStamp> entry : entries) { bw.write(preffix + XmlRpcRequestParams.toNiceString(entry.getKey()) + ": "); bw.newLine(); bw.write(preffix + " dateCreated: " + entry.getValue().dateCreated); bw.newLine(); bw.write(preffix + " notBeingRepalced: " + entry.getValue().notBeingRepalced); bw.newLine(); bw.write(preffix + " validity: " + validator.isValid(entry.getValue(), entry.getKey().getMethodName(), id.getHost())); bw.newLine(); long ttl = validator.getPerMethodValidnesMilis(entry.getKey().getMethodName(), id.getHost()); bw.write(preffix + " original ttl: " + ttl + "ms" + asMinutes(ttl)); bw.newLine(); long cttl = new Date().getTime() - entry.getValue().dateCreated.getTime(); bw.write(preffix + " time alive " + cttl + "ms" + asMinutes(cttl)); bw.newLine(); bw.write(preffix + " => ttl: " + (ttl - cttl) + "ms" + asMinutes(ttl - cttl)); bw.newLine(); if (WRAPPER_TYPE_MAP.containsKey(entry.getValue().result.getClass())) { bw.write(preffix + " result: " + entry.getValue().result + " (" + entry.getValue().result.getClass().getName() + ")"); bw.newLine(); } else { bw.write(preffix + " result: "); bw.newLine(); entry.getValue().dump(preffix + " ", bw); } } bw.write(preffix + "total: " + entries.size()); bw.newLine(); } @SuppressFBWarnings(value = {"EI_EXPOSE_REP", "EI_EXPOSE_REP2"}, justification = "pure wrapper class") public static final class ResultWithTimeStamp { private final Date dateCreated; private final Object result; private boolean notBeingRepalced = true; public ResultWithTimeStamp(final Object result) { this.dateCreated = new Date(); this.result = result; } public Date getDateCreated() { return dateCreated; } public Object getResult() { return result; } public boolean isNotBeingReplaced() { return notBeingRepalced; } public void flagBeingReplaced() { 
this.notBeingRepalced = false; } public void dump(String preffix, BufferedWriter bw) throws IOException { dump(preffix, result, bw); } private static final String FINAL_INCREMENT = " "; public static void dump(String preffix, Object o, BufferedWriter bw) throws IOException { if (o == null) { bw.write(preffix + "null"); bw.newLine(); return; } if (o instanceof Map) { bw.write(preffix + " map " + o.getClass().getName() + " map (size: " + ((Map) o).size()); bw.newLine(); Set<Map.Entry> entries = ((Map) o).entrySet(); for (Map.Entry e : entries) { if (e.getKey() == null) { bw.write(preffix + FINAL_INCREMENT + "null="); bw.newLine(); dump(preffix + FINAL_INCREMENT + FINAL_INCREMENT, e.getValue(), bw); } else { bw.write(preffix + FINAL_INCREMENT + e.getKey() + "="); bw.newLine(); dump(preffix + FINAL_INCREMENT + FINAL_INCREMENT, e.getValue(), bw); } } } else if (o.getClass().isArray()) { bw.write(preffix + " ary " + o.getClass().getName() + " ary (size: " + Array.getLength(o)); bw.newLine(); if (o instanceof Object[]) { for (Object e : (Object[]) o) { dump(preffix + FINAL_INCREMENT, e, bw); } } else if (o instanceof int[]) { bw.write(preffix + FINAL_INCREMENT); for (int e : (int[]) o) { bw.write("" + e + ","); } bw.newLine(); } else if (o instanceof byte[]) { bw.write(preffix + FINAL_INCREMENT); for (byte e : (byte[]) o) { bw.write("" + e + ","); } bw.newLine(); } else if (o instanceof char[]) { bw.write(preffix + FINAL_INCREMENT); for (char e : (char[]) o) { bw.write("" + e + ","); } bw.newLine(); } else if (o instanceof boolean[]) { bw.write(preffix + FINAL_INCREMENT); for (boolean e : (boolean[]) o) { bw.write("" + e + ","); } bw.newLine(); } else if (o instanceof double[]) { bw.write(preffix + FINAL_INCREMENT); for (double e : (double[]) o) { bw.write("" + e + ","); } bw.newLine(); } else if (o instanceof float[]) { bw.write(preffix + FINAL_INCREMENT); for (float e : (float[]) o) { bw.write("" + e + ","); } bw.newLine(); } else if (o instanceof long[]) { 
bw.write(preffix + FINAL_INCREMENT); for (long e : (long[]) o) { bw.write("" + e + ","); } bw.newLine(); } else if (o instanceof short[]) { bw.write(preffix + FINAL_INCREMENT); for (short e : (short[]) o) { bw.write("" + e + ","); } bw.newLine(); } } else if (o instanceof Collection) { bw.write(preffix + " col " + o.getClass().getName() + " col (size: " + ((Collection) o).size()); bw.newLine(); for (Object e : (Collection) o) { dump(preffix + FINAL_INCREMENT, e, bw); } } else if (o instanceof Iterable) { bw.write(preffix + " ite " + o.getClass().getName() + " ite"); bw.newLine(); for (Object e : (Iterable) o) { dump(preffix + FINAL_INCREMENT, e, bw); } } else { bw.write(preffix + o + " (" + o.getClass().getName() + ")"); bw.newLine(); } } } Set<Map.Entry<XmlRpcRequestParams, ResultWithTimeStamp>> getContent() { return cache.entrySet(); } }<|fim▁end|>
private static String asMinutes(long l) {
<|file_name|>mpqfile.cpp<|end_file_name|><|fim▁begin|>/* * Copyright (C) 2005-2014 MaNGOS <http://getmangos.com/> * * This program is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License as published by the * Free Software Foundation; either version 3 of the License, or (at your * option) any later version. * * This program is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for * more details. * * You should have received a copy of the GNU General Public License along * with this program. If not, see <http://www.gnu.org/licenses/>. */ #include "mpqfile.h" #include <deque> #include <cstdio> #include "StormLib.h" MPQFile::MPQFile(HANDLE mpq, const char* filename, bool warnNoExist /*= true*/) : eof(false), buffer(0), pointer(0), size(0) { HANDLE file; if (!SFileOpenFileEx(mpq, filename, SFILE_OPEN_PATCHED_FILE, &file)) { if (warnNoExist || GetLastError() != ERROR_FILE_NOT_FOUND) fprintf(stderr, "Can't open %s, err=%u!\n", filename, GetLastError()); eof = true; return; } DWORD hi = 0; size = SFileGetFileSize(file, &hi); if (hi) { fprintf(stderr, "Can't open %s, size[hi] = %u!\n", filename, uint32(hi)); SFileCloseFile(file); eof = true; return; } if (size <= 1) { fprintf(stderr, "Can't open %s, size = %u!\n", filename, uint32(size)); SFileCloseFile(file); eof = true;<|fim▁hole|> DWORD read = 0; buffer = new char[size]; if (!SFileReadFile(file, buffer, size, &read) || size != read) { fprintf(stderr, "Can't read %s, size=%u read=%u!\n", filename, uint32(size), uint32(read)); SFileCloseFile(file); eof = true; return; } SFileCloseFile(file); } size_t MPQFile::read(void* dest, size_t bytes) { if (eof) return 0; size_t rpos = pointer + bytes; if (rpos > size) { bytes = size - pointer; eof = true; } memcpy(dest, &(buffer[pointer]), bytes); pointer = rpos; return bytes; } 
void MPQFile::seek(int offset) { pointer = offset; eof = (pointer >= size); } void MPQFile::seekRelative(int offset) { pointer += offset; eof = (pointer >= size); } void MPQFile::close() { if (buffer) delete[] buffer; buffer = 0; eof = true; }<|fim▁end|>
return; }
<|file_name|>gatsby-node.js<|end_file_name|><|fim▁begin|>const path = require(`path`) const chunk = require(`lodash/chunk`) // This is a simple debugging tool // dd() will prettily dump to the terminal and kill the process // const { dd } = require(`dumper.js`) /** * exports.createPages is a built-in Gatsby Node API. * It's purpose is to allow you to create pages for your site! 💡 * * See https://www.gatsbyjs.com/docs/node-apis/#createPages for more info. */ exports.createPages = async gatsbyUtilities => { // Query our posts from the GraphQL server const posts = await getNodes(gatsbyUtilities) // If there are no posts in WordPress, don't do anything if (!posts.length) { return } // If there are posts and pages, create Gatsby pages for them await createSinglePages({ posts, gatsbyUtilities }) // And a paginated archive await createBlogPostArchive({ posts, gatsbyUtilities }) } /** * This function creates all the individual blog pages in this site */ const createSinglePages = async ({ posts, gatsbyUtilities }) => Promise.all( posts.map(({ previous, post, next }) => // createPage is an action passed to createPages // See https://www.gatsbyjs.com/docs/actions#createPage for more info gatsbyUtilities.actions.createPage({ // Use the WordPress uri as the Gatsby page path // This is a good idea so that internal links and menus work 👍 path: post.uri, // use the blog post template as the page component component: path.resolve( `./src/templates/${post.__typename.replace(`Wp`, ``)}.js` ), // `context` is available in the template as a prop and // as a variable in GraphQL. context: { // we need to add the post id here // so our blog post template knows which blog post // the current page is (when you open it in a browser) id: post.id, // We also use the next and previous id's to query them and add links! previousPostId: previous ? previous.id : null, nextPostId: next ? 
next.id : null, }, }) ) ) /** * This function creates all the individual blog pages in this site */ async function createBlogPostArchive({ posts, gatsbyUtilities }) { const graphqlResult = await gatsbyUtilities.graphql(/* GraphQL */ ` { wp { readingSettings { postsPerPage } } } `) const { postsPerPage } = graphqlResult.data.wp.readingSettings const postsChunkedIntoArchivePages = chunk(posts, postsPerPage) const totalPages = postsChunkedIntoArchivePages.length return Promise.all( postsChunkedIntoArchivePages.map(async (_posts, index) => { const pageNumber = index + 1 const getPagePath = page => { if (page > 0 && page <= totalPages) { // Since our homepage is our blog page // we want the first page to be "/" and any additional pages // to be numbered. // "/blog/2" for example return page === 1 ? `/` : `/blog/${page}` } return null } // createPage is an action passed to createPages // See https://www.gatsbyjs.com/docs/actions#createPage for more info await gatsbyUtilities.actions.createPage({ path: getPagePath(pageNumber), // use the blog post archive template as the page component component: path.resolve(`./src/templates/blog-post-archive.js`), // `context` is available in the template as a prop and // as a variable in GraphQL. context: { // the index of our loop is the offset of which posts we want to display // so for page 1, 0 * 10 = 0 offset, for page 2, 1 * 10 = 10 posts offset, // etc offset: index * postsPerPage, // We need to tell the template how many posts to display too postsPerPage, nextPagePath: getPagePath(pageNumber + 1), previousPagePath: getPagePath(pageNumber - 1), }, }) }) ) }<|fim▁hole|> * This function queries Gatsby's GraphQL server and asks for * All WordPress blog posts. If there are any GraphQL error it throws an error * Otherwise it will return the posts 🙌 * * We're passing in the utilities we got from createPages. * So see https://www.gatsbyjs.com/docs/node-apis/#createPages for more info! 
*/ async function getNodes({ graphql, reporter }) { const graphqlResult = await graphql(/* GraphQL */ ` query WpPosts { # Query all WordPress blog posts sorted by date allWpPost(sort: { fields: [date], order: DESC }) { edges { previous { id } # note: this is a GraphQL alias. It renames "node" to "post" for this query # We're doing this because this "node" is a post! It makes our code more readable further down the line. post: node { __typename id uri } next { id } } } allWpPage(sort: { fields: [date], order: DESC }) { edges { previous { id } # note: this is a GraphQL alias. It renames "node" to "post" for this query # We're doing this because this "node" is a post! It makes our code more readable further down the line. post: node { __typename id uri } next { id } } } } `) if (graphqlResult.errors) { reporter.panicOnBuild( `There was an error loading your blog posts`, graphqlResult.errors ) return } return [ ...graphqlResult.data.allWpPost.edges, ...graphqlResult.data.allWpPage.edges, ] }<|fim▁end|>
/**
<|file_name|>Value.java<|end_file_name|><|fim▁begin|>package com.planet_ink.coffee_mud.Commands; import com.planet_ink.coffee_mud.core.interfaces.*; import com.planet_ink.coffee_mud.core.*; import com.planet_ink.coffee_mud.Abilities.interfaces.*; import com.planet_ink.coffee_mud.Areas.interfaces.*; import com.planet_ink.coffee_mud.Behaviors.interfaces.*; import com.planet_ink.coffee_mud.CharClasses.interfaces.*; import com.planet_ink.coffee_mud.Commands.interfaces.*; import com.planet_ink.coffee_mud.Common.interfaces.*; import com.planet_ink.coffee_mud.Exits.interfaces.*; import com.planet_ink.coffee_mud.Items.interfaces.*; import com.planet_ink.coffee_mud.Locales.interfaces.*; import com.planet_ink.coffee_mud.MOBS.interfaces.*; import com.planet_ink.coffee_mud.Races.interfaces.*; import java.util.*; /* Copyright 2000-2010 Bo Zimmerman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ @SuppressWarnings("unchecked") public class Value extends StdCommand { public Value(){} private String[] access={"VALUE","VAL","V"}; public String[] getAccessWords(){return access;} public boolean execute(MOB mob, Vector commands, int metaFlags) throws java.io.IOException { Environmental shopkeeper=CMLib.english().parseShopkeeper(mob,commands,"Value what with whom?"); if(shopkeeper==null) return false; if(commands.size()==0) { mob.tell("Value what?"); return false; } int maxToDo=Integer.MAX_VALUE; if((commands.size()>1) &&(CMath.s_int((String)commands.firstElement())>0)) { maxToDo=CMath.s_int((String)commands.firstElement()); commands.setElementAt("all",0); } String whatName=CMParms.combine(commands,0); Vector V=new Vector(); boolean allFlag=((String)commands.elementAt(0)).equalsIgnoreCase("all"); if(whatName.toUpperCase().startsWith("ALL.")){ allFlag=true; whatName="ALL "+whatName.substring(4);} if(whatName.toUpperCase().endsWith(".ALL")){ allFlag=true; whatName="ALL "+whatName.substring(0,whatName.length()-4);} int addendum=1; String addendumStr=""; do { Item itemToDo=mob.fetchCarried(null,whatName+addendumStr); if(itemToDo==null) break; if((CMLib.flags().canBeSeenBy(itemToDo,mob)) &&(!V.contains(itemToDo))) V.addElement(itemToDo); addendumStr="."+(++addendum); } while((allFlag)&&(addendum<=maxToDo)); if(V.size()==0) mob.tell("You don't seem to have '"+whatName+"'."); else for(int v=0;v<V.size();v++) { Item thisThang=(Item)V.elementAt(v); CMMsg newMsg=CMClass.getMsg(mob,shopkeeper,thisThang,CMMsg.MSG_VALUE,null); if(mob.location().okMessage(mob,newMsg)) mob.location().send(mob,newMsg); } return false; } public double combatActionsCost(MOB mob, Vector cmds){return CMath.div(CMProps.getIntVar(CMProps.SYSTEMI_DEFCOMCMDTIME),100.0);} public double actionsCost(MOB mob, Vector cmds){return CMath.div(CMProps.getIntVar(CMProps.SYSTEMI_DEFCMDTIME),100.0);} public boolean canBeOrdered(){return true;} <|fim▁hole|><|fim▁end|>
}
<|file_name|>PackageVariable.py<|end_file_name|><|fim▁begin|>"""engine.SCons.Variables.PackageVariable This file defines the option type for SCons implementing 'package activation'. To be used whenever a 'package' may be enabled/disabled and the package path may be specified. Usage example: Examples: x11=no (disables X11 support) x11=yes (will search for the package installation dir) x11=/usr/local/X11 (will check this path for existance) To replace autoconf's --with-xxx=yyy opts = Variables() opts.Add(PackageVariable('x11', 'use X11 installed here (yes = search some places', 'yes')) ... if env['x11'] == True: dir = ... search X11 in some standard places ... env['x11'] = dir if env['x11']:<|fim▁hole|>""" # # Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
# __revision__ = "src/engine/SCons/Variables/PackageVariable.py 5134 2010/08/16 23:02:40 bdeegan" __all__ = ['PackageVariable',] import SCons.Errors __enable_strings = ('1', 'yes', 'true', 'on', 'enable', 'search') __disable_strings = ('0', 'no', 'false', 'off', 'disable') def _converter(val): """ """ lval = val.lower() if lval in __enable_strings: return True if lval in __disable_strings: return False #raise ValueError("Invalid value for boolean option: %s" % val) return val def _validator(key, val, env, searchfunc): # NB: searchfunc is currenty undocumented and unsupported """ """ # todo: write validator, check for path import os if env[key] is True: if searchfunc: env[key] = searchfunc(key, val) elif env[key] and not os.path.exists(val): raise SCons.Errors.UserError( 'Path does not exist for option %s: %s' % (key, val)) def PackageVariable(key, help, default, searchfunc=None): # NB: searchfunc is currenty undocumented and unsupported """ The input parameters describe a 'package list' option, thus they are returned with the correct converter and validator appended. The result is usable for input to opts.Add() . A 'package list' option may either be 'all', 'none' or a list of package names (seperated by space). """ help = '\n '.join( (help, '( yes | no | /path/to/%s )' % key)) return (key, help, default, lambda k, v, e: _validator(k,v,e,searchfunc), _converter) # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:<|fim▁end|>
... build with x11 ...
<|file_name|>MemberDetailsRedirectView.py<|end_file_name|><|fim▁begin|>from django.views.decorators.cache import never_cache from django.views.generic.base import RedirectView from C4CApplication.views.utils import create_user class MemberDetailsRedirectView(RedirectView): url = "" connected_member = None def dispatch(self, request, *args, **kwargs): # Create the object representing the user if 'email' not in self.request.session: raise PermissionDenied # HTTP 403 self.connected_member = create_user(self.request.session['email']) return super(MemberDetailsRedirectView, self).dispatch(request, *args, **kwargs) @never_cache def get(self, request, *args, **kwargs): member_to_ad_as_a_friend_mail = kwargs['pk'] self.url = "/memberdetails/"+str(member_to_ad_as_a_friend_mail) <|fim▁hole|> self.connected_member.add_favorite( member_to_ad_as_a_friend_mail) return super(MemberDetailsRedirectView, self).get(request, *args, **kwargs)<|fim▁end|>
<|file_name|>home.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit } from '@angular/core'; @Component({ selector: 'app-home', templateUrl: './home.component.html', styleUrls: ['./home.component.scss']<|fim▁hole|> constructor() { } ngOnInit(): void { // } }<|fim▁end|>
}) export class HomeComponent implements OnInit {
<|file_name|>proc_005.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2 from taptaptap.proc import plan, ok, not_ok, out plan(first=1, last=13)<|fim▁hole|>ok('Starting the engine') ok('Find the object') ok('Grab it', todo=True) ok('Use it', todo=True) 2 * 2 == 4 and ok('2 * 2 == 4') or not_ok('2 * 2 != 4') out() ## validity: -1 ## ok testcases: 6 / 13 ## bailout: no ## stderr: 2 * 2 == 4 ## stderr: TODO ## stderr: ~TRUE ## stderr: ~True ## stderr: ~true<|fim▁end|>
ok('Starting the program')
<|file_name|>bitstreamTest.cc<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2011 The WebRTC project authors. All Rights Reserved. * * Use of this source code is governed by a BSD-style license * that can be found in the LICENSE file in the root of the source * tree. An additional intellectual property rights grant can be found * in the file PATENTS. All contributing project authors may * be found in the AUTHORS file in the root of the source tree. */ #include "../../source/BitstreamBuilder.h" #include "../../source/BitstreamParser.h" #include <assert.h> #include <stdio.h> #include <math.h> #include <tchar.h> #include <windows.h> uint32_t BitRateBPS(uint16_t x ) { return (x & 0x3fff) * uint32_t(pow(10.0f,(2 + (x >> 14)))); } uint16_t BitRateBPSInv(uint32_t x ) { // 16383 0x3fff // 1 638 300 exp 0 // 16 383 000 exp 1 // 163 830 000 exp 2 // 1 638 300 000 exp 3 const float exp = log10(float(x>>14)) - 2; if(exp < 0.0) { return uint16_t(x /100); }else if(exp < 1.0) { return 0x4000 + uint16_t(x /1000); }else if(exp < 2.0) { return 0x8000 + uint16_t(x /10000); }else if(exp < 3.0) { return 0xC000 + uint16_t(x /100000); } else { assert(false); return 0; } } int _tmain(int argc, _TCHAR* argv[]) { uint8_t dataBuffer[128]; BitstreamBuilder builder(dataBuffer, sizeof(dataBuffer)); // test 1 to 4 bits builder.Add1Bit(1); builder.Add1Bit(0); builder.Add1Bit(1); builder.Add2Bits(1); builder.Add2Bits(2); builder.Add2Bits(3); builder.Add3Bits(1); builder.Add3Bits(3); builder.Add3Bits(7); builder.Add4Bits(1); builder.Add4Bits(5); builder.Add4Bits(15); assert(4 == builder.Length()); BitstreamParser parser(dataBuffer, sizeof(dataBuffer)); assert(1 == parser.Get1Bit()); assert(0 == parser.Get1Bit()); assert(1 == parser.Get1Bit()); assert(1 == parser.Get2Bits()); assert(2 == parser.Get2Bits()); assert(3 == parser.Get2Bits()); assert(1 == parser.Get3Bits()); assert(3 == parser.Get3Bits()); assert(7 == parser.Get3Bits()); assert(1 == parser.Get4Bits()); assert(5 == parser.Get4Bits()); 
assert(15 == parser.Get4Bits()); printf("Test of 1 to 4 bits done\n"); // test 5 to 7 bits builder.Add5Bits(1); builder.Add5Bits(15); builder.Add5Bits(30); builder.Add6Bits(1); builder.Add6Bits(30); builder.Add6Bits(60); builder.Add7Bits(1); builder.Add7Bits(60); builder.Add7Bits(120); assert(1 == parser.Get5Bits()); assert(15 == parser.Get5Bits()); assert(30 == parser.Get5Bits()); assert(1 == parser.Get6Bits()); assert(30 == parser.Get6Bits()); assert(60 == parser.Get6Bits()); assert(1 == parser.Get7Bits()); assert(60 == parser.Get7Bits()); assert(120 == parser.Get7Bits()); printf("Test of 5 to 7 bits done\n"); builder.Add8Bits(1); builder.Add1Bit(1); builder.Add8Bits(255); builder.Add1Bit(0); builder.Add8Bits(127); builder.Add1Bit(1); builder.Add8Bits(60); builder.Add1Bit(0); builder.Add8Bits(30); builder.Add1Bit(1); builder.Add8Bits(120); builder.Add1Bit(0); builder.Add8Bits(160); builder.Add1Bit(1); builder.Add8Bits(180); assert(1 == parser.Get8Bits()); assert(1 == parser.Get1Bit()); assert(255 == parser.Get8Bits()); assert(0 == parser.Get1Bit()); assert(127 == parser.Get8Bits()); assert(1 == parser.Get1Bit()); assert(60 == parser.Get8Bits()); assert(0 == parser.Get1Bit()); assert(30 == parser.Get8Bits()); assert(1 == parser.Get1Bit()); assert(120 == parser.Get8Bits()); assert(0 == parser.Get1Bit()); assert(160 == parser.Get8Bits()); assert(1 == parser.Get1Bit()); assert(180 == parser.Get8Bits()); printf("Test of 8 bits done\n"); builder.Add16Bits(1); builder.Add1Bit(1); builder.Add16Bits(255); builder.Add1Bit(0); builder.Add16Bits(12756); builder.Add1Bit(1); builder.Add16Bits(60); builder.Add1Bit(0); builder.Add16Bits(30); builder.Add1Bit(1); builder.Add16Bits(30120); builder.Add1Bit(0); builder.Add16Bits(160); builder.Add1Bit(1); builder.Add16Bits(180); assert(1 == parser.Get16Bits()); assert(1 == parser.Get1Bit()); assert(255 == parser.Get16Bits()); assert(0 == parser.Get1Bit()); assert(12756 == parser.Get16Bits()); assert(1 == parser.Get1Bit()); assert(60 
== parser.Get16Bits()); assert(0 == parser.Get1Bit()); assert(30 == parser.Get16Bits()); assert(1 == parser.Get1Bit()); assert(30120 == parser.Get16Bits()); assert(0 == parser.Get1Bit()); assert(160 == parser.Get16Bits()); assert(1 == parser.Get1Bit()); assert(180 == parser.Get16Bits()); printf("Test of 16 bits done\n"); builder.Add24Bits(1); builder.Add1Bit(1); builder.Add24Bits(255); builder.Add1Bit(0); builder.Add24Bits(12756); builder.Add1Bit(1); builder.Add24Bits(60); builder.Add1Bit(0); builder.Add24Bits(303333); builder.Add1Bit(1); builder.Add24Bits(30120); builder.Add1Bit(0); builder.Add24Bits(160); builder.Add1Bit(1); builder.Add24Bits(8018018); assert(1 == parser.Get24Bits()); assert(1 == parser.Get1Bit()); assert(255 == parser.Get24Bits()); assert(0 == parser.Get1Bit()); assert(12756 == parser.Get24Bits()); assert(1 == parser.Get1Bit()); assert(60 == parser.Get24Bits()); assert(0 == parser.Get1Bit()); assert(303333 == parser.Get24Bits()); assert(1 == parser.Get1Bit()); assert(30120 == parser.Get24Bits()); assert(0 == parser.Get1Bit()); assert(160 == parser.Get24Bits()); assert(1 == parser.Get1Bit()); assert(8018018 == parser.Get24Bits()); printf("Test of 24 bits done\n"); builder.Add32Bits(1); builder.Add1Bit(1); builder.Add32Bits(255); builder.Add1Bit(0); builder.Add32Bits(12756); builder.Add1Bit(1); builder.Add32Bits(60); builder.Add1Bit(0); builder.Add32Bits(303333); builder.Add1Bit(1); builder.Add32Bits(3012000012); builder.Add1Bit(0); builder.Add32Bits(1601601601); builder.Add1Bit(1); builder.Add32Bits(8018018); assert(1 == parser.Get32Bits()); assert(1 == parser.Get1Bit()); assert(255 == parser.Get32Bits()); assert(0 == parser.Get1Bit()); assert(12756 == parser.Get32Bits()); assert(1 == parser.Get1Bit()); assert(60 == parser.Get32Bits()); assert(0 == parser.Get1Bit()); assert(303333 == parser.Get32Bits()); assert(1 == parser.Get1Bit()); assert(3012000012 == parser.Get32Bits()); assert(0 == parser.Get1Bit()); assert(1601601601 == 
parser.Get32Bits()); assert(1 == parser.Get1Bit()); assert(8018018 == parser.Get32Bits()); printf("Test of 32 bits done\n"); builder.AddUE(1); builder.AddUE(4); builder.AddUE(9809706); builder.AddUE(2); builder.AddUE(15); builder.AddUE(16998); assert( 106 == builder.Length()); assert(1 == parser.GetUE()); assert(4 == parser.GetUE()); assert(9809706 == parser.GetUE()); assert(2 == parser.GetUE()); assert(15 == parser.GetUE()); assert(16998 == parser.GetUE()); printf("Test UE bits done\n"); BitstreamBuilder builderScalabilityInfo(dataBuffer, sizeof(dataBuffer)); BitstreamParser parserScalabilityInfo(dataBuffer, sizeof(dataBuffer)); const uint8_t numberOfLayers = 4; const uint8_t layerId[numberOfLayers] = {0,1,2,3}; const uint8_t priorityId[numberOfLayers] = {0,1,2,3}; const uint8_t discardableId[numberOfLayers] = {0,1,1,1}; const uint8_t dependencyId[numberOfLayers]= {0,1,1,1}; const uint8_t qualityId[numberOfLayers]= {0,0,0,1}; const uint8_t temporalId[numberOfLayers]= {0,0,1,1}; const uint16_t avgBitrate[numberOfLayers]= {BitRateBPSInv(100000), BitRateBPSInv(200000), BitRateBPSInv(400000), BitRateBPSInv(800000)}; // todo which one is the sum?<|fim▁hole|> BitRateBPSInv(300000), BitRateBPSInv(500000), BitRateBPSInv(900000)}; const uint16_t maxBitrateLayerRepresentation[numberOfLayers] = {BitRateBPSInv(150000), BitRateBPSInv(450000), BitRateBPSInv(950000), BitRateBPSInv(1850000)}; assert( 16300 == BitRateBPS(BitRateBPSInv(16383))); assert( 163800 == BitRateBPS(BitRateBPSInv(163830))); assert( 1638300 == BitRateBPS(BitRateBPSInv(1638300))); assert( 1638000 == BitRateBPS(BitRateBPSInv(1638400))); assert( 18500 == BitRateBPS(BitRateBPSInv(18500))); assert( 185000 == BitRateBPS(BitRateBPSInv(185000))); assert( 1850000 == BitRateBPS(BitRateBPSInv(1850000))); assert( 18500000 == BitRateBPS(BitRateBPSInv(18500000))); assert( 185000000 == BitRateBPS(BitRateBPSInv(185000000))); const uint16_t maxBitrareCalcWindow[numberOfLayers] = {200, 200,200,200};// in 1/100 of second 
builderScalabilityInfo.Add1Bit(0); // temporal_id_nesting_flag builderScalabilityInfo.Add1Bit(0); // priority_layer_info_present_flag builderScalabilityInfo.Add1Bit(0); // priority_id_setting_flag builderScalabilityInfo.AddUE(numberOfLayers-1); for(int i = 0; i<= numberOfLayers-1; i++) { builderScalabilityInfo.AddUE(layerId[i]); builderScalabilityInfo.Add6Bits(priorityId[i]); builderScalabilityInfo.Add1Bit(discardableId[i]); builderScalabilityInfo.Add3Bits(dependencyId[i]); builderScalabilityInfo.Add4Bits(qualityId[i]); builderScalabilityInfo.Add3Bits(temporalId[i]); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(1); // bitrate_info_present_flag builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add1Bit(0); builderScalabilityInfo.Add16Bits(avgBitrate[i]); builderScalabilityInfo.Add16Bits(maxBitrateLayer[i]); builderScalabilityInfo.Add16Bits(maxBitrateLayerRepresentation[i]); builderScalabilityInfo.Add16Bits(maxBitrareCalcWindow[i]); builderScalabilityInfo.AddUE(0); // layer_dependency_info_src_layer_id_delta builderScalabilityInfo.AddUE(0); // parameter_sets_info_src_layer_id_delta } printf("Test builderScalabilityInfo done\n"); // Scalability Info parser parserScalabilityInfo.Get1Bit(); // not used in futher parsing const uint8_t priority_layer_info_present = parserScalabilityInfo.Get1Bit(); const uint8_t priority_id_setting_flag = parserScalabilityInfo.Get1Bit(); uint32_t numberOfLayersMinusOne = parserScalabilityInfo.GetUE(); for(uint32_t j = 0; j<= numberOfLayersMinusOne; j++) { parserScalabilityInfo.GetUE(); parserScalabilityInfo.Get6Bits(); parserScalabilityInfo.Get1Bit(); parserScalabilityInfo.Get3Bits(); 
parserScalabilityInfo.Get4Bits(); parserScalabilityInfo.Get3Bits(); const uint8_t sub_pic_layer_flag = parserScalabilityInfo.Get1Bit(); const uint8_t sub_region_layer_flag = parserScalabilityInfo.Get1Bit(); const uint8_t iroi_division_info_present_flag = parserScalabilityInfo.Get1Bit(); const uint8_t profile_level_info_present_flag = parserScalabilityInfo.Get1Bit(); const uint8_t bitrate_info_present_flag = parserScalabilityInfo.Get1Bit(); const uint8_t frm_rate_info_present_flag = parserScalabilityInfo.Get1Bit(); const uint8_t frm_size_info_present_flag = parserScalabilityInfo.Get1Bit(); const uint8_t layer_dependency_info_present_flag = parserScalabilityInfo.Get1Bit(); const uint8_t parameter_sets_info_present_flag = parserScalabilityInfo.Get1Bit(); const uint8_t bitstream_restriction_info_present_flag = parserScalabilityInfo.Get1Bit(); const uint8_t exact_inter_layer_pred_flag = parserScalabilityInfo.Get1Bit(); // not used in futher parsing if(sub_pic_layer_flag || iroi_division_info_present_flag) { parserScalabilityInfo.Get1Bit(); } const uint8_t layer_conversion_flag = parserScalabilityInfo.Get1Bit(); const uint8_t layer_output_flag = parserScalabilityInfo.Get1Bit(); // not used in futher parsing if(profile_level_info_present_flag) { parserScalabilityInfo.Get24Bits(); } if(bitrate_info_present_flag) { // this is what we want assert(avgBitrate[j] == parserScalabilityInfo.Get16Bits()); assert(maxBitrateLayer[j] == parserScalabilityInfo.Get16Bits()); assert(maxBitrateLayerRepresentation[j] == parserScalabilityInfo.Get16Bits()); assert(maxBitrareCalcWindow[j] == parserScalabilityInfo.Get16Bits()); }else { assert(false); } if(frm_rate_info_present_flag) { parserScalabilityInfo.Get2Bits(); parserScalabilityInfo.Get16Bits(); } if(frm_size_info_present_flag || iroi_division_info_present_flag) { parserScalabilityInfo.GetUE(); parserScalabilityInfo.GetUE(); } if(sub_region_layer_flag) { parserScalabilityInfo.GetUE(); if(parserScalabilityInfo.Get1Bit()) { 
parserScalabilityInfo.Get16Bits(); parserScalabilityInfo.Get16Bits(); parserScalabilityInfo.Get16Bits(); parserScalabilityInfo.Get16Bits(); } } if(sub_pic_layer_flag) { parserScalabilityInfo.GetUE(); } if(iroi_division_info_present_flag) { if(parserScalabilityInfo.Get1Bit()) { parserScalabilityInfo.GetUE(); parserScalabilityInfo.GetUE(); }else { const uint32_t numRoisMinusOne = parserScalabilityInfo.GetUE(); for(uint32_t k = 0; k <= numRoisMinusOne; k++) { parserScalabilityInfo.GetUE(); parserScalabilityInfo.GetUE(); parserScalabilityInfo.GetUE(); } } } if(layer_dependency_info_present_flag) { const uint32_t numDirectlyDependentLayers = parserScalabilityInfo.GetUE(); for(uint32_t k = 0; k < numDirectlyDependentLayers; k++) { parserScalabilityInfo.GetUE(); } } else { parserScalabilityInfo.GetUE(); } if(parameter_sets_info_present_flag) { const uint32_t numSeqParameterSetMinusOne = parserScalabilityInfo.GetUE(); for(uint32_t k = 0; k <= numSeqParameterSetMinusOne; k++) { parserScalabilityInfo.GetUE(); } const uint32_t numSubsetSeqParameterSetMinusOne = parserScalabilityInfo.GetUE(); for(uint32_t l = 0; l <= numSubsetSeqParameterSetMinusOne; l++) { parserScalabilityInfo.GetUE(); } const uint32_t numPicParameterSetMinusOne = parserScalabilityInfo.GetUE(); for(uint32_t m = 0; m <= numPicParameterSetMinusOne; m++) { parserScalabilityInfo.GetUE(); } }else { parserScalabilityInfo.GetUE(); } if(bitstream_restriction_info_present_flag) { parserScalabilityInfo.Get1Bit(); parserScalabilityInfo.GetUE(); parserScalabilityInfo.GetUE(); parserScalabilityInfo.GetUE(); parserScalabilityInfo.GetUE(); parserScalabilityInfo.GetUE(); parserScalabilityInfo.GetUE(); } if(layer_conversion_flag) { parserScalabilityInfo.GetUE(); for(uint32_t k = 0; k <2;k++) { if(parserScalabilityInfo.Get1Bit()) { parserScalabilityInfo.Get24Bits(); parserScalabilityInfo.Get16Bits(); parserScalabilityInfo.Get16Bits(); } } } } if(priority_layer_info_present) { const uint32_t prNumDidMinusOne = 
parserScalabilityInfo.GetUE(); for(uint32_t k = 0; k <= prNumDidMinusOne;k++) { parserScalabilityInfo.Get3Bits(); const uint32_t prNumMinusOne = parserScalabilityInfo.GetUE(); for(uint32_t l = 0; l <= prNumMinusOne; l++) { parserScalabilityInfo.GetUE(); parserScalabilityInfo.Get24Bits(); parserScalabilityInfo.Get16Bits(); parserScalabilityInfo.Get16Bits(); } } } if(priority_id_setting_flag) { uint8_t priorityIdSettingUri; uint32_t priorityIdSettingUriIdx = 0; do { priorityIdSettingUri = parserScalabilityInfo.Get8Bits(); } while (priorityIdSettingUri != 0); } printf("Test parserScalabilityInfo done\n"); printf("\nAPI test of parser for ScalabilityInfo done\n"); ::Sleep(5000); }<|fim▁end|>
const uint16_t maxBitrateLayer[numberOfLayers]= {BitRateBPSInv(150000),
<|file_name|>CSSDocumentRangFormatProvider.ts<|end_file_name|><|fim▁begin|>import { TextDocument, DocumentFormattingEditProvider, DocumentRangeFormattingEditProvider, FormattingOptions, CancellationToken, TextEdit, ExtensionContext, TextEditor, commands, window, DocumentSelector, languages, Position, Range } from 'vscode'; import { Formater } from './Formater'; import { FormaterProvider } from './FormaterProvider'; export class CSSDocumentRangFormatProvider implements DocumentRangeFormattingEditProvider { private format: Formater; constructor(format: Formater = new FormaterProvider()) { this.format = format; } provideDocumentRangeFormattingEdits(document: TextDocument, range: Range, options: FormattingOptions, token: CancellationToken): TextEdit[] | Thenable<TextEdit[]> { if (document.validateRange(range)) {<|fim▁hole|> let textEdits: TextEdit[] = []; let reformated = TextEdit.replace(range, formattedText); textEdits.push(reformated); return textEdits; } return null; } }<|fim▁end|>
let originText = document.getText(range); let formattedText: string = this.format.css(originText, options);
<|file_name|>angular-schema-form-download.js<|end_file_name|><|fim▁begin|>angular.module('schemaForm').config( ['schemaFormProvider', 'schemaFormDecoratorsProvider', 'sfPathProvider', function(schemaFormProvider, schemaFormDecoratorsProvider, sfPathProvider) { var download = function(name, schema, options) { if (schema.type === 'string' && schema.format === 'download') { var f = schemaFormProvider.stdFormObj(name, schema, options); f.key = options.path; f.type = 'download'; options.lookup[sfPathProvider.stringify(options.path)] = f; return f; } }; schemaFormProvider.defaults.string.unshift(download); //Add to the bootstrap directive<|fim▁hole|> 'download', 'directives/decorators/bootstrap/download/angular-schema-form-download.html' ); schemaFormDecoratorsProvider.createDirective( 'download', 'directives/decorators/bootstrap/download/angular-schema-form-download.html' ); } ]); angular.module('schemaForm').directive('downloadOptions', function() { return { restrict : 'A', controller : function($scope, $rootScope) { $scope.notifyClick = function(ele) { $rootScope.$emit('DownloadTriggered', { element : ele }) }; }, link : function(scope, ele, attr) { angular.element(ele).click(function() { scope.notifyClick(ele); }); } }; });<|fim▁end|>
schemaFormDecoratorsProvider.addMapping( 'bootstrapDecorator',
<|file_name|>const.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>//- @FOO defines/binding SFOO //- SFoo.node/kind constant const FOO: &'static str = "hi"; fn foo() { //- @FOO ref SFOO FOO; }<|fim▁end|>
<|file_name|>removal.js<|end_file_name|><|fim▁begin|>// This file contains methods responsible for removing a node. import { hooks } from "./lib/removal-hooks"; export function remove() { this._assertUnremoved(); this.resync(); if (this._callRemovalHooks()) { this._markRemoved(); return; }<|fim▁hole|> this.shareCommentsWithSiblings(); this._remove(); this._markRemoved(); } export function _callRemovalHooks() { for (let fn of (hooks: Array<Function>)) { if (fn(this, this.parentPath)) return true; } } export function _remove() { if (Array.isArray(this.container)) { this.container.splice(this.key, 1); this.updateSiblingKeys(this.key, -1); } else { this._replaceWith(null); } } export function _markRemoved() { this.shouldSkip = true; this.removed = true; this.node = null; } export function _assertUnremoved() { if (this.removed) { throw this.buildCodeFrameError("NodePath has been removed so is read-only."); } }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>from investor_lifespan_model.investor import Investor<|fim▁hole|>from investor_lifespan_model.lifespan_model import LifespanModel from investor_lifespan_model.mortality_data import π, G, tf<|fim▁end|>
from investor_lifespan_model.market import Market from investor_lifespan_model.insurer import Insurer
<|file_name|>versions_repository.py<|end_file_name|><|fim▁begin|>import logging import os from lib import exception from lib import repository from lib.constants import REPOSITORIES_DIR LOG = logging.getLogger(__name__) def get_versions_repository(config): """ Get the packages metadata Git repository, cloning it if does not yet exist. Args: config (dict): configuration dictionary Raises: exception.RepositoryError: if the clone is unsuccessful """ path = os.path.join(config.get('work_dir'), REPOSITORIES_DIR) url = config.get('packages_metadata_repo_url') name = "versions_{subcommand}".format( subcommand=config.get('subcommand')) try: versions_repo = repository.get_git_repository(url, path, name) except exception.RepositoryError: LOG.error("Failed to clone versions repository") raise return versions_repo def setup_versions_repository(config): """ Prepare the packages metadata Git repository, cloning it and checking out at the chosen branch. Args: config (dict): configuration dictionary Raises: exception.RepositoryError: if the clone or checkout are unsuccessful """ versions_repo = get_versions_repository(config) branch = config.get('packages_metadata_repo_branch') refspecs = config.get('packages_metadata_repo_refspecs') try: versions_repo.checkout(branch, refspecs) except exception.RepositoryError: LOG.error("Failed to checkout versions repository") raise return versions_repo def read_version_and_milestone(versions_repo): """ Read current version and milestone (alpha or beta) from VERSION file Args: versions_repo (GitRepository): packages metadata git repository Returns: version_milestone (str): version and milestone. 
Format: <version>-<milestone>, valid milestone values: alpha, beta """ version_file_path = os.path.join(versions_repo.working_tree_dir, 'open-power-host-os', 'CentOS', '7', 'SOURCES', 'VERSION') version_milestone = "" with open(version_file_path, 'r') as version_file: #ignore first line with file format information version_file.readline() version_milestone = version_file.readline().strip('\n')<|fim▁hole|><|fim▁end|>
return version_milestone
<|file_name|>ActionCheckerService.java<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2010 Yahoo! Inc. All rights reserved. * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. See accompanying LICENSE file. */ package org.apache.oozie.service; import java.util.ArrayList; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.oozie.CoordinatorActionBean; import org.apache.oozie.ErrorCode; import org.apache.oozie.WorkflowActionBean; import org.apache.oozie.command.CommandException; import org.apache.oozie.command.coord.CoordActionCheckCommand; import org.apache.oozie.command.coord.CoordActionCheckXCommand; import org.apache.oozie.command.wf.ActionCheckCommand; import org.apache.oozie.command.wf.ActionCheckXCommand; import org.apache.oozie.executor.jpa.CoordActionsRunningGetJPAExecutor; import org.apache.oozie.executor.jpa.JPAExecutorException; import org.apache.oozie.executor.jpa.WorkflowActionsRunningGetJPAExecutor; import org.apache.oozie.util.XCallable; import org.apache.oozie.util.XLog; /** * The Action Checker Service queue ActionCheckCommands to check the status of * running actions and CoordActionCheckCommands to check the status of * coordinator actions. The delay between checks on the same action can be * configured. */ public class ActionCheckerService implements Service { public static final String CONF_PREFIX = Service.CONF_PREFIX + "ActionCheckerService."; /** * The frequency at which the ActionCheckService will run. 
*/ public static final String CONF_ACTION_CHECK_INTERVAL = CONF_PREFIX + "action.check.interval"; /** * The time, in seconds, between an ActionCheck for the same action. */ public static final String CONF_ACTION_CHECK_DELAY = CONF_PREFIX + "action.check.delay"; /** * The number of callables to be queued in a batch. */ public static final String CONF_CALLABLE_BATCH_SIZE = CONF_PREFIX + "callable.batch.size"; protected static final String INSTRUMENTATION_GROUP = "actionchecker"; protected static final String INSTR_CHECK_ACTIONS_COUNTER = "checks_wf_actions"; protected static final String INSTR_CHECK_COORD_ACTIONS_COUNTER = "checks_coord_actions"; private static boolean useXCommand = true; /** * {@link ActionCheckRunnable} is the runnable which is scheduled to run and * queue Action checks. */ static class ActionCheckRunnable implements Runnable { private int actionCheckDelay; private List<XCallable<Void>> callables; private StringBuilder msg = null; public ActionCheckRunnable(int actionCheckDelay) { this.actionCheckDelay = actionCheckDelay; } public void run() { XLog.Info.get().clear(); XLog LOG = XLog.getLog(getClass()); msg = new StringBuilder(); try { runWFActionCheck(); runCoordActionCheck(); } catch (CommandException ce) { LOG.error("Unable to run action checks, ", ce); } LOG.debug("QUEUING [{0}] for potential checking", msg.toString()); if (null != callables) { boolean ret = Services.get().get(CallableQueueService.class).queueSerial(callables); if (ret == false) { LOG.warn("Unable to queue the callables commands for CheckerService. " + "Most possibly command queue is full. 
Queue size is :" + Services.get().get(CallableQueueService.class).queueSize()); } callables = null; } } /** * check workflow actions * * @throws CommandException */ private void runWFActionCheck() throws CommandException { JPAService jpaService = Services.get().get(JPAService.class); if (jpaService == null) { throw new CommandException(ErrorCode.E0610); } List<WorkflowActionBean> actions; try { actions = jpaService .execute(new WorkflowActionsRunningGetJPAExecutor(actionCheckDelay)); } catch (JPAExecutorException je) { throw new CommandException(je); } if (actions == null || actions.size() == 0) { return; } msg.append(" WF_ACTIONS : " + actions.size()); for (WorkflowActionBean action : actions) { Services.get().get(InstrumentationService.class).get().incr(INSTRUMENTATION_GROUP, INSTR_CHECK_ACTIONS_COUNTER, 1); if (useXCommand) { queueCallable(new ActionCheckXCommand(action.getId())); } else { queueCallable(new ActionCheckCommand(action.getId())); } } }<|fim▁hole|> /** * check coordinator actions * * @throws CommandException */ private void runCoordActionCheck() throws CommandException { JPAService jpaService = Services.get().get(JPAService.class); if (jpaService == null) { throw new CommandException(ErrorCode.E0610); } List<CoordinatorActionBean> cactions; try { cactions = jpaService.execute(new CoordActionsRunningGetJPAExecutor( actionCheckDelay)); } catch (JPAExecutorException je) { throw new CommandException(je); } if (cactions == null || cactions.size() == 0) { return; } msg.append(" COORD_ACTIONS : " + cactions.size()); for (CoordinatorActionBean caction : cactions) { Services.get().get(InstrumentationService.class).get().incr(INSTRUMENTATION_GROUP, INSTR_CHECK_COORD_ACTIONS_COUNTER, 1); if (useXCommand) { queueCallable(new CoordActionCheckXCommand(caction.getId(), actionCheckDelay)); } else { queueCallable(new CoordActionCheckCommand(caction.getId(), actionCheckDelay)); } } } /** * Adds callables to a list. 
If the number of callables in the list * reaches {@link ActionCheckerService#CONF_CALLABLE_BATCH_SIZE}, the * entire batch is queued and the callables list is reset. * * @param callable the callable to queue. */ private void queueCallable(XCallable<Void> callable) { if (callables == null) { callables = new ArrayList<XCallable<Void>>(); } callables.add(callable); if (callables.size() == Services.get().getConf().getInt(CONF_CALLABLE_BATCH_SIZE, 10)) { boolean ret = Services.get().get(CallableQueueService.class).queueSerial(callables); if (ret == false) { XLog.getLog(getClass()).warn( "Unable to queue the callables commands for CheckerService. " + "Most possibly command queue is full. Queue size is :" + Services.get().get(CallableQueueService.class).queueSize()); } callables = new ArrayList<XCallable<Void>>(); } } } /** * Initializes the Action Check service. * * @param services services instance. */ @Override public void init(Services services) { Configuration conf = services.getConf(); Runnable actionCheckRunnable = new ActionCheckRunnable(conf.getInt(CONF_ACTION_CHECK_DELAY, 600)); services.get(SchedulerService.class).schedule(actionCheckRunnable, 10, conf.getInt(CONF_ACTION_CHECK_INTERVAL, 60), SchedulerService.Unit.SEC); if (Services.get().getConf().getBoolean(USE_XCOMMAND, true) == false) { useXCommand = false; } } /** * Destroy the Action Checker Services. */ @Override public void destroy() { } /** * Return the public interface for the action checker service. * * @return {@link ActionCheckerService}. */ @Override public Class<? extends Service> getInterface() { return ActionCheckerService.class; } }<|fim▁end|>
<|file_name|>require.d.ts<|end_file_name|><|fim▁begin|>/* require-2.1.5.d.ts may be freely distributed under the MIT license. Copyright (c) 2013 Josh Baldwin https://github.com/jbaldwin/require.d.ts Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */ interface RequireError extends Error { /** * Error Type **/ requireType: string; /** * Required modules. **/ requireModules: string[]; /** * Original error, might be null. **/ originalError: Error; } interface RequireShim { /** * List of dependencies. **/ deps?: string[]; /** * Name the module will be exported as. **/ exports?: string; /** * Initialize function with all dependcies passed in, * if the function returns a value then that value is used * as the module export value instead of the object * found via the 'exports' string. * @param dependencies * @return **/ init?: (...dependencies: any[]) => any; } interface RequireConfig { // The root path to use for all module lookups. 
baseUrl?: string; // Path mappings for module names not found directly under // baseUrl. paths?: { [key: string]: string; }; // Dictionary of Shim's. // does not cover case of key->string[] shim?: { [key: string]: RequireShim; }; /** * For the given module prefix, instead of loading the * module with the given ID, substitude a different * module ID. * * @example * requirejs.config({ * map: { * 'some/newmodule': { * 'foo': 'foo1.2' * }, * 'some/oldmodule': { * 'foo': 'foo1.0' * } * } * }); **/ map?: { [id: string]: { [id: string]: string; }; }; /** * AMD configurations, use module.config() to access in * define() functions **/ config?: { [id: string]: {}; }; /** * Configures loading modules from CommonJS packages. **/ packages?: {}; /** * The number of seconds to wait before giving up on loading * a script. The default is 7 seconds. **/ waitSeconds?: number; /** * A name to give to a loading context. This allows require.js * to load multiple versions of modules in a page, as long as * each top-level require call specifies a unique context string. **/ context?: string; /** * An array of dependencies to load. **/ deps?: string[]; /** * A function to pass to require that should be require after * deps have been loaded. * @param modules **/ callback?: (...modules: any[]) => void; /** * If set to true, an error will be thrown if a script loads * that does not call define() or have shim exports string * value that can be checked. **/ enforceDefine?: boolean; /** * If set to true, document.createElementNS() will be used * to create script elements. **/ xhtml?: boolean; /** * Extra query string arguments appended to URLs that RequireJS * uses to fetch resources. Most useful to cachce bust when * the browser or server is not configured correcty. * * @example * urlArgs: "bust= + (new Date()).getTime() **/ urlArgs?: string; /** * Specify the value for the type="" attribute used for script * tags inserted into the document by RequireJS. Default is * "text/javascript". 
To use Firefox's JavasScript 1.8 * features, use "text/javascript;version=1.8". **/ scriptType?: string; } // todo: not sure what to do with this guy interface RequireModule { /** * **/ config(): {}; } interface RequireMap { prefix: string; name: string; parentMap: RequireMap; url: string; originalName: string; fullName: string; } interface Require { /** <|fim▁hole|> /** * Start the main app logic. * Callback is optional. * Can alternatively use deps and callback. * @param modules Required modules to load. **/ (modules: string[]): void; /** * @see Require() * @param ready Called when required modules are ready. **/ (modules: string[], ready: (...modules: any[]) => void ): void; /** * Generate URLs from require module * @param module Module to URL * @return URL string **/ toUrl(module: string): string; /** * On Error override * @param err **/ onError(err: RequireError): void; /** * Undefine a module * @param module Module to undefine. **/ undef(module: string): void; /** * Semi-private function, overload in special instance of undef() **/ onResourceLoad(context: Object, map: RequireMap, depArray: RequireMap[]): void; } interface RequireDefine { /** * Define Simple Name/Value Pairs * @param config Dictionary of Named/Value pairs for the config. **/ (config: { [key: string]: any; }): void; /** * Define function. * @param func: The function module. **/ (func: () => any): void; /** * Define function with dependencies. * @param deps List of dependencies module IDs. * @param ready Callback function when the dependencies are loaded. * callback param deps module dependencies * callback return module definition **/ (deps: string[], ready: (...deps: any[]) => any): void; /** * Define module with simplified CommonJS wrapper. 
* @param ready * callback require requirejs instance * callback exports exports object * callback module module * callback return module definition **/ (ready: (require: Require, exports: { [key: string]: any; }, module: RequireModule) => any): void; /** * Define a module with a name and dependencies. * @param name The name of the module. * @param deps List of dependencies module IDs. * @param ready Callback function when the dependencies are loaded. * callback deps module dependencies * callback return module definition **/ (name: string, deps: string[], ready: (...deps: any[]) => any): void; } // Ambient declarations for 'require' and 'define' declare var require: Require; declare var define: RequireDefine;<|fim▁end|>
* Configure require.js **/ config(config: RequireConfig): Require;
<|file_name|>test_ppsread.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (c) 2013 Adam.Dybbroe # Author(s): # Adam.Dybbroe <[email protected]> # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. """Unittests reading various PPS data files """ import os import unittest from pypps_reader import NwcSafPpsData # The test data cases: # FIXME! class Test(unittest.TestCase): """Unit testing the pps reading""" def setUp(self): """Set up""" return def test_read(self): """Test that it is possible to read pps products""" return <|fim▁hole|> def tearDown(self): """Clean up""" return<|fim▁end|>
<|file_name|>SummonCommand.java<|end_file_name|><|fim▁begin|><|fim▁hole|>package dualcraft.org.server.classic.cmd.impl; /*License ==================== Copyright (c) 2010-2012 Daniel Vidmar We use a modified GNU gpl v 3 license for this. GNU gpl v 3 is included in License.txt The modified part of the license is some additions which state the following: "Redistributions of this project in source or binary must give credit to UnXoft Interactive and DualCraft" "Redistributions of this project in source or binary must modify at least 300 lines of code in order to release an initial version. This will require documentation or proof of the 300 modified lines of code." "Our developers reserve the right to add any additions made to a redistribution of DualCraft into the main project" "Our developers reserver the right if they suspect a closed source software using any code from our project to request to overview the source code of the suspected software. If the owner of the suspected software refuses to allow a devloper to overview the code then we shall/are granted the right to persue legal action against him/her"*/ import dualcraft.org.server.classic.cmd.Command; import dualcraft.org.server.classic.cmd.CommandParameters; import dualcraft.org.server.classic.model.Player; /** * Official /summon command * */ public class SummonCommand extends Command { /** * The instance of this command. */ private static final SummonCommand INSTANCE = new SummonCommand(); /** * Gets the singleton instance of this command. * @return The singleton instance of this command. */ public static SummonCommand getCommand() { return INSTANCE; } public String name() { return "summon"; } /** * Default private constructor. */ private SummonCommand() { /* empty */ } public void execute(Player player, CommandParameters params) { // Player using command is OP? 
if (params.getArgumentCount() == 1) { for (Player other : player.getWorld().getPlayerList().getPlayers()) { if (other.getName().toLowerCase().equals(params.getStringArgument(0).toLowerCase())) { //TODO: Make the player face each other? other.teleport(player.getPosition(), player.getRotation()); return; } } // Player not found player.getActionSender().sendChatMessage(params.getStringArgument(0) + " was not found"); return; } else { player.getActionSender().sendChatMessage("Wrong number of arguments"); } player.getActionSender().sendChatMessage("/summon <name>"); } }<|fim▁end|>
<|file_name|>WMLInputElement.cpp<|end_file_name|><|fim▁begin|>/** * Copyright (C) 2008, 2009 Torch Mobile Inc. All rights reserved. (http://www.torchmobile.com/) * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Library General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Library General Public License for more details. * * You should have received a copy of the GNU Library General Public License * along with this library; see the file COPYING.LIB. If not, write to * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, * Boston, MA 02110-1301, USA. * */ #include "config.h" #if ENABLE(WML) #include "WMLInputElement.h" #include "EventNames.h" #include "FormDataList.h" #include "Frame.h" #include "HTMLNames.h" #include "KeyboardEvent.h" #include "MappedAttribute.h" #include "RenderTextControlSingleLine.h" #include "TextEvent.h" #include "WMLDocument.h" #include "WMLNames.h" #include "WMLPageState.h" namespace WebCore { WMLInputElement::WMLInputElement(const QualifiedName& tagName, Document* doc) : WMLFormControlElement(tagName, doc) , m_isPasswordField(false) , m_isEmptyOk(false) , m_numOfCharsAllowedByMask(0) { } WMLInputElement::~WMLInputElement() { if (m_isPasswordField) document()->unregisterForDocumentActivationCallbacks(this); } static const AtomicString& formatCodes() { DEFINE_STATIC_LOCAL(AtomicString, codes, ("AaNnXxMm")); return codes; } bool WMLInputElement::isKeyboardFocusable(KeyboardEvent*) const { return WMLFormControlElement::isFocusable(); } bool WMLInputElement::isMouseFocusable() const { return WMLFormControlElement::isFocusable(); } void WMLInputElement::dispatchFocusEvent() { 
InputElement::dispatchFocusEvent(this, this); WMLElement::dispatchFocusEvent(); } void WMLInputElement::dispatchBlurEvent() { // Firstly check if it is allowed to leave this input field String val = value(); if ( //SAMSUNG_CHANGES_BEGGIN /*(!m_isEmptyOk && val.isEmpty()) || */ //SAMSUNG_CHANGES_END !isConformedToInputMask(val)) { updateFocusAppearance(true); return; } // update the name variable of WML input elmenet String nameVariable = formControlName(); if (!nameVariable.isEmpty()) wmlPageStateForDocument(document())->storeVariable(nameVariable, val); InputElement::dispatchBlurEvent(this, this); WMLElement::dispatchBlurEvent(); } void WMLInputElement::updateFocusAppearance(bool restorePreviousSelection) { InputElement::updateFocusAppearance(m_data, this, this, restorePreviousSelection); } void WMLInputElement::aboutToUnload() { InputElement::aboutToUnload(this, this); } int WMLInputElement::size() const { return m_data.size(); } const AtomicString& WMLInputElement::formControlType() const { // needs to be lowercase according to DOM spec if (m_isPasswordField) { DEFINE_STATIC_LOCAL(const AtomicString, password, ("password")); return password; } DEFINE_STATIC_LOCAL(const AtomicString, text, ("text")); return text; } const AtomicString& WMLInputElement::formControlName() const { return m_data.name(); } const String& WMLInputElement::suggestedValue() const { return m_data.suggestedValue(); } String WMLInputElement::value() const { String value = m_data.value(); if (value.isNull()) value = constrainValue(getAttribute(HTMLNames::valueAttr)); return value; } void WMLInputElement::setValue(const String& value, bool sendChangeEvent) { setFormControlValueMatchesRenderer(false); m_data.setValue(constrainValue(value)); if (inDocument()) document()->updateStyleIfNeeded(); if (renderer()) renderer()->updateFromElement(); setNeedsStyleRecalc(); unsigned max = m_data.value().length(); if (document()->focusedNode() == this) InputElement::updateSelectionRange(this, this, max, 
max); else cacheSelection(max, max); InputElement::notifyFormStateChanged(this); } // SAMSUNG_WML_FIXES+ // wml/struct/control/input/format/1 void WMLInputElement::setValuePreserveSelectionPos(const String& value) { //InputElement::updatePlaceholderVisibility(m_data, this, this); setFormControlValueMatchesRenderer(false); m_data.setValue(constrainValue(value)); if (inDocument()) document()->updateStyleIfNeeded(); if (renderer()) renderer()->updateFromElement(); setNeedsStyleRecalc(); InputElement::notifyFormStateChanged(this); } // SAMSUNG_WML_FIXES- void WMLInputElement::setValueForUser(const String& value) { /* InputElement class defines pure virtual function 'setValueForUser', which will be useful only in HTMLInputElement. Do nothing in 'WMLInputElement'. */ } void WMLInputElement::setValueFromRenderer(const String& value) { InputElement::setValueFromRenderer(m_data, this, this, value); } bool WMLInputElement::saveFormControlState(String& result) const { if (m_isPasswordField) return false; result = value(); return true; } void WMLInputElement::restoreFormControlState(const String& state) { ASSERT(!m_isPasswordField); // should never save/restore password fields setValue(state, true); } void WMLInputElement::select() { if (RenderTextControl* r = toRenderTextControl(renderer())) r->select(); } void WMLInputElement::accessKeyAction(bool) { // should never restore previous selection here focus(false); } void WMLInputElement::parseMappedAttribute(MappedAttribute* attr) { if (attr->name() == HTMLNames::nameAttr) m_data.setName(parseValueForbiddingVariableReferences(attr->value())); else if (attr->name() == HTMLNames::typeAttr) { String type = parseValueForbiddingVariableReferences(attr->value()); m_isPasswordField = (type == "password"); } else if (attr->name() == HTMLNames::valueAttr) { // We only need to setChanged if the form is looking at the default value right now. 
if (m_data.value().isNull()) setNeedsStyleRecalc(); setFormControlValueMatchesRenderer(false); } else if (attr->name() == HTMLNames::maxlengthAttr) InputElement::parseMaxLengthAttribute(m_data, this, this, attr); else if (attr->name() == HTMLNames::sizeAttr) InputElement::parseSizeAttribute(m_data, this, attr); else if (attr->name() == WMLNames::formatAttr) m_formatMask = validateInputMask(parseValueForbiddingVariableReferences(attr->value())); else if (attr->name() == WMLNames::emptyokAttr) m_isEmptyOk = (attr->value() == "true"); else WMLElement::parseMappedAttribute(attr); // FIXME: Handle 'accesskey' attribute // FIXME: Handle 'tabindex' attribute // FIXME: Handle 'title' attribute } void WMLInputElement::copyNonAttributeProperties(const Element* source) { const WMLInputElement* sourceElement = static_cast<const WMLInputElement*>(source); m_data.setValue(sourceElement->m_data.value()); WMLElement::copyNonAttributeProperties(source); } RenderObject* WMLInputElement::createRenderer(RenderArena* arena, RenderStyle*) { return new (arena) RenderTextControlSingleLine(this, false); } void WMLInputElement::detach() { WMLElement::detach(); setFormControlValueMatchesRenderer(false); } bool WMLInputElement::appendFormData(FormDataList& encoding, bool) { if (formControlName().isEmpty()) return false; encoding.appendData(formControlName(), value()); return true; } void WMLInputElement::reset() { setValue(String(), true); } void WMLInputElement::defaultEventHandler(Event* evt) { bool clickDefaultFormButton = false; String filteredString; if (evt->type() == eventNames().textInputEvent && evt->isTextEvent()) { TextEvent* textEvent = static_cast<TextEvent*>(evt); if (textEvent->data() == "\n") clickDefaultFormButton = true; // SAMSUNG_WML_FIXES+ // wml/struct/control/input/format/1 // else if (renderer() && !isConformedToInputMask(textEvent->data()[0], toRenderTextControl(renderer())->text().length() + 1)) else if (renderer()) { WMLElement::defaultEventHandler(evt); if 
(evt->defaultHandled()) { filteredString = filterInvalidChars((toRenderTextControl(renderer()))->text()); setValuePreserveSelectionPos(filteredString); return; } } // SAMSUNG_WML_FIXES- } if (evt->type() == eventNames().keydownEvent && evt->isKeyboardEvent() && focused() && document()->frame() && document()->frame()->doTextFieldCommandFromEvent(this, static_cast<KeyboardEvent*>(evt))) { evt->setDefaultHandled(); return; } // Let the key handling done in EventTargetNode take precedence over the event handling here for editable text fields if (!clickDefaultFormButton) { WMLElement::defaultEventHandler(evt); if (evt->defaultHandled()) return; } // Use key press event here since sending simulated mouse events // on key down blocks the proper sending of the key press event. if (evt->type() == eventNames().keypressEvent && evt->isKeyboardEvent()) { // Simulate mouse click on the default form button for enter for these types of elements. if (static_cast<KeyboardEvent*>(evt)->charCode() == '\r') clickDefaultFormButton = true; } if (clickDefaultFormButton) { // Fire onChange for text fields. RenderObject* r = renderer(); if (r && toRenderTextControl(r)->wasChangedSinceLastChangeEvent()) { dispatchEvent(Event::create(eventNames().changeEvent, true, false)); // Refetch the renderer since arbitrary JS code run during onchange can do anything, including destroying it. 
r = renderer(); if (r) toRenderTextControl(r)->setChangedSinceLastChangeEvent(false); } evt->setDefaultHandled(); return; } if (evt->isBeforeTextInsertedEvent()) InputElement::handleBeforeTextInsertedEvent(m_data, this, this, evt); if (renderer() && (evt->isMouseEvent() || evt->isDragEvent() || evt->isWheelEvent() || evt->type() == eventNames().blurEvent || evt->type() == eventNames().focusEvent)) toRenderTextControlSingleLine(renderer())->forwardEvent(evt);<|fim▁hole|>void WMLInputElement::cacheSelection(int start, int end) { m_data.setCachedSelectionStart(start); m_data.setCachedSelectionEnd(end); } String WMLInputElement::constrainValue(const String& proposedValue) const { return InputElement::sanitizeUserInputValue(this, proposedValue, m_data.maxLength()); } void WMLInputElement::documentDidBecomeActive() { ASSERT(m_isPasswordField); reset(); } void WMLInputElement::willMoveToNewOwnerDocument() { // Always unregister for cache callbacks when leaving a document, even if we would otherwise like to be registered if (m_isPasswordField) document()->unregisterForDocumentActivationCallbacks(this); WMLElement::willMoveToNewOwnerDocument(); } void WMLInputElement::didMoveToNewOwnerDocument() { if (m_isPasswordField) document()->registerForDocumentActivationCallbacks(this); WMLElement::didMoveToNewOwnerDocument(); } void WMLInputElement::initialize() { String nameVariable = formControlName(); String variableValue; WMLPageState* pageSate = wmlPageStateForDocument(document()); ASSERT(pageSate); if (!nameVariable.isEmpty()) variableValue = pageSate->getVariable(nameVariable); if (variableValue.isEmpty() || !isConformedToInputMask(variableValue)) { String val = value(); if (isConformedToInputMask(val)) variableValue = val; else variableValue = ""; pageSate->storeVariable(nameVariable, variableValue); } setValue(variableValue, true); if (!hasAttribute(WMLNames::emptyokAttr)) { if (m_formatMask.isEmpty() || // check if the format codes is just "*f" (m_formatMask.length() == 2 
&& m_formatMask[0] == '*' && formatCodes().find(m_formatMask[1]) != -1)) m_isEmptyOk = true; } } String WMLInputElement::validateInputMask(const String& inputMask) { bool isValid = true; bool hasWildcard = false; unsigned escapeCharCount = 0; unsigned maskLength = inputMask.length(); UChar formatCode; for (unsigned i = 0; i < maskLength; ++i) { formatCode = inputMask[i]; if (formatCodes().find(formatCode) == -1) { if (formatCode == '*' || (WTF::isASCIIDigit(formatCode) && formatCode != '0')) { // validate codes which ends with '*f' or 'nf' formatCode = inputMask[++i]; if ((i + 1 != maskLength) || formatCodes().find(formatCode) == -1) { isValid = false; break; } hasWildcard = true; } else if (formatCode == '\\') { //skip over the next mask character ++i; ++escapeCharCount; } else { isValid = false; break; } } } if (!isValid) return String(); // calculate the number of characters allowed to be entered by input mask m_numOfCharsAllowedByMask = maskLength; if (escapeCharCount) m_numOfCharsAllowedByMask -= escapeCharCount; if (hasWildcard) { formatCode = inputMask[maskLength - 2]; if (formatCode == '*') m_numOfCharsAllowedByMask = m_data.maxLength(); else { unsigned leftLen = String(&formatCode).toInt(); m_numOfCharsAllowedByMask = leftLen + m_numOfCharsAllowedByMask - 2; } } return inputMask; } // SAMSUNG_WML_FIXES+ String WMLInputElement::filterInvalidChars(const String& inputChars) { String filteredString; unsigned charCount = 0; for (unsigned i = 0; i < inputChars.length(); ++i) { if (isConformedToInputMask(inputChars[i], charCount+1, false)) { filteredString.append(inputChars[i]); charCount++; } } return filteredString; } // SAMSUNG_WML_FIXES- bool WMLInputElement::isConformedToInputMask(const String& inputChars) { for (unsigned i = 0; i < inputChars.length(); ++i) if (!isConformedToInputMask(inputChars[i], i + 1, false)) return false; return true; } bool WMLInputElement::isConformedToInputMask(UChar inChar, unsigned inputCharCount, bool isUserInput) { if 
(m_formatMask.isEmpty()) return true; if (inputCharCount > m_numOfCharsAllowedByMask) return false; unsigned maskIndex = 0; if (isUserInput) { unsigned cursorPosition = 0; if (renderer()) cursorPosition = toRenderTextControl(renderer())->selectionStart(); else cursorPosition = m_data.cachedSelectionStart(); maskIndex = cursorPositionToMaskIndex(cursorPosition); } else maskIndex = cursorPositionToMaskIndex(inputCharCount - 1); bool ok = true; UChar mask = m_formatMask[maskIndex]; // match the inputed character with input mask switch (mask) { case 'A': ok = !WTF::isASCIIDigit(inChar) && !WTF::isASCIILower(inChar) && WTF::isASCIIPrintable(inChar); break; case 'a': ok = !WTF::isASCIIDigit(inChar) && !WTF::isASCIIUpper(inChar) && WTF::isASCIIPrintable(inChar); break; case 'N': ok = WTF::isASCIIDigit(inChar); break; case 'n': ok = !WTF::isASCIIAlpha(inChar) && WTF::isASCIIPrintable(inChar); break; case 'X': ok = !WTF::isASCIILower(inChar) && WTF::isASCIIPrintable(inChar); break; case 'x': ok = !WTF::isASCIIUpper(inChar) && WTF::isASCIIPrintable(inChar); break; case 'M': ok = WTF::isASCIIPrintable(inChar); break; case 'm': ok = WTF::isASCIIPrintable(inChar); break; default: ok = (mask == inChar); break; } return ok; } unsigned WMLInputElement::cursorPositionToMaskIndex(unsigned cursorPosition) { UChar mask; int index = -1; do { mask = m_formatMask[++index]; if (mask == '\\') ++index; else if (mask == '*' || (WTF::isASCIIDigit(mask) && mask != '0')) { index = m_formatMask.length() - 1; break; } } while (cursorPosition--); return index; } } #endif<|fim▁end|>
}
<|file_name|>utils.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ use rustc::front::map as ast_map; use rustc::lint::LateContext; use rustc::middle::def; use rustc::middle::def_id::DefId; use rustc_front::hir; use syntax::ast; use syntax::attr::mark_used; use syntax::ptr::P; /// Matches a type with a provided string, and returns its type parameters if successful /// /// Try not to use this for types defined in crates you own, use match_lang_ty instead (for lint passes) pub fn match_ty_unwrap<'a>(ty: &'a ast::Ty, segments: &[&str]) -> Option<&'a [P<ast::Ty>]> { match ty.node { ast::TyPath(_, ast::Path { segments: ref seg, .. }) => { // So hir::Path isn't the full path, just the tokens that were provided. // I could muck around with the maps and find the full path // however the more efficient way is to simply reverse the iterators and zip them // which will compare them in reverse until one of them runs out of segments if seg.iter().rev().zip(segments.iter().rev()).all(|(a, b)| a.identifier.name.as_str() == *b) { match seg.last() { Some(&ast::PathSegment { parameters: ast::AngleBracketedParameters(ref a), .. }) => { Some(&a.types) } _ => None } } else { None } }, _ => None } } /// Checks if a type has a #[servo_lang = "str"] attribute pub fn match_lang_ty(cx: &LateContext, ty: &hir::Ty, value: &str) -> bool { match ty.node { hir::TyPath(..) => {}, _ => return false, } let def_id = match cx.tcx.def_map.borrow().get(&ty.id) { Some(&def::PathResolution { base_def: def::DefTy(def_id, _), .. 
}) => def_id, _ => return false, }; match_lang_did(cx, def_id, value) } pub fn match_lang_did(cx: &LateContext, did: DefId, value: &str) -> bool { cx.tcx.get_attrs(did).iter().any(|attr| { match attr.node.value.node { ast::MetaNameValue(ref name, ref val) if &**name == "servo_lang" => { match val.node { ast::LitStr(ref v, _) if &**v == value => { mark_used(attr); true }, _ => false, }<|fim▁hole|> } _ => false, } }) } // Determines if a block is in an unsafe context so that an unhelpful // lint can be aborted. pub fn unsafe_context(map: &ast_map::Map, id: ast::NodeId) -> bool { match map.find(map.get_parent(id)) { Some(ast_map::NodeImplItem(itm)) => { match itm.node { hir::MethodImplItem(ref sig, _) => sig.unsafety == hir::Unsafety::Unsafe, _ => false } }, Some(ast_map::NodeItem(itm)) => { match itm.node { hir::ItemFn(_, style, _, _, _, _) => match style { hir::Unsafety::Unsafe => true, _ => false, }, _ => false, } } _ => false // There are probably a couple of other unsafe cases we don't care to lint, those will need // to be added. } } /// check if a DefId's path matches the given absolute type path /// usage e.g. with /// `match_def_path(cx, id, &["core", "option", "Option"])` pub fn match_def_path(cx: &LateContext, def_id: DefId, path: &[&str]) -> bool { cx.tcx.with_path(def_id, |iter| iter.map(|elem| elem.name()) .zip(path.iter()).all(|(nm, p)| &nm.as_str() == p)) }<|fim▁end|>
<|file_name|>camera.py<|end_file_name|><|fim▁begin|>"""This component provides support to the Ring Door Bell camera.""" import asyncio from datetime import timedelta import logging import voluptuous as vol from homeassistant.components.camera import PLATFORM_SCHEMA, Camera from homeassistant.components.ffmpeg import DATA_FFMPEG from homeassistant.const import ATTR_ATTRIBUTION, CONF_SCAN_INTERVAL from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_aiohttp_proxy_stream from homeassistant.util import dt as dt_util from . import ATTRIBUTION, DATA_RING, NOTIFICATION_ID CONF_FFMPEG_ARGUMENTS = 'ffmpeg_arguments' FORCE_REFRESH_INTERVAL = timedelta(minutes=45) _LOGGER = logging.getLogger(__name__) NOTIFICATION_TITLE = 'Ring Camera Setup' SCAN_INTERVAL = timedelta(seconds=90) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Optional(CONF_FFMPEG_ARGUMENTS): cv.string, vol.Optional(CONF_SCAN_INTERVAL, default=SCAN_INTERVAL): cv.time_period, }) def setup_platform(hass, config, add_entities, discovery_info=None): """Set up a Ring Door Bell and StickUp Camera.""" ring = hass.data[DATA_RING] cams = [] cams_no_plan = [] for camera in ring.doorbells: if camera.has_subscription: cams.append(RingCam(hass, camera, config)) else: cams_no_plan.append(camera) for camera in ring.stickup_cams: if camera.has_subscription: cams.append(RingCam(hass, camera, config)) else: cams_no_plan.append(camera) # show notification for all cameras without an active subscription if cams_no_plan: cameras = str(', '.join([camera.name for camera in cams_no_plan])) err_msg = '''A Ring Protect Plan is required for the''' \ ''' following cameras: {}.'''.format(cameras) _LOGGER.error(err_msg) hass.components.persistent_notification.create( 'Error: {}<br />' 'You will need to restart hass after fixing.' 
''.format(err_msg), title=NOTIFICATION_TITLE, notification_id=NOTIFICATION_ID) add_entities(cams, True) return True class RingCam(Camera): """An implementation of a Ring Door Bell camera.""" def __init__(self, hass, camera, device_info): """Initialize a Ring Door Bell camera.""" super(RingCam, self).__init__() self._camera = camera self._hass = hass self._name = self._camera.name self._ffmpeg = hass.data[DATA_FFMPEG] self._ffmpeg_arguments = device_info.get(CONF_FFMPEG_ARGUMENTS) self._last_video_id = self._camera.last_recording_id self._video_url = self._camera.recording_url(self._last_video_id) self._utcnow = dt_util.utcnow() self._expires_at = FORCE_REFRESH_INTERVAL + self._utcnow @property def name(self): """Return the name of this camera.""" return self._name @property def unique_id(self): """Return a unique ID.""" return self._camera.id @property def device_state_attributes(self): """Return the state attributes.""" return { ATTR_ATTRIBUTION: ATTRIBUTION, 'device_id': self._camera.id, 'firmware': self._camera.firmware, 'kind': self._camera.kind, 'timezone': self._camera.timezone, 'type': self._camera.family, 'video_url': self._video_url, } async def async_camera_image(self): """Return a still image response from the camera.""" from haffmpeg.tools import ImageFrame, IMAGE_JPEG ffmpeg = ImageFrame(self._ffmpeg.binary, loop=self.hass.loop) if self._video_url is None: return image = await asyncio.shield(ffmpeg.get_image( self._video_url, output_format=IMAGE_JPEG, extra_cmd=self._ffmpeg_arguments)) return image async def handle_async_mjpeg_stream(self, request): """Generate an HTTP MJPEG stream from the camera.""" from haffmpeg.camera import CameraMjpeg if self._video_url is None: return stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop) await stream.open_camera( self._video_url, extra_cmd=self._ffmpeg_arguments)<|fim▁hole|> self.hass, request, stream_reader, self._ffmpeg.ffmpeg_stream_content_type) finally: await stream.close() @property def 
should_poll(self): """Update the image periodically.""" return True def update(self): """Update camera entity and refresh attributes.""" _LOGGER.debug("Checking if Ring DoorBell needs to refresh video_url") self._camera.update() self._utcnow = dt_util.utcnow() try: last_event = self._camera.history(limit=1)[0] except (IndexError, TypeError): return last_recording_id = last_event['id'] video_status = last_event['recording']['status'] if video_status == 'ready' and \ (self._last_video_id != last_recording_id or self._utcnow >= self._expires_at): video_url = self._camera.recording_url(last_recording_id) if video_url: _LOGGER.info("Ring DoorBell properties refreshed") # update attributes if new video or if URL has expired self._last_video_id = last_recording_id self._video_url = video_url self._expires_at = FORCE_REFRESH_INTERVAL + self._utcnow<|fim▁end|>
try: stream_reader = await stream.get_reader() return await async_aiohttp_proxy_stream(
<|file_name|>Dropdown.Props.js<|end_file_name|><|fim▁begin|>"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); var DropdownMenuItemType;<|fim▁hole|> DropdownMenuItemType[DropdownMenuItemType["Header"] = 2] = "Header"; })(DropdownMenuItemType = exports.DropdownMenuItemType || (exports.DropdownMenuItemType = {})); //# sourceMappingURL=Dropdown.Props.js.map<|fim▁end|>
(function (DropdownMenuItemType) { DropdownMenuItemType[DropdownMenuItemType["Normal"] = 0] = "Normal"; DropdownMenuItemType[DropdownMenuItemType["Divider"] = 1] = "Divider";
<|file_name|>test_sample_app.py<|end_file_name|><|fim▁begin|>import asyncio import random import names from chilero.web.test import asynctest from chilero.pg import Resource from chilero.pg.test import TestCase, TEST_DB_SUFFIX import json class Friends(Resource): order_by = 'name ASC' search_fields = ['name'] allowed_fields = ['name', 'meta'] required_fields = ['name'] allow_order_by = ['name'] def serialize_object(self, row): return dict( id=row[0], name=row[1], meta=row[2], url=self.get_object_url(row[0]) ) def serialize_list_object(self, row): return dict( name=row[1], url=self.get_object_url(row[0]) ) class Friends2(Resource): order_by = 'name ASC' search_fields = ['name'] allowed_fields = ['name', 'meta'] required_fields = ['name'] allow_order_by = ['name'] table_name = 'friends' def serialize_object(self, row): return dict( id=row[0], name=row[1], meta=row[2], url=self.get_object_url(row[0]) )<|fim▁hole|> name=row[1], url=self.get_object_url(row[0]) ) def index(self): condition = dict(name='pedro', meta='{}') index = yield from self.do_index(condition) return self.response(index) class BaseTestCase(TestCase): settings = dict( db_url='postgres://postgres@localhost:5432/chilero_pg_{}'.format( TEST_DB_SUFFIX ) ) routes = [ ['/friends', Friends], ['/friends2', Friends2] ] @asyncio.coroutine def _create_friend(self, **kwargs): defaults = dict( name=self._random_string(), meta=json.dumps(dict(name='name1', data2='data2')) ) return( yield from self._create_and_get('/friends', kwargs, defaults) ) class TestAdvancedOptions(BaseTestCase): @asyncio.coroutine def _a_lot_of_friends(self): # create a lot of friends all_names = [] for i in range(100): name = names.get_full_name()+str(i) all_names.append(name) _, f = yield from self._create_friend(name=name) t = yield from _.text() print(t) assert _.status==201 _.close() return all_names @asynctest def test_pagination(self): yield from self._a_lot_of_friends() # list with default values # page 1 r = yield from 
self._get_json(self.full_url('/friends')) assert r['data']['count'] >= 100 assert r['data']['prev'] == None assert 'offset=20' in r['data']['next'] assert 'limit=20' in r['data']['next'] assert len(r['index']) == r['data']['length'] # page 2 r = yield from self._get_json(r['data']['next']) assert 'offset=0' in r['data']['prev'] assert 'offset=40' in r['data']['next'] assert len(r['index']) == r['data']['length'] assert len(r['index'][0].keys()) == 2 @asynctest def test_pagination_no_limit(self): yield from self._a_lot_of_friends() # list with no limit r = yield from self._get_json(self.full_url('/friends?limit=0')) assert r['data']['count'] >= 100 assert r['data']['prev'] == None assert r['data']['next'] == None assert r['data']['length'] == r['data']['count'] assert len(r['index']) == r['data']['count'] @asynctest def test_search_pagination(self): rnames = list((yield from self._a_lot_of_friends())) rname = random.choice(rnames).split()[0] for i in range(5): name = '{} {}'.format(rname, names.get_last_name()) _, friend = yield from self._create_friend(name=name) _.close() rname = rname.lower() r = yield from self._get_json( self.full_url('/friends?search={}&limit=1'.format(rname)) ) assert r['data']['count'] >= 1 assert rname in r['data']['next'] while r['data']['next']: r = yield from self._get_json(r['data']['next']) if r['data']['next'] is not None: assert rname in r['data']['next'] assert rname in r['data']['prev'] rname.lower() in r['index'][0]['name'].lower() @asynctest def test_oreder_by_ASC(self): yield from self._a_lot_of_friends() name = 'Abel Barrera' _, friend = yield from self._create_friend(name=name) _.close() url = self.full_url('/friends?order_by={}'.format('name')) resp = yield from self._get_json(url) assert resp['index'][0]['name'].startswith('A') @asynctest def test_oreder_by_400(self): yield from self._a_lot_of_friends() url = self.full_url('/friends?order_by={}'.format('other')) resp = yield from self._get(url) assert resp.status == 400 
@asynctest def test_oreder_by_desc(self): yield from self._a_lot_of_friends() defaults = dict( name='Zarahi zuna' ) resp = yield from self._create('/friends', defaults) assert resp.status == 201 resp.close() url = self.full_url('/friends?order_by={}'.format('-name')) resp = yield from self._get_json(url) assert resp['index'][0]['name'].startswith('Z') class TestBasic(BaseTestCase): # Test common REST actions @asynctest def test_index(self): resp = yield from self._get(self.full_url('/friends')) assert resp.status == 200 resp.close() @asynctest def test_index_json(self): resp = yield from self._index('/friends') assert isinstance(resp, dict) assert 'index' in resp @asynctest def test_index_json_condition(self): resp = yield from self._index('/friends2') assert isinstance(resp, dict) assert 'index' in resp @asynctest def test_create(self): name = self._random_string() _, friend = yield from self._create_friend(name=name) assert _.status == 201 _.close() assert friend['name'] == name assert len(friend.keys()) == 4 efriend = yield from self._delete(friend['url']) assert efriend.status==200 @asynctest def test_create_error(self): _, friend = yield from self._create_friend(wrong_field=123) assert _.status == 400 _.close() @asynctest def test_create_conflict(self): name = names.get_full_name() _, friend = yield from self._create_friend(name=name) _.close() _, friend = yield from self._create_friend(name=name) assert _.status == 409 _.close() @asynctest def test_update(self): _, friend = yield from self._create_friend() _.close() new_name = self._random_string() presp = yield from self._patch(friend['url'], name=new_name) assert presp.status == 204 presp.close() updated_friend = yield from self._get_json(friend['url']) assert updated_friend['body']['name'] == new_name @asynctest def test_search(self): name = 'some known name' _, friend = yield from self._create_friend(name=name) _.close() results = yield from self._search('/friends', terms='known name') assert 
len(results['index']) > 0 assert results['index'][0]['name'] == name @asynctest def test_view_404(self): resp = yield from self._get(self.full_url('/friends/999999')) assert resp.status == 404 resp.close() @asynctest def test_update_400(self): _, friend = yield from self._create_friend() _.close() new_name = self._random_string() presp = yield from self._patch(friend['url'], names=new_name) assert presp.status == 400 presp.close() @asynctest def test_update_empty_required_400(self): _, friend = yield from self._create_friend() _.close() new_name = " " presp = yield from self._patch(friend['url'], name=new_name) assert presp.status == 400 presp.close() @asynctest def test_update_None_required_400(self): _, friend = yield from self._create_friend() _.close() new_name = None presp = yield from self._patch(friend['url'], name=new_name) assert presp.status == 400 presp.close()<|fim▁end|>
def serialize_list_object(self, row): return dict(
<|file_name|>dohtml.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # Copyright 1999-2013 Gentoo Foundation # Distributed under the terms of the GNU General Public License v2 # # Typical usage: # dohtml -r docs/* # - put all files and directories in docs into /usr/share/doc/${PF}/html # dohtml foo.html # - put foo.html into /usr/share/doc/${PF}/html # # # Detailed usage: # dohtml <list-of-files> # - will install the files in the list of files (space-separated list) into # /usr/share/doc/${PF}/html, provided the file ends in .css, .gif, .htm, # .html, .jpeg, .jpg, .js or .png. # dohtml -r <list-of-files-and-directories> # - will do as 'dohtml', but recurse into all directories, as long as the # directory name is not CVS # dohtml -A jpe,java [-r] <list-of-files[-and-directories]> # - will do as 'dohtml' but add .jpe,.java (default filter list is # added to your list) # dohtml -a png,gif,html,htm [-r] <list-of-files[-and-directories]> # - will do as 'dohtml' but filter on .png,.gif,.html,.htm (default filter # list is ignored) # dohtml -x CVS,SCCS,RCS -r <list-of-files-and-directories> # - will do as 'dohtml -r', but ignore directories named CVS, SCCS, RCS # from __future__ import print_function import os import shutil import sys from portage.util import normalize_path # Change back to original cwd _after_ all imports (bug #469338). 
os.chdir(os.environ["__PORTAGE_HELPER_CWD"]) def dodir(path): try: os.makedirs(path, 0o755) except OSError: if not os.path.isdir(path): raise os.chmod(path, 0o755) def dofile(src,dst): shutil.copy(src, dst) os.chmod(dst, 0o644) def eqawarn(lines): cmd = "source '%s/isolated-functions.sh' ; " % \ os.environ["PORTAGE_BIN_PATH"] for line in lines: cmd += "eqawarn \"%s\" ; " % line os.spawnlp(os.P_WAIT, "bash", "bash", "-c", cmd) skipped_directories = [] skipped_files = [] warn_on_skipped_files = os.environ.get("PORTAGE_DOHTML_WARN_ON_SKIPPED_FILES") is not None unwarned_skipped_extensions = os.environ.get("PORTAGE_DOHTML_UNWARNED_SKIPPED_EXTENSIONS", "").split() unwarned_skipped_files = os.environ.get("PORTAGE_DOHTML_UNWARNED_SKIPPED_FILES", "").split() def install(basename, dirname, options, prefix=""): fullpath = basename if prefix: fullpath = os.path.join(prefix, fullpath) if dirname: fullpath = os.path.join(dirname, fullpath) if options.DOCDESTTREE: desttree = options.DOCDESTTREE else: desttree = "html" destdir = os.path.join(options.ED, "usr", "share", "doc", options.PF.lstrip(os.sep), desttree.lstrip(os.sep), options.doc_prefix.lstrip(os.sep), prefix).rstrip(os.sep) <|fim▁hole|> if not os.path.exists(fullpath): sys.stderr.write("!!! 
dohtml: %s does not exist\n" % fullpath) return False elif os.path.isfile(fullpath): ext = os.path.splitext(basename)[1][1:] if ext in options.allowed_exts or basename in options.allowed_files: dodir(destdir) dofile(fullpath, os.path.join(destdir, basename)) elif warn_on_skipped_files and ext not in unwarned_skipped_extensions and basename not in unwarned_skipped_files: skipped_files.append(fullpath) elif options.recurse and os.path.isdir(fullpath) and \ basename not in options.disallowed_dirs: for i in os.listdir(fullpath): pfx = basename if prefix: pfx = os.path.join(prefix, pfx) install(i, dirname, options, pfx) elif not options.recurse and os.path.isdir(fullpath): global skipped_directories skipped_directories.append(fullpath) return False else: return False return True class OptionsClass: def __init__(self): self.PF = "" self.ED = "" self.DOCDESTTREE = "" if "PF" in os.environ: self.PF = os.environ["PF"] if self.PF: self.PF = normalize_path(self.PF) if "force-prefix" not in os.environ.get("FEATURES", "").split() and \ os.environ.get("EAPI", "0") in ("0", "1", "2"): self.ED = os.environ.get("D", "") else: self.ED = os.environ.get("ED", "") if self.ED: self.ED = normalize_path(self.ED) if "_E_DOCDESTTREE_" in os.environ: self.DOCDESTTREE = os.environ["_E_DOCDESTTREE_"] if self.DOCDESTTREE: self.DOCDESTTREE = normalize_path(self.DOCDESTTREE) self.allowed_exts = ['css', 'gif', 'htm', 'html', 'jpeg', 'jpg', 'js', 'png'] if os.environ.get("EAPI", "0") in ("4-python", "5-progress"): self.allowed_exts += ['ico', 'svg', 'xhtml', 'xml'] self.allowed_files = [] self.disallowed_dirs = ['CVS'] self.recurse = False self.verbose = False self.doc_prefix = "" def print_help(): opts = OptionsClass() print("dohtml [-a .foo,.bar] [-A .foo,.bar] [-f foo,bar] [-x foo,bar]") print(" [-r] [-V] <file> [file ...]") print() print(" -a Set the list of allowed to those that are specified.") print(" Default:", ",".join(opts.allowed_exts)) print(" -A Extend the list of allowed file types.") 
print(" -f Set list of allowed extensionless file names.") print(" -x Set directories to be excluded from recursion.") print(" Default:", ",".join(opts.disallowed_dirs)) print(" -p Set a document prefix for installed files (empty by default).") print(" -r Install files and directories recursively.") print(" -V Be verbose.") print() def parse_args(): options = OptionsClass() args = [] x = 1 while x < len(sys.argv): arg = sys.argv[x] if arg in ["-h","-r","-V"]: if arg == "-h": print_help() sys.exit(0) elif arg == "-r": options.recurse = True elif arg == "-V": options.verbose = True elif sys.argv[x] in ["-A","-a","-f","-x","-p"]: x += 1 if x == len(sys.argv): print_help() sys.exit(0) elif arg == "-p": options.doc_prefix = sys.argv[x] if options.doc_prefix: options.doc_prefix = normalize_path(options.doc_prefix) else: values = sys.argv[x].split(",") if arg == "-A": options.allowed_exts.extend(values) elif arg == "-a": options.allowed_exts = values elif arg == "-f": options.allowed_files = values elif arg == "-x": options.disallowed_dirs = values else: args.append(sys.argv[x]) x += 1 return (options, args) def main(): (options, args) = parse_args() if options.verbose: print("Allowed extensions:", options.allowed_exts) print("Document prefix : '" + options.doc_prefix + "'") print("Allowed files :", options.allowed_files) success = False endswith_slash = (os.sep, os.sep + ".") for x in args: trailing_slash = x.endswith(endswith_slash) x = normalize_path(x) if trailing_slash: # Modify behavior of basename and dirname # as noted in bug #425214, causing foo/ to # behave similarly to the way that foo/* # behaves. 
x += os.sep basename = os.path.basename(x) dirname = os.path.dirname(x) success |= install(basename, dirname, options) for x in skipped_directories: eqawarn(["QA Notice: dohtml on directory '%s' without recursion option" % x]) for x in skipped_files: eqawarn(["dohtml: skipped file '%s'" % x]) if success: retcode = 0 else: retcode = 1 sys.exit(retcode) if __name__ == "__main__": main()<|fim▁end|>
<|file_name|>constants.py<|end_file_name|><|fim▁begin|># vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright 2012 OpenStack Foundation. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # service type constants: CORE = "CORE" DUMMY = "DUMMY" LOADBALANCER = "LOADBALANCER" FIREWALL = "FIREWALL" VPN = "VPN" METERING = "METERING" L3_ROUTER_NAT = "L3_ROUTER_NAT" #maps extension alias to service type EXT_TO_SERVICE_MAPPING = { 'dummy': DUMMY, 'lbaas': LOADBALANCER, 'fwaas': FIREWALL, 'vpnaas': VPN, 'metering': METERING, 'router': L3_ROUTER_NAT } # TODO(salvatore-orlando): Move these (or derive them) from conf file ALLOWED_SERVICES = [CORE, DUMMY, LOADBALANCER, FIREWALL, VPN, METERING, L3_ROUTER_NAT] COMMON_PREFIXES = { CORE: "", DUMMY: "/dummy_svc", LOADBALANCER: "/lb", FIREWALL: "/fw", VPN: "/vpn", METERING: "/metering", L3_ROUTER_NAT: "", } # Service operation status constants ACTIVE = "ACTIVE" DOWN = "DOWN" PENDING_CREATE = "PENDING_CREATE" PENDING_UPDATE = "PENDING_UPDATE" PENDING_DELETE = "PENDING_DELETE" INACTIVE = "INACTIVE" ERROR = "ERROR" # FWaaS firewall rule action FWAAS_ALLOW = "allow" FWAAS_DENY = "deny" <|fim▁hole|>ICMP = "icmp"<|fim▁end|>
# L3 Protocol name constants TCP = "tcp" UDP = "udp"
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>use std::process::Command; use std::env; fn main() { let curr_dir = env::current_dir().unwrap();<|fim▁hole|> .current_dir(&curr_dir).status().unwrap(); #[cfg(target_os="windows")] println!("cargo:rustc-flags=-L C:\\msys64\\mingw64\\lib"); }<|fim▁end|>
Command::new("git").arg("submodule").arg("update").arg("--init")
<|file_name|>test_collection_times.py<|end_file_name|><|fim▁begin|>from concurrency.get_websites import get_number_of_links import time # Run get_number_of_links and compare it to a serial version # stub out load_url with a sleep function so the time is always the same # Show that the concurrent version takes less time than the serial import unittest from unittest.mock import patch, MagicMock from bs4 import BeautifulSoup from concurrency.get_websites import get_number_of_links, get_number_of_links_serial class TestConcurrency(unittest.TestCase): def setUp(self): self.loadtime = 1 self.fake_urls = ['url1','url2', 'url3'] @patch('concurrency.get_websites.BeautifulSoup') @patch('concurrency.get_websites.load_url') def test_concurrent_slower_than_serial(self, mock_load_url, bs_mock): """ Time the collection of data from websites """ bs_data = MagicMock(return_value="<html><a href='foo'>Baz</a></html>") bs_mock.return_value = bs_data mock_load_url.side_effect = lambda foo: time.sleep(self.loadtime) concurrent_start = time.time() list(get_number_of_links(self.fake_urls)) concurrent_total = time.time() - concurrent_start serial_start = time.time()<|fim▁hole|> print("Serial collection: {}".format(serial_total)) self.assertLess(concurrent_total, serial_total) if __name__ == "__main__": unittest.main()<|fim▁end|>
get_number_of_links_serial(self.fake_urls) serial_total = time.time() - serial_start print("Concurrent collection: {}".format(concurrent_total))
<|file_name|>Strings.js<|end_file_name|><|fim▁begin|>// @flow import compose from 'ramda/src/compose'; import contains from 'ramda/src/contains'; import curry from 'ramda/src/curry'; import curryN from 'ramda/src/curryN'; import pipe from 'ramda/src/pipe'; import props from 'ramda/src/props'; import uniq from 'ramda/src/uniq'; import { FORMAT_HUETODEGREES, FORMAT_INCLUDEALPHA, FORMAT_INTTOPERCENT, FORMAT_SHORTHEX, linearTransformFactory, toHex, splitDoubles, checkDoubles } from './Transforms'; /* --- STRING --- */ const hasAlpha = contains(FORMAT_INCLUDEALPHA); const hasHueToDegress = contains(FORMAT_HUETODEGREES); const hasIntToPercent = contains(FORMAT_INTTOPERCENT); const hasShortHex = contains(FORMAT_SHORTHEX); /** * Extract keys from Disjoint ColorObject */ function getValue(func: string, keys: Array<string>, color: ColorObject): Array<any> { return (color.func === func) ? props(keys, color) : []; }; const getRgbVals: Function = curryN(3, getValue)('rgb', ['r', 'g', 'b', 'alpha', 'format']); function floatToPercent(n: number): string { let x = (n > 1) ? 
1 : n; return `${Math.round(x * 100)}%`; } /** * Convert a float [0,1] to an int [0,255] */ function percentToInt(n: number): number { if (!n) return 0; return linearTransformFactory(0, 1, 0, 255)(n); } /** * RGB/RGBA -> String */ function makeRgbString(color: ColorObject): string { const keys = ['func', 'r', 'g', 'b', 'alpha', 'format']; let props = []; if (color.func === 'rgb') { props = getValue('rgb', keys, color); } if (color.func === 'rgba') { props = getValue('rgba', keys, color); } const [func, r, g, b, alpha, format] = props; let rgb: Array<any> = [r, g, b]; if (hasIntToPercent(format)) { rgb = rgb.map(floatToPercent); // [0, 1] -> n% } else { rgb = rgb.map(percentToInt); // [0, 1] -> [0, 255] } const [nr, ng, nb] = rgb; if (hasAlpha(format) || func === 'rgba') { return `rgba(${nr}, ${ng}, ${nb}, ${alpha})`; } return `rgb(${nr}, ${ng}, ${nb})`; } /** * HSL/HSLA -> String */ function makeHslString(color: ColorObject): string { const keys = ['func', 'h', 's', 'l', 'alpha', 'format']; let props = []; let out = ""; if (color.func === 'hsl') { props = getValue('hsl', keys, color); } if (color.func === 'hsla') { props = getValue('hsla', keys, color); } const [func, h, s, l, alpha, format] = props; let hsl: Array<any> = [ Math.round(h * 360), `${Math.round(s * 100)}%`, `${Math.round(l * 100)}%` ]; if (hasHueToDegress(format)) { hsl[0] = `${hsl[0]}deg`; } if (hasAlpha(format) || func === 'hsla') { out = `hsla(${hsl.join(', ')}, ${alpha})`; } else { out = `hsl(${hsl.join(', ')})`; } return out; } /** * HEX -> String */ function makeHexString(color: ColorObject): string { const keys = ['func', 'hex', 'r', 'g', 'b', 'alpha', 'format']; let props = []; if (color.func === 'hex') { props = getValue('hex', keys, color); } const [, hex, r, g, b, alpha, format] = props; let out = hex; // operate on a copy let pairs = []; // store modified channels // No changes to format and 6/8 long then short circuit using // the stored hex string. ALSO, account for the '#'!! 
if (!format.length && hex.length >= 7) { if (hex.length === 7) { return out; } // Lop off alpha hex if (hex.length === 9) { return out.substr(0, 7); } } // If we're not shortening and the hex string is short, // then expand it back out (ex: parse(#039f)) if (!hasShortHex(format) && hex.length <= 7) { if (hasAlpha(format)) { pairs = [r, g, b, alpha].map(toHex); out = "#" + pairs.join(''); } else { pairs = [r, g, b].map(toHex); out = "#" + pairs.join(''); } } // We can only shorten if all pairs are doubles if (hasShortHex(format)) {<|fim▁hole|> } const hasDoubles = pairs.map(checkDoubles).every(x => x); if (hasDoubles) { out = "#" + pairs.map(splitDoubles).join(''); } else { out = "#" + pairs.join(''); // no alpha and no doubles so just 6 } } return out; } function makeString(result: ColorObject): string { const {func} = result; switch (func) { case 'hex': return makeHexString(result); case 'hsl': case 'hsla': return makeHslString(result); case 'rgb': case 'rgba': return makeRgbString(result); default: return '#error - could not output string for this color object'; } }; export { makeString };<|fim▁end|>
if (hasAlpha(format)) { pairs = [r, g, b, alpha].map(toHex); } else { pairs = [r, g, b].map(toHex);
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url from . import views <|fim▁hole|> url(r'^candidate/$', views.candidate), ]<|fim▁end|>
urlpatterns = [ url(r'^$', views.home), url(r'^interviewer/$', views.interviewer),
<|file_name|>ContainerConstants.java<|end_file_name|><|fim▁begin|>package org.vitanov.container; public class ContainerConstants { <|fim▁hole|> System.getProperty("user.dir"); }<|fim▁end|>
public static String CONTAINER_ROOT_PATH =
<|file_name|>files_70.js<|end_file_name|><|fim▁begin|>var searchData= [ ['parser_2ecpp',['Parser.cpp',['../_parser_8cpp.html',1,'']]], ['parser_2eh',['Parser.h',['../_parser_8h.html',1,'']]], ['parsercollision_2ecpp',['ParserCollision.cpp',['../_parser_collision_8cpp.html',1,'']]], ['parsercollision_2eh',['ParserCollision.h',['../_parser_collision_8h.html',1,'']]], ['pausemenu_2ecpp',['PauseMenu.cpp',['../_pause_menu_8cpp.html',1,'']]], ['pausemenu_2eh',['PauseMenu.h',['../_pause_menu_8h.html',1,'']]], ['physicalentity_2ecpp',['PhysicalEntity.cpp',['../_physical_entity_8cpp.html',1,'']]], ['physicalentity_2eh',['PhysicalEntity.h',['../_physical_entity_8h.html',1,'']]], ['physicalworld_2ecpp',['PhysicalWorld.cpp',['../_physical_world_8cpp.html',1,'']]],<|fim▁hole|> ['player_2ecpp',['Player.cpp',['../_player_8cpp.html',1,'']]], ['player_2eh',['Player.h',['../_player_8h.html',1,'']]], ['power_2ecpp',['Power.cpp',['../_power_8cpp.html',1,'']]], ['power_2eh',['Power.h',['../_power_8h.html',1,'']]] ];<|fim▁end|>
['physicalworld_2eh',['PhysicalWorld.h',['../_physical_world_8h.html',1,'']]],
<|file_name|>usernav.component.ts<|end_file_name|><|fim▁begin|>import { Component, OnInit, Input } from '@angular/core'; import { Router, ActivatedRoute, Params } from '@angular/router';<|fim▁hole|>import { Auth } from '../.././shared-services/authorization/auth.service'; @Component({ selector: 'user-top-nav', templateUrl: './usernav.component.html', styleUrls: ['./usernav.component.scss'], providers: [Auth] }) export class UserNavComponent implements OnInit{ @Input() loggedInUser: any; constructor(private auth: Auth, private route: ActivatedRoute, private router: Router) { // Do stuff } ngOnInit() { if (this.auth.authenticated() === true) { // console.log(localStorage.getItem('id_token')); // this.route.params.subscribe((response) => { // console.log('data received ', response); // }); // this.loggedInUser = this.auth.getProfile(localStorage.getItem('id_token')); // this.auth.getProfileObservable(localStorage.getItem('id_token')).subscribe( // function (x) { console.log('next', x); this.loggedInUser = x; // console.log('this.loggedInUser in UserNavComponent', this.loggedInUser); }, // function (err) { console.log('error', err); }, // function () { console.log('done'); } // ); console.log('this.loggedInUser in UserNavComponent', this.loggedInUser); } } }<|fim▁end|>
<|file_name|>ReactMultiChild.js<|end_file_name|><|fim▁begin|>/** * Copyright 2013-2015, Facebook, Inc. * All rights reserved. * * This source code is licensed under the BSD-style license found in the * LICENSE file in the root directory of this source tree. An additional grant * of patent rights can be found in the PATENTS file in the same directory. * * @providesModule ReactMultiChild * @typechecks static-only */ 'use strict'; var ReactComponentEnvironment = require("./ReactComponentEnvironment"); var ReactMultiChildUpdateTypes = require("./ReactMultiChildUpdateTypes"); var ReactReconciler = require("./ReactReconciler"); var ReactChildReconciler = require("./ReactChildReconciler"); /** * Updating children of a component may trigger recursive updates. The depth is * used to batch recursive updates to render markup more efficiently. * * @type {number} * @private */ var updateDepth = 0; /** * Queue of update configuration objects. * * Each object has a `type` property that is in `ReactMultiChildUpdateTypes`. * * @type {array<object>} * @private */ var updateQueue = []; /** * Queue of markup to be rendered. * * @type {array<string>} * @private */ var markupQueue = []; /** * Enqueues markup to be rendered and inserted at a supplied index. * * @param {string} parentID ID of the parent component. * @param {string} markup Markup that renders into an element. * @param {number} toIndex Destination index. * @private */ function enqueueMarkup(parentID, markup, toIndex) { // NOTE: Null values reduce hidden classes.<|fim▁hole|> parentID: parentID, parentNode: null, type: ReactMultiChildUpdateTypes.INSERT_MARKUP, markupIndex: markupQueue.push(markup) - 1, textContent: null, fromIndex: null, toIndex: toIndex }); } /** * Enqueues moving an existing element to another index. * * @param {string} parentID ID of the parent component. * @param {number} fromIndex Source index of the existing element. * @param {number} toIndex Destination index of the element. 
* @private */ function enqueueMove(parentID, fromIndex, toIndex) { // NOTE: Null values reduce hidden classes. updateQueue.push({ parentID: parentID, parentNode: null, type: ReactMultiChildUpdateTypes.MOVE_EXISTING, markupIndex: null, textContent: null, fromIndex: fromIndex, toIndex: toIndex }); } /** * Enqueues removing an element at an index. * * @param {string} parentID ID of the parent component. * @param {number} fromIndex Index of the element to remove. * @private */ function enqueueRemove(parentID, fromIndex) { // NOTE: Null values reduce hidden classes. updateQueue.push({ parentID: parentID, parentNode: null, type: ReactMultiChildUpdateTypes.REMOVE_NODE, markupIndex: null, textContent: null, fromIndex: fromIndex, toIndex: null }); } /** * Enqueues setting the text content. * * @param {string} parentID ID of the parent component. * @param {string} textContent Text content to set. * @private */ function enqueueTextContent(parentID, textContent) { // NOTE: Null values reduce hidden classes. updateQueue.push({ parentID: parentID, parentNode: null, type: ReactMultiChildUpdateTypes.TEXT_CONTENT, markupIndex: null, textContent: textContent, fromIndex: null, toIndex: null }); } /** * Processes any enqueued updates. * * @private */ function processQueue() { if (updateQueue.length) { ReactComponentEnvironment.processChildrenUpdates( updateQueue, markupQueue ); clearQueue(); } } /** * Clears any enqueued updates. * * @private */ function clearQueue() { updateQueue.length = 0; markupQueue.length = 0; } /** * ReactMultiChild are capable of reconciling multiple children. * * @class ReactMultiChild * @internal */ var ReactMultiChild = { /** * Provides common functionality for components that must reconcile multiple * children. This is used by `ReactDOMComponent` to mount, update, and * unmount child components. * * @lends {ReactMultiChild.prototype} */ Mixin: { /** * Generates a "mount image" for each of the supplied children. 
In the case * of `ReactDOMComponent`, a mount image is a string of markup. * * @param {?object} nestedChildren Nested child maps. * @return {array} An array of mounted representations. * @internal */ mountChildren: function(nestedChildren, transaction, context) { var children = ReactChildReconciler.instantiateChildren( nestedChildren, transaction, context ); this._renderedChildren = children; var mountImages = []; var index = 0; for (var name in children) { if (children.hasOwnProperty(name)) { var child = children[name]; // Inlined for performance, see `ReactInstanceHandles.createReactID`. var rootID = this._rootNodeID + name; var mountImage = ReactReconciler.mountComponent( child, rootID, transaction, context ); child._mountIndex = index; mountImages.push(mountImage); index++; } } return mountImages; }, /** * Replaces any rendered children with a text content string. * * @param {string} nextContent String of content. * @internal */ updateTextContent: function(nextContent) { updateDepth++; var errorThrown = true; try { var prevChildren = this._renderedChildren; // Remove any rendered children. ReactChildReconciler.unmountChildren(prevChildren); // TODO: The setTextContent operation should be enough for (var name in prevChildren) { if (prevChildren.hasOwnProperty(name)) { this._unmountChildByName(prevChildren[name], name); } } // Set new text content. this.setTextContent(nextContent); errorThrown = false; } finally { updateDepth--; if (!updateDepth) { if (errorThrown) { clearQueue(); } else { processQueue(); } } } }, /** * Updates the rendered children with new children. * * @param {?object} nextNestedChildren Nested child maps. 
* @param {ReactReconcileTransaction} transaction * @internal */ updateChildren: function(nextNestedChildren, transaction, context) { updateDepth++; var errorThrown = true; try { this._updateChildren(nextNestedChildren, transaction, context); errorThrown = false; } finally { updateDepth--; if (!updateDepth) { if (errorThrown) { clearQueue(); } else { processQueue(); } } } }, /** * Improve performance by isolating this hot code path from the try/catch * block in `updateChildren`. * * @param {?object} nextNestedChildren Nested child maps. * @param {ReactReconcileTransaction} transaction * @final * @protected */ _updateChildren: function(nextNestedChildren, transaction, context) { var prevChildren = this._renderedChildren; var nextChildren = ReactChildReconciler.updateChildren( prevChildren, nextNestedChildren, transaction, context ); this._renderedChildren = nextChildren; if (!nextChildren && !prevChildren) { return; } var name; // `nextIndex` will increment for each child in `nextChildren`, but // `lastIndex` will be the last index visited in `prevChildren`. var lastIndex = 0; var nextIndex = 0; for (name in nextChildren) { if (!nextChildren.hasOwnProperty(name)) { continue; } var prevChild = prevChildren && prevChildren[name]; var nextChild = nextChildren[name]; if (prevChild === nextChild) { this.moveChild(prevChild, nextIndex, lastIndex); lastIndex = Math.max(prevChild._mountIndex, lastIndex); prevChild._mountIndex = nextIndex; } else { if (prevChild) { // Update `lastIndex` before `_mountIndex` gets unset by unmounting. lastIndex = Math.max(prevChild._mountIndex, lastIndex); this._unmountChildByName(prevChild, name); } // The child must be instantiated before it's mounted. this._mountChildByNameAtIndex( nextChild, name, nextIndex, transaction, context ); } nextIndex++; } // Remove children that are no longer present. 
for (name in prevChildren) { if (prevChildren.hasOwnProperty(name) && !(nextChildren && nextChildren.hasOwnProperty(name))) { this._unmountChildByName(prevChildren[name], name); } } }, /** * Unmounts all rendered children. This should be used to clean up children * when this component is unmounted. * * @internal */ unmountChildren: function() { var renderedChildren = this._renderedChildren; ReactChildReconciler.unmountChildren(renderedChildren); this._renderedChildren = null; }, /** * Moves a child component to the supplied index. * * @param {ReactComponent} child Component to move. * @param {number} toIndex Destination index of the element. * @param {number} lastIndex Last index visited of the siblings of `child`. * @protected */ moveChild: function(child, toIndex, lastIndex) { // If the index of `child` is less than `lastIndex`, then it needs to // be moved. Otherwise, we do not need to move it because a child will be // inserted or moved before `child`. if (child._mountIndex < lastIndex) { enqueueMove(this._rootNodeID, child._mountIndex, toIndex); } }, /** * Creates a child component. * * @param {ReactComponent} child Component to create. * @param {string} mountImage Markup to insert. * @protected */ createChild: function(child, mountImage) { enqueueMarkup(this._rootNodeID, mountImage, child._mountIndex); }, /** * Removes a child component. * * @param {ReactComponent} child Child to remove. * @protected */ removeChild: function(child) { enqueueRemove(this._rootNodeID, child._mountIndex); }, /** * Sets this text content string. * * @param {string} textContent Text content to set. * @protected */ setTextContent: function(textContent) { enqueueTextContent(this._rootNodeID, textContent); }, /** * Mounts a child with the supplied name. * * NOTE: This is part of `updateChildren` and is here for readability. * * @param {ReactComponent} child Component to mount. * @param {string} name Name of the child. * @param {number} index Index at which to insert the child. 
* @param {ReactReconcileTransaction} transaction * @private */ _mountChildByNameAtIndex: function( child, name, index, transaction, context) { // Inlined for performance, see `ReactInstanceHandles.createReactID`. var rootID = this._rootNodeID + name; var mountImage = ReactReconciler.mountComponent( child, rootID, transaction, context ); child._mountIndex = index; this.createChild(child, mountImage); }, /** * Unmounts a rendered child by name. * * NOTE: This is part of `updateChildren` and is here for readability. * * @param {ReactComponent} child Component to unmount. * @param {string} name Name of the child in `this._renderedChildren`. * @private */ _unmountChildByName: function(child, name) { this.removeChild(child); child._mountIndex = null; } } }; module.exports = ReactMultiChild;<|fim▁end|>
updateQueue.push({
<|file_name|>util.js<|end_file_name|><|fim▁begin|>'use strict'; var assert = require('assert'); var fixtures = require('../fixtures'); var sharp = require('../../index'); var defaultConcurrency = sharp.concurrency(); describe('Utilities', function() { describe('Cache', function() { it('Can be disabled', function() { var cache = sharp.cache(0, 0); assert.strictEqual(0, cache.memory); assert.strictEqual(0, cache.items); }); it('Can be set to a maximum of 50MB and 500 items', function() { var cache = sharp.cache(50, 500); assert.strictEqual(50, cache.memory); assert.strictEqual(500, cache.items); }); it('Ignores invalid values', function() { sharp.cache(50, 500); var cache = sharp.cache('spoons'); assert.strictEqual(50, cache.memory); assert.strictEqual(500, cache.items); }); }); <|fim▁hole|> assert.strictEqual(16, sharp.concurrency()); }); it('Can be reset to default', function() { sharp.concurrency(0); assert.strictEqual(defaultConcurrency, sharp.concurrency()); }); it('Ignores invalid values', function() { sharp.concurrency(0); sharp.concurrency('spoons'); assert.strictEqual(defaultConcurrency, sharp.concurrency()); }); }); describe('Counters', function() { it('Have zero value at rest', function() { var counters = sharp.counters(); assert.strictEqual(0, counters.queue); assert.strictEqual(0, counters.process); }); }); describe('Format', function() { it('Contains expected attributes', function() { assert.strictEqual('object', typeof sharp.format); Object.keys(sharp.format).forEach(function(format) { assert.strictEqual(true, 'id' in sharp.format[format]); assert.strictEqual(format, sharp.format[format].id); ['input', 'output'].forEach(function(direction) { assert.strictEqual(true, direction in sharp.format[format]); assert.strictEqual('object', typeof sharp.format[format][direction]); assert.strictEqual(3, Object.keys(sharp.format[format][direction]).length); assert.strictEqual(true, 'file' in sharp.format[format][direction]); assert.strictEqual(true, 'buffer' in 
sharp.format[format][direction]); assert.strictEqual(true, 'stream' in sharp.format[format][direction]); assert.strictEqual('boolean', typeof sharp.format[format][direction].file); assert.strictEqual('boolean', typeof sharp.format[format][direction].buffer); assert.strictEqual('boolean', typeof sharp.format[format][direction].stream); }); }); }); }); });<|fim▁end|>
describe('Concurrency', function() { it('Can be set to use 16 threads', function() { sharp.concurrency(16);
<|file_name|>vectors.rs<|end_file_name|><|fim▁begin|>// Lumol, an extensible molecular simulation engine // Copyright (C) 2015-2016 Lumol's contributors — BSD license //! 3-dimensional vector type use std::ops::{Add, Sub, Neg, Mul, Div, BitXor, Index, IndexMut}; use std::ops::{AddAssign, SubAssign, MulAssign, DivAssign}; use std::cmp::PartialEq; use types::{Matrix3, Zero}; /// A 3-dimensional vector type /// /// A `Vector3D` implement all the arithmetic operations: /// /// ``` /// # use lumol::types::Vector3D; /// let u = Vector3D::new(1.0, 2.0, 3.0); /// let v = Vector3D::new(4.0, -2.0, 1.0); /// /// // Indexing /// assert_eq!(u[0], 1.0); /// assert_eq!(u[1], 2.0); /// assert_eq!(u[2], 3.0); /// /// // Addition /// let w = u + v; /// assert_eq!(w, Vector3D::new(5.0, 0.0, 4.0)); /// /// // Subtraction /// let w = u - v; /// assert_eq!(w, Vector3D::new(-3.0, 4.0, 2.0)); /// /// // Negation /// let w = -u; /// assert_eq!(w, Vector3D::new(-1.0, -2.0, -3.0)); /// /// // Cross product /// let w = u ^ v; /// assert_eq!(w, Vector3D::new(8.0, 11.0, -10.0)); /// /// // Multiplication /// let w = 2.0 * u; /// assert_eq!(w, Vector3D::new(2.0, 4.0, 6.0)); /// /// let w = u * 3.0; /// assert_eq!(w, Vector3D::new(3.0, 6.0, 9.0)); /// /// // Division /// let w = u / 2.0; /// assert_eq!(w, Vector3D::new(0.5, 1.0, 1.5)); /// /// // Dot product /// let a = u * v; /// assert_eq!(a, 3.0); /// ``` #[derive(Copy, Clone, Debug)] pub struct Vector3D([f64; 3]); impl Vector3D { /// Create a new `Vector3D` with components `x`, `y`, `z` /// /// # Examples /// ``` /// # use lumol::types::Vector3D; /// let vec = Vector3D::new(1.0, 0.0, -42.0); /// ``` pub fn new(x: f64, y: f64, z: f64) -> Vector3D { Vector3D([x, y, z]) } /// Return the squared euclidean norm of a `Vector3D` /// /// # Examples /// ``` /// # use lumol::types::Vector3D; /// let vec = Vector3D::new(1.0, 0.0, -4.0); /// assert_eq!(vec.norm2(), 17.0); /// ``` #[inline] pub fn norm2(&self) -> f64 { self * self } /// Return the 
euclidean norm of a `Vector3D` /// # Examples /// ``` /// # use lumol::types::Vector3D; /// # use std::f64; /// let vec = Vector3D::new(1.0, 0.0, -4.0); /// assert_eq!(vec.norm(), f64::sqrt(17.0)); /// ``` #[inline] pub fn norm(&self) -> f64 { f64::sqrt(self.norm2()) } /// Normalize a `Vector3D`. /// # Examples /// ``` /// # use lumol::types::Vector3D; /// let vec = Vector3D::new(1.0, 0.0, -4.0); /// let n = vec.normalized(); /// assert_eq!(n.norm(), 1.0); /// ``` #[inline] pub fn normalized(&self) -> Vector3D { self / self.norm() } /// Tensorial product between vectors. The tensorial product between the /// vectors `a` and `b` creates a `Matrix3` with component (i, j) equals to /// `a[i] * b[j]`. /// /// # Examples /// /// ``` /// # use lumol::types::Vector3D; /// # use lumol::types::Matrix3; /// let a = Vector3D::new(1.0, 0.0, -4.0); /// let b = Vector3D::new(1.0, 2.0, 3.0); /// let matrix = Matrix3::new( /// 1.0, 2.0, 3.0, /// 0.0, 0.0, 0.0, /// -4.0, -8.0, -12.0 /// ); /// assert_eq!(a.tensorial(&b), matrix); /// ``` pub fn tensorial(&self, other: &Vector3D) -> Matrix3 { Matrix3::new(self[0] * other[0], self[0] * other[1], self[0] * other[2], self[1] * other[0], self[1] * other[1], self[1] * other[2], self[2] * other[0], self[2] * other[1], self[2] * other[2]) } } impl_arithmetic!( Vector3D, Vector3D, Add, add, Vector3D, self, other, Vector3D::new(self[0] + other[0], self[1] + other[1], self[2] + other[2]) ); impl_inplace_arithmetic!( Vector3D, Vector3D, AddAssign, add_assign, self, other, {self[0] += other[0]; self[1] += other[1]; self[2] += other[2]} ); impl_arithmetic!( Vector3D, Vector3D, Sub, sub, Vector3D, self, other,<|fim▁hole|> impl_inplace_arithmetic!( Vector3D, Vector3D, SubAssign, sub_assign, self, other, {self[0] -= other[0]; self[1] -= other[1]; self[2] -= other[2]} ); // Dot product impl_arithmetic!( Vector3D, Vector3D, Mul, mul, f64, self, other, self[0] * other[0] + self[1] * other[1] + self[2] * other[2] ); // Cross product impl_arithmetic!( 
Vector3D, Vector3D, BitXor, bitxor, Vector3D, self, other, {let x = self[1] * other[2] - self[2] * other[1]; let y = self[2] * other[0] - self[0] * other[2]; let z = self[0] * other[1] - self[1] * other[0]; Vector3D::new(x, y, z)} ); /******************************************************************************/ lsh_scal_arithmetic!( Vector3D, Mul, mul, Vector3D, self, other, Vector3D::new(self[0] * other, self[1] * other, self[2] * other) ); rhs_scal_arithmetic!( Vector3D, Mul, mul, Vector3D, self, other, Vector3D::new(self * other[0], self * other[1], self * other[2]) ); impl_inplace_arithmetic!( Vector3D, f64, MulAssign, mul_assign, self, other, {let other = other.clone(); self[0] *= other; self[1] *= other; self[2] *= other} ); lsh_scal_arithmetic!( Vector3D, Div, div, Vector3D, self, other, Vector3D::new(self[0] / other, self[1] / other, self[2] / other) ); impl_inplace_arithmetic!( Vector3D, f64, DivAssign, div_assign, self, other, {let other = other.clone(); self[0] /= other; self[1] /= other; self[2] /= other} ); /******************************************************************************/ impl Neg for Vector3D { type Output = Vector3D; #[inline] fn neg(self) -> Vector3D { Vector3D::new(-self[0], -self[1], -self[2]) } } impl<'a> Neg for &'a Vector3D { type Output = Vector3D; #[inline] fn neg(self) -> Vector3D { Vector3D::new(-self[0], -self[1], -self[2]) } } impl<'a> Neg for &'a mut Vector3D { type Output = Vector3D; #[inline] fn neg(self) -> Vector3D { Vector3D::new(-self[0], -self[1], -self[2]) } } /******************************************************************************/ /// Comparing two vectors impl PartialEq for Vector3D { #[inline] fn eq(&self, other: &Vector3D) -> bool { self[0] == other[0] && self[1] == other[1] && self[2] == other[2] } } impl Index<usize> for Vector3D { type Output = f64; #[inline] fn index(&self, index: usize) -> &f64 { &self.0[index] } } impl IndexMut<usize> for Vector3D { #[inline] fn index_mut(&mut self, index: 
usize) -> &mut f64 { &mut self.0[index] } } impl Zero for Vector3D { fn zero() -> Vector3D { Vector3D::new(0.0, 0.0, 0.0) } fn is_zero(&self) -> bool { self.norm2() == 0.0 } } /******************************************************************************/ #[cfg(test)] mod tests { use types::{Vector3D, Matrix3}; use approx::ApproxEq; impl ApproxEq for Vector3D { type Epsilon = <f64 as ApproxEq>::Epsilon; fn default_epsilon() -> Self::Epsilon { f64::default_epsilon() } fn default_max_relative() -> Self::Epsilon { f64::default_max_relative() } fn default_max_ulps() -> u32 { f64::default_max_ulps() } fn relative_eq(&self, other: &Self, epsilon: Self::Epsilon, max_relative: Self::Epsilon) -> bool { f64::relative_eq(&self[0], &other[0], epsilon, max_relative) && f64::relative_eq(&self[1], &other[1], epsilon, max_relative) && f64::relative_eq(&self[2], &other[2], epsilon, max_relative) } fn ulps_eq(&self, other: &Self, epsilon: Self::Epsilon, max_ulps: u32) -> bool { f64::ulps_eq(&self[0], &other[0], epsilon, max_ulps) && f64::ulps_eq(&self[1], &other[1], epsilon, max_ulps) && f64::ulps_eq(&self[2], &other[2], epsilon, max_ulps) } } #[test] fn add() { let mut a = Vector3D::new(2.0, 3.5, 4.8); let mut b = Vector3D::new(6.1, -8.5, 7.3); let c = a + b; assert_eq!(c, Vector3D::new(8.1, -5.0, 12.1)); a += b; assert_eq!(a, Vector3D::new(8.1, -5.0, 12.1)); // Just checking that everything compile let _ = &a + b; let _ = a + &b; let _ = &a + &b; let _ = a + &mut b; let _ = &mut a + b; let _ = &mut a + &mut b; let _ = &mut a + &b; let _ = &a + &mut b; a += &b; a += &mut b; let _ = a; } #[test] fn sub() { let mut a = Vector3D::new(2.0, 3.5, 4.8); let mut b = Vector3D::new(6.1, -8.5, 7.3); let c = a - b; assert_eq!(c, Vector3D::new(-4.1, 12.0, -2.5)); a -= b; assert_eq!(a, Vector3D::new(-4.1, 12.0, -2.5)); // Just checking that everything compile let _ = &a - b; let _ = a - &b; let _ = &a - &b; let _ = a - &mut b; let _ = &mut a - b; let _ = &mut a - &mut b; let _ = &mut a - &b; 
let _ = &a - &mut b; a -= &b; a -= &mut b; let _ = a; } #[test] fn neg() { let mut a = Vector3D::new(6.1, -8.5, 7.3); let b = -a; assert_eq!(b, Vector3D::new(-6.1, 8.5, -7.3)); let _ = -&a; let _ = -&mut a; } #[test] fn mul() { let mut a = Vector3D::new(2.0, 3.5, 4.8); let b = 2.0; let c = b * a; assert_eq!(c, Vector3D::new(4.0, 7.0, 9.6)); let _ = b * &a; let _ = b * &mut a; let mut b = 1.5; let c = a * b; assert_eq!(c, Vector3D::new(3.0, 5.25, 7.199999999999999)); a *= b; assert_eq!(a, Vector3D::new(3.0, 5.25, 7.199999999999999)); // Just checking that everything compile let _ = &a * b; let _ = &mut a * b; a *= &b; a *= &mut b; let _ = a; } #[test] fn div() { let mut a = Vector3D::new(2.0, 3.5, 4.8); let mut b = 2.0; let c = a / b; assert_eq!(c, Vector3D::new(1.0, 1.75, 2.4)); a /= b; assert_eq!(a, Vector3D::new(1.0, 1.75, 2.4)); // Just checking that everything compile let _ = &a / b; let _ = &mut a / b; a /= &b; a /= &mut b; let _ = a; } #[test] fn dot_product() { let mut a = Vector3D::new(2.1, 3.5, 4.8); let mut b = Vector3D::new(6.1, -8.5, 7.3); let c = a * b; assert_eq!(c, 18.1); // Just checking that everything compile let _ = &a * b; let _ = a * &b; let _ = &a * &b; let _ = a * &mut b; let _ = &mut a * b; let _ = &mut a * &mut b; let _ = &mut a * &b; let _ = &a * &mut b; } #[test] fn cross_product() { let a = Vector3D::new(2.1, 3.5, 4.8); let b = Vector3D::new(6.1, -8.5, 7.3); let c = a ^ b; assert_eq!(c*a, 0.0); let mut a = Vector3D::new(1.0, 0.0, 0.0); let mut b = Vector3D::new(0.0, 1.0, 0.0); let c = a ^ b; assert_eq!(c, Vector3D::new(0.0, 0.0, 1.0)); // Just checking that everything compile let _ = &a ^ b; let _ = a ^ &b; let _ = &a ^ &b; let _ = a ^ &mut b; let _ = &mut a ^ b; let _ = &mut a ^ &mut b; let _ = &mut a ^ &b; let _ = &a ^ &mut b; } #[test] fn index() { let mut a = Vector3D::new(2.1, 3.5, 4.8); assert_eq!(a[0], a[0]); assert_eq!(a[1], a[1]); assert_eq!(a[2], a[2]); a[0] = 1.0; a[1] = 1.0; a[2] = 1.0; assert_eq!(a[0], 1.0); assert_eq!(a[1], 
1.0); assert_eq!(a[2], 1.0); } #[test] fn tensorial() { let a = Vector3D::new(1.0, 0.0, -4.0); let b = Vector3D::new(1.0, 2.0, 3.0); let matrix = Matrix3::new( 1.0, 2.0, 3.0, 0.0, 0.0, 0.0, -4.0, -8.0, -12.0 ); assert_eq!(a.tensorial(&b), matrix); assert_eq!(b.tensorial(&a), matrix.transposed()); } #[test] #[should_panic] fn index_out_of_bounds() { let mut a = Vector3D::new(2.1, 3.5, 4.8); a[3] += 4.0; } }<|fim▁end|>
Vector3D::new(self[0] - other[0], self[1] - other[1], self[2] - other[2]) );
<|file_name|>provider_test.go<|end_file_name|><|fim▁begin|>package provider import ( "io/ioutil" "os" "strings" "testing" "text/template" "github.com/containous/traefik/types" ) type myProvider struct { BaseProvider TLS *ClientTLS } func (p *myProvider) Foo() string { return "bar" } func TestConfigurationErrors(t *testing.T) { templateErrorFile, err := ioutil.TempFile("", "provider-configuration-error") if err != nil { t.Fatal(err) } defer os.RemoveAll(templateErrorFile.Name()) data := []byte("Not a valid template {{ Bar }}") err = ioutil.WriteFile(templateErrorFile.Name(), data, 0700) if err != nil { t.Fatal(err) } templateInvalidTOMLFile, err := ioutil.TempFile("", "provider-configuration-error") if err != nil { t.Fatal(err) } defer os.RemoveAll(templateInvalidTOMLFile.Name()) data = []byte(`Hello {{ .Name }} {{ Foo }}`) err = ioutil.WriteFile(templateInvalidTOMLFile.Name(), data, 0700) if err != nil { t.Fatal(err) } invalids := []struct { provider *myProvider defaultTemplate string expectedError string funcMap template.FuncMap templateObjects interface{} }{ { provider: &myProvider{ BaseProvider{ Filename: "/non/existent/template.tmpl", }, nil, }, expectedError: "open /non/existent/template.tmpl: no such file or directory", }, { provider: &myProvider{}, defaultTemplate: "non/existent/template.tmpl", expectedError: "Asset non/existent/template.tmpl not found", }, { provider: &myProvider{ BaseProvider{ Filename: templateErrorFile.Name(), }, nil, }, expectedError: `function "Bar" not defined`, }, { provider: &myProvider{ BaseProvider{ Filename: templateInvalidTOMLFile.Name(), },<|fim▁hole|> nil, }, expectedError: "Near line 1 (last key parsed 'Hello'): Expected key separator '=', but got '<' instead", funcMap: template.FuncMap{ "Foo": func() string { return "bar" }, }, templateObjects: struct{ Name string }{Name: "bar"}, }, } for _, invalid := range invalids { configuration, err := invalid.provider.getConfiguration(invalid.defaultTemplate, invalid.funcMap, nil) if 
err == nil || !strings.Contains(err.Error(), invalid.expectedError) { t.Fatalf("should have generate an error with %q, got %v", invalid.expectedError, err) } if configuration != nil { t.Fatalf("shouldn't have return a configuration object : %v", configuration) } } } func TestGetConfiguration(t *testing.T) { templateFile, err := ioutil.TempFile("", "provider-configuration") if err != nil { t.Fatal(err) } defer os.RemoveAll(templateFile.Name()) data := []byte(`[backends] [backends.backend1] [backends.backend1.circuitbreaker] expression = "NetworkErrorRatio() > 0.5" [backends.backend1.servers.server1] url = "http://172.17.0.2:80" weight = 10 [backends.backend1.servers.server2] url = "http://172.17.0.3:80" weight = 1 [frontends] [frontends.frontend1] backend = "backend1" passHostHeader = true [frontends.frontend11.routes.test_2] rule = "Path" value = "/test"`) err = ioutil.WriteFile(templateFile.Name(), data, 0700) if err != nil { t.Fatal(err) } provider := &myProvider{ BaseProvider{ Filename: templateFile.Name(), }, nil, } configuration, err := provider.getConfiguration(templateFile.Name(), nil, nil) if err != nil { t.Fatalf("Shouldn't have error out, got %v", err) } if configuration == nil { t.Fatalf("Configuration should not be nil, but was") } } func TestReplace(t *testing.T) { cases := []struct { str string expected string }{ { str: "", expected: "", }, { str: "foo", expected: "bar", }, { str: "foo foo", expected: "bar bar", }, { str: "somethingfoo", expected: "somethingbar", }, } for _, c := range cases { actual := replace("foo", "bar", c.str) if actual != c.expected { t.Fatalf("expected %q, got %q, for %q", c.expected, actual, c.str) } } } func TestGetConfigurationReturnsCorrectMaxConnConfiguration(t *testing.T) { templateFile, err := ioutil.TempFile("", "provider-configuration") if err != nil { t.Fatal(err) } defer os.RemoveAll(templateFile.Name()) data := []byte(`[backends] [backends.backend1] [backends.backend1.maxconn] amount = 10 extractorFunc = 
"request.host"`) err = ioutil.WriteFile(templateFile.Name(), data, 0700) if err != nil { t.Fatal(err) } provider := &myProvider{ BaseProvider{ Filename: templateFile.Name(), }, nil, } configuration, err := provider.getConfiguration(templateFile.Name(), nil, nil) if err != nil { t.Fatalf("Shouldn't have error out, got %v", err) } if configuration == nil { t.Fatalf("Configuration should not be nil, but was") } if configuration.Backends["backend1"].MaxConn.Amount != 10 { t.Fatalf("Configuration did not parse MaxConn.Amount properly") } if configuration.Backends["backend1"].MaxConn.ExtractorFunc != "request.host" { t.Fatalf("Configuration did not parse MaxConn.ExtractorFunc properly") } } func TestNilClientTLS(t *testing.T) { provider := &myProvider{ BaseProvider{ Filename: "", }, nil, } _, err := provider.TLS.CreateTLSConfig() if err != nil { t.Fatalf("CreateTLSConfig should assume that consumer does not want a TLS configuration if input is nil") } } func TestMatchingConstraints(t *testing.T) { cases := []struct { constraints []types.Constraint tags []string expected bool }{ // simple test: must match { constraints: []types.Constraint{ { Key: "tag", MustMatch: true, Regex: "us-east-1", }, }, tags: []string{ "us-east-1", }, expected: true, }, // simple test: must match but does not match { constraints: []types.Constraint{ { Key: "tag", MustMatch: true, Regex: "us-east-1", }, }, tags: []string{ "us-east-2", }, expected: false, }, // simple test: must not match { constraints: []types.Constraint{ { Key: "tag", MustMatch: false, Regex: "us-east-1", }, }, tags: []string{ "us-east-1", }, expected: false, }, // complex test: globbing { constraints: []types.Constraint{ { Key: "tag", MustMatch: true, Regex: "us-east-*", }, }, tags: []string{ "us-east-1", }, expected: true, }, // complex test: multiple constraints { constraints: []types.Constraint{ { Key: "tag", MustMatch: true, Regex: "us-east-*", }, { Key: "tag", MustMatch: false, Regex: "api", }, }, tags: []string{ "api", 
"us-east-1", }, expected: false, }, } for i, c := range cases { provider := myProvider{ BaseProvider{ Constraints: c.constraints, }, nil, } actual, _ := provider.MatchConstraints(c.tags) if actual != c.expected { t.Fatalf("test #%v: expected %t, got %t, for %#v", i, c.expected, actual, c.constraints) } } }<|fim▁end|>
<|file_name|>util.py<|end_file_name|><|fim▁begin|>"""Tests for the nut integration.""" import json from unittest.mock import MagicMock, patch from homeassistant.components.nut.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT, CONF_RESOURCES from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture def _get_mock_pynutclient(list_vars=None, list_ups=None): pynutclient = MagicMock() type(pynutclient).list_ups = MagicMock(return_value=list_ups) type(pynutclient).list_vars = MagicMock(return_value=list_vars)<|fim▁hole|> async def async_init_integration( hass: HomeAssistant, ups_fixture: str, resources: list, add_options: bool = False ) -> MockConfigEntry: """Set up the nexia integration in Home Assistant.""" ups_fixture = f"nut/{ups_fixture}.json" list_vars = json.loads(load_fixture(ups_fixture)) mock_pynut = _get_mock_pynutclient(list_ups={"ups1": "UPS 1"}, list_vars=list_vars) with patch( "homeassistant.components.nut.PyNUTClient", return_value=mock_pynut, ): entry = MockConfigEntry( domain=DOMAIN, data={CONF_HOST: "mock", CONF_PORT: "mock", CONF_RESOURCES: resources}, options={CONF_RESOURCES: resources} if add_options else {}, ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() return entry<|fim▁end|>
return pynutclient
<|file_name|>conf.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # powerschool_apps documentation build configuration file, created by # sphinx-quickstart. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. from __future__ import unicode_literals import os import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.'))<|fim▁hole|># -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = [] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'powerschool_apps' copyright = """2017, Iron County School District""" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '0.1' # The full version, including alpha/beta/rc tags. release = '0.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
# language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. # add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. # html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. # html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. # html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'powerschool_appsdoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). 
# 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'powerschool_apps.tex', 'powerschool_apps Documentation', """Iron County School District""", 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'powerschool_apps', 'powerschool_apps Documentation', ["""Iron County School District"""], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'powerschool_apps', 'powerschool_apps Documentation', """Iron County School District""", 'powerschool_apps', """PowerSchool customizations written in Django""", 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. 
# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote'<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Copyright 2007-2016 The HyperSpy developers # # This file is part of HyperSpy. # # HyperSpy is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # HyperSpy is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the<|fim▁hole|> import logging from hyperspy.io_plugins import (msa, digital_micrograph, fei, mrc, ripple, tiff, semper_unf, blockfile, dens, emd, protochips) io_plugins = [msa, digital_micrograph, fei, mrc, ripple, tiff, semper_unf, blockfile, dens, emd, protochips] _logger = logging.getLogger(__name__) try: from hyperspy.io_plugins import netcdf io_plugins.append(netcdf) except ImportError: pass # NetCDF is obsolate and is only provided for users who have # old EELSLab files. Therefore, we silenly ignore if missing. try: from hyperspy.io_plugins import hdf5 io_plugins.append(hdf5) from hyperspy.io_plugins import emd io_plugins.append(emd) except ImportError: _logger.warning('The HDF5 IO features are not available. ' 'It is highly reccomended to install h5py') try: from hyperspy.io_plugins import image io_plugins.append(image) except ImportError: _logger.info('The Signal2D (PIL) IO features are not available') try: from hyperspy.io_plugins import bcf io_plugins.append(bcf) except ImportError: _logger.warning('The Bruker composite file reader cant be loaded', 'due to lxml library missing. Please install lxml', 'and python bindings, to enable the bcf loader.') default_write_ext = set() for plugin in io_plugins: if plugin.writes: default_write_ext.add( plugin.file_extensions[plugin.default_extension])<|fim▁end|>
# GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
<|file_name|>RandomPermutationChooser.cc<|end_file_name|><|fim▁begin|>// $Id$ /* Copyright (C) 2004-2006 John B. Shumway, Jr. This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. */ #ifdef HAVE_CONFIG_H #include <config.h> #endif #include "RandomPermutationChooser.h" #include "util/RandomNumGenerator.h" #include "util/Permutation.h" RandomPermutationChooser::RandomPermutationChooser(const int nsize) : PermutationChooser(nsize), nsize(nsize) { } bool RandomPermutationChooser::choosePermutation() { for (int ifrom=0; ifrom<nsize;) { int ito = (int)(nsize*RandomNumGenerator::getRand()); if (ito==nsize) ito=nsize-1;<|fim▁hole|> if (jfrom==ifrom) (*permutation)[ifrom++]=ito; if ((*permutation)[jfrom]==ito) break; } } return true; }<|fim▁end|>
for (int jfrom=0; jfrom<=ifrom; ++jfrom) {
<|file_name|>spec.js<|end_file_name|><|fim▁begin|>var doxygen = require("../lib/nodeDoxygen"); var rimraf = require("rimraf"); var exec = require("child_process").execSync; /*describe("Download:", function () { beforeEach(function (done) { rimraf("dist", function (error) { if (error) { throw error; } else { done(); } }); });<|fim▁hole|> it("FTP", function (done) { doxygen.downloadVersion() .then(function () { done(); }, function (error) { done(); done.fail(error); }); }, 360000); it("HTTP", function (done) { doxygen.downloadVersion(null, "http").then(function () { done(); }, function (error) { done(); done.fail(error); }); }, 360000); });*/ describe("Generates the config:", function () { it("From a task, with the default config location", function () { var userOptions = { OUTPUT_DIRECTORY: "testResults/Docs", INPUT: "./", RECURSIVE: "YES", FILE_PATTERNS: ["*.js", "*.md"], EXTENSION_MAPPING: "js=Javascript", GENERATE_LATEX: "NO", EXCLUDE_PATTERNS: ["*/node_modules/*", "*/filters/*"], PROJECT_NAME: "Node-Doxygen", USE_MDFILE_AS_MAINPAGE: "README.md" }; doxygen.createConfig(userOptions); }); it("From a task, with a custom config location", function () { var userOptions = { OUTPUT_DIRECTORY: "testResults/Docs", INPUT: "./", RECURSIVE: "YES", FILE_PATTERNS: ["*.js", "*.md"], EXTENSION_MAPPING: "js=Javascript", GENERATE_LATEX: "NO", EXCLUDE_PATTERNS: ["*/node_modules/*", "*/filters/*"], PROJECT_NAME: "Node-Doxygen", USE_MDFILE_AS_MAINPAGE: "README.md" }; doxygen.createConfig(userOptions, "testResults/config"); }); it("From CLI, with the default config location", function () { var userOptions = { OUTPUT_DIRECTORY: "testResults/Docs", INPUT: "./", RECURSIVE: "YES", FILE_PATTERNS: ["*.js", "*.md"], EXTENSION_MAPPING: "js=Javascript", GENERATE_LATEX: "NO", EXCLUDE_PATTERNS: ["*/node_modules/*", "*/filters/*"], PROJECT_NAME: "Node-Doxygen", USE_MDFILE_AS_MAINPAGE: "README.md" }; exec("node ./bin/nodeDoxygen.js --config --jsonParams=" + 
JSON.stringify(JSON.stringify(userOptions)), { stdio: ["pipe", process.stdout, "pipe"] }); }); it("From CLI, with a custom config location", function () { var userOptions = { OUTPUT_DIRECTORY: "testResults/Docs", INPUT: "./", RECURSIVE: "YES", FILE_PATTERNS: ["*.js", "*.md"], EXTENSION_MAPPING: "js=Javascript", GENERATE_LATEX: "NO", EXCLUDE_PATTERNS: ["*/node_modules/*", "*/filters/*"], PROJECT_NAME: "Node-Doxygen", USE_MDFILE_AS_MAINPAGE: "README.md" }; exec("node ./bin/nodeDoxygen.js --config --configPath=testResults/config --jsonParams=" + JSON.stringify(JSON.stringify(userOptions)), { stdio: ["pipe", process.stdout, "pipe"] }); }); }); describe("Generates the docs:", function () { beforeAll(function (done) { doxygen.downloadVersion() .then(function () { done(); }, function (error) { throw error; }); }, 360000); beforeEach(function (done) { rimraf("testResults/Docs", function (error) { if (error) { throw error; } else { done(); } }); }); it("From a task, with a custom config location", function () { doxygen.run("testResults/config"); }); it("From a task, with the default config location", function () { doxygen.run(); }); it("From CLI, with a custom config location", function () { exec("node ./bin/nodeDoxygen.js --docs --configPath=testResults/config", { stdio: ["pipe", process.stdout, "pipe"] }); }); it("From CLI, with the default config location", function () { exec("node ./bin/nodeDoxygen.js --docs", { stdio: ["pipe", process.stdout, "pipe"] }); }); });<|fim▁end|>
<|file_name|>filters.js<|end_file_name|><|fim▁begin|>import { put, select, takeEvery } from 'redux-saga/effects' import { getQuotes } from './quotes' import { has, toArray } from 'lodash' import { createAuthor, removeAuthor, updateAuthor } from "./authors"; import { createCategory, removeCategory, updateCategory } from "./categories"; import { createTag, removeTag, updateTag } from "./tags"; const FILTER_CHANGED = 'FILTER_CHANGED' const SEARCH_CHANGED = 'SEARCH_CHANGED' const initialState = { categories: new Set(), authors: new Set(), tags: new Set(),<|fim▁hole|> search: null, snapshot: { categories: new Set(), authors: new Set(), tags: new Set(), search: null, } } export function reducer(state = initialState, action = {}) { switch (action.type) { case FILTER_CHANGED: const id = action.payload.value const field = action.payload.field const checked = action.payload.checked let newState if (checked) { const newSet = new Set(state[field]) newSet.add(id) newState = newSet } else { const newSet = new Set(state[field]) newSet.delete(id) newState = newSet } return { ...state, [field]: newState } case SEARCH_CHANGED: return { ...state, search: action.payload.value } case 'SET_FILTERS': return { ...state, ...action.payload } default: return state } } export function* handleFilterChange() { const filters = yield select((state) => state.filters) const routing = yield select((state) => state.routing) const params = { category: toArray(filters.categories), author: toArray(filters.authors), tags: toArray(filters.tags), search: filters.search, user__username: has(routing.match.params, 'username') ? 
routing.match.params.username : null } yield put(getQuotes(params)) } export function* saga() { yield takeEvery(FILTER_CHANGED, handleFilterChange) yield takeEvery(SEARCH_CHANGED, handleFilterChange) } export const changeFilter = (field, value, checked) => ({ type: FILTER_CHANGED, payload: {field, value, checked} }) export const changeSearch = (value) => ({ type: SEARCH_CHANGED, payload: {value} }) export const createFilter = (type, data) => { switch (type) { case 'authors': return createAuthor(data) case 'categories': return createCategory(data) case 'tags': return createTag(data) default: return null } } export const updateFilter = (type, data) => { switch (type) { case 'authors': return updateAuthor(data) case 'categories': return updateCategory(data) case 'tags': return updateTag(data) default: return null } } export const removeFilter = (type, data) => { switch (type) { case 'authors': return removeAuthor(data) case 'categories': return removeCategory(data) case 'tags': return removeTag(data) default: return null } }<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>"""Landlab component that simulates relative incidence shortwave radiation on sloped surface. Landlab component that computes 1D and 2D total incident shortwave radiation. This code also computes relative incidence shortwave radiation compared to a flat surface. Ref: Bras, Rafael L. Hydrology: an introduction to hydrologic science. Addison Wesley Publishing Company, 1990. .. codeauthor:: Sai Nudurupati & Erkan Istanbulluoglu Examples -------- >>> import numpy as np >>> from landlab import RasterModelGrid >>> from landlab.components import Radiation Create a grid on which to calculate incident shortwave radiation<|fim▁hole|>>>> grid = RasterModelGrid((5, 4), spacing=(0.2, 0.2)) The grid will need some input data. To check the names of the fields that provide the input to this component, use the *input_var_names* class property. >>> Radiation.input_var_names ('topographic__elevation',) Check the units for the fields. >>> Radiation.var_units('topographic__elevation') 'm' Create the input fields. >>> grid['node']['topographic__elevation'] = np.array([ ... 0., 0., 0., 0., ... 1., 1., 1., 1., ... 2., 2., 2., 2., ... 3., 4., 4., 3., ... 4., 4., 4., 4.]) If you are not sure about one of the input or output variables, you can get help for specific variables. >>> Radiation.var_help('topographic__elevation') name: topographic__elevation description: elevation of the ground surface relative to some datum units: m at: node intent: in Check the output variable names >>> sorted(Radiation.output_var_names) # doctest: +NORMALIZE_WHITESPACE ['radiation__incoming_shortwave_flux', 'radiation__net_shortwave_flux', 'radiation__ratio_to_flat_surface'] Instantiate the 'Radiation' component to work on this grid, and run it. 
>>> rad = Radiation(grid) Run the *update* method to update output variables with current time >>> current_time = 0.5 >>> rad.update(current_time) >>> rad.grid.at_cell['radiation__ratio_to_flat_surface'] array([ 0.38488566, 0.38488566, 0.33309785, 0.33309785, 0.37381705, 0.37381705]) >>> rad.grid.at_cell['radiation__incoming_shortwave_flux'] array([ 398.33664988, 398.33664988, 344.73895668, 344.73895668, 386.88120966, 386.88120966]) """ from .radiation import Radiation __all__ = ['Radiation', ]<|fim▁end|>
<|file_name|>carbon-components-tests.ts<|end_file_name|><|fim▁begin|>import { Accordion, Checkbox, CodeSnippet, ContentSwitcher, CopyButton, DataTable, DataTableV2, DatePicker, Dropdown, FileUploader, FloatingMenu, HeaderNav, HeaderSubmenu, InlineLoading, Loading, Modal, NavigationMenu, Notification, NumberInput, OverflowMenu, Pagination, PaginationNav, ProductSwitcher, ProgressIndicator, Search, SideNav, Slider, StructuredList, Tab, TextInput, Tile, Toolbar, Tooltip, TooltipSimple, settings, } from 'carbon-components'; <|fim▁hole|>accordion._handleKeypress({}); accordion._toggle(document.getElementById('root')!); Accordion.components; Accordion.options; const checkbox = new Checkbox(document.getElementById('root')!); checkbox._handleBlur(); checkbox._handleClick(); checkbox._handleFocus(); checkbox._indeterminateCheckbox(); checkbox._initCheckbox(); checkbox.setDisabled(true); checkbox.setState(false); settings.prefix; // $ExpectType string settings.selectorFocusable; // $ExpectType string settings.selectorTabbable; // $ExpectType string const tooltip = new Tooltip(document.getElementById('tooltip')!, { classShown: 'tooltip' }); tooltip.changeState('', {}, () => {}); const tableEl = document.getElementById('table')!; const datatable = new DataTableV2(tableEl, { selectorActionCancel: '' }); datatable.activateSearch(tableEl);<|fim▁end|>
const accordion = new Accordion(document.getElementById('root')!, { selectorAccordionContent: '' }); accordion._checkIfButton();
<|file_name|>ServletLocaleFilterTests.java<|end_file_name|><|fim▁begin|>package com.teamunify.i18n.com.teamunify.i18n.webapp; import com.teamunify.i18n.I; import com.teamunify.i18n.webapp.AbstractLocaleFilter; import org.junit.Before; import org.junit.Test; import static junit.framework.Assert.*; import static org.mockito.Mockito.*; import javax.servlet.*; import java.io.IOException; import java.util.Locale; public class ServletLocaleFilterTests { private static Locale computedLocale; private static Locale defaultLocale; Filter f = new AbstractLocaleFilter() { @Override public Locale getLocale(ServletRequest req) { return computedLocale; } @Override public Locale getDefaultLocale() { return defaultLocale; } }; private ServletRequest req; private FilterChain chain; private ServletResponse resp; @Before public void setup() { computedLocale = Locale.US; defaultLocale = Locale.US; req = mock(ServletRequest.class); resp = mock(ServletResponse.class); chain = mock(FilterChain.class); } @Test public void chains_the_filters() throws IOException, ServletException { f.doFilter(req, resp, chain); verify(chain).doFilter(eq(req), eq(resp)); }<|fim▁hole|> f.doFilter(req, resp, chain); assertEquals(I.getCurrentLanguage().locale, computedLocale); } @Test public void uses_the_filter_default_locale_if_compute_fails() throws IOException, ServletException { computedLocale = null; defaultLocale = Locale.KOREA; f.doFilter(req, resp, chain); assertEquals(I.getCurrentLanguage().locale, defaultLocale); } @Test public void uses_the_system_locale_if_no_computed_or_default_available() throws IOException, ServletException { computedLocale = null; defaultLocale = null; f.doFilter(req, resp, chain); assertEquals(I.getCurrentLanguage().locale, Locale.getDefault()); } }<|fim▁end|>
@Test public void uses_the_computed_locale_if_available() throws IOException, ServletException { computedLocale = Locale.CHINA;
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8; -*- # Copyright (C) 2015 - 2019 Lionel Ott # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import copy import logging import threading import time from xml.etree import ElementTree from PyQt5 import QtWidgets import gremlin import gremlin.ui.common import gremlin.ui.input_item class DoubleTapContainerWidget(gremlin.ui.input_item.AbstractContainerWidget): """DoubleTap container for actions for double or single taps.""" def __init__(self, profile_data, parent=None): """Creates a new instance. 
:param profile_data the profile data represented by this widget :param parent the parent of this widget """ super().__init__(profile_data, parent) def _create_action_ui(self): """Creates the UI components.""" self.profile_data.create_or_delete_virtual_button() self.options_layout = QtWidgets.QHBoxLayout() # Activation delay self.options_layout.addWidget( QtWidgets.QLabel("<b>Double-tap delay: </b>") ) self.delay_input = gremlin.ui.common.DynamicDoubleSpinBox() self.delay_input.setRange(0.1, 2.0) self.delay_input.setSingleStep(0.1) self.delay_input.setValue(0.5) self.delay_input.setValue(self.profile_data.delay) self.delay_input.valueChanged.connect(self._delay_changed_cb) self.options_layout.addWidget(self.delay_input) self.options_layout.addStretch() # Activation moment self.options_layout.addWidget(QtWidgets.QLabel("<b>Single/Double Tap: </b>")) self.activate_exclusive = QtWidgets.QRadioButton("exclusive") self.activate_combined = QtWidgets.QRadioButton("combined") if self.profile_data.activate_on == "combined": self.activate_combined.setChecked(True) else: self.activate_exclusive.setChecked(True) self.activate_combined.toggled.connect(self._activation_changed_cb) self.activate_exclusive.toggled.connect(self._activation_changed_cb) self.options_layout.addWidget(self.activate_exclusive) self.options_layout.addWidget(self.activate_combined) self.action_layout.addLayout(self.options_layout) if self.profile_data.action_sets[0] is None: self._add_action_selector( lambda x: self._add_action(0, x), "Single Tap" ) else: self._create_action_widget( 0, "Single Tap", self.action_layout, gremlin.ui.common.ContainerViewTypes.Action ) if self.profile_data.action_sets[1] is None: self._add_action_selector( lambda x: self._add_action(1, x), "Double Tap" ) else: self._create_action_widget( 1, "Double Tap", self.action_layout, gremlin.ui.common.ContainerViewTypes.Action ) def _create_condition_ui(self): if self.profile_data.activation_condition_type == "action": if 
self.profile_data.action_sets[0] is not None: self._create_action_widget( 0, "Single Tap", self.activation_condition_layout, gremlin.ui.common.ContainerViewTypes.Condition ) if self.profile_data.action_sets[1] is not None: self._create_action_widget( 1, "Double Tap", self.activation_condition_layout, gremlin.ui.common.ContainerViewTypes.Condition ) def _add_action_selector(self, add_action_cb, label): """Adds an action selection UI widget. :param add_action_cb function to call when an action is added :param label the description of the action selector """ action_selector = gremlin.ui.common.ActionSelector( self.profile_data.get_input_type() ) action_selector.action_added.connect(add_action_cb) group_layout = QtWidgets.QVBoxLayout() group_layout.addWidget(action_selector) group_layout.addStretch(1) group_box = QtWidgets.QGroupBox(label) group_box.setLayout(group_layout) self.action_layout.addWidget(group_box) def _create_action_widget(self, index, label, layout, view_type): """Creates a new action widget. :param index the index at which to store the created action :param label the name of the action to create """ widget = self._create_action_set_widget( self.profile_data.action_sets[index], label, view_type ) layout.addWidget(widget) widget.redraw() widget.model.data_changed.connect(self.container_modified.emit) def _add_action(self, index, action_name): """Adds a new action to the container. :param action_name the name of the action to add """ plugin_manager = gremlin.plugin_manager.ActionPlugins() action_item = plugin_manager.get_class(action_name)(self.profile_data) if self.profile_data.action_sets[index] is None: self.profile_data.action_sets[index] = [] self.profile_data.action_sets[index].append(action_item) self.profile_data.create_or_delete_virtual_button() self.container_modified.emit() def _delay_changed_cb(self, value): """Updates the activation delay value. 
:param value the value after which the double-tap action activates """ self.profile_data.delay = value def _activation_changed_cb(self, value): """Updates the activation condition state. :param value whether or not the selection was toggled - ignored """ if self.activate_combined.isChecked(): self.profile_data.activate_on = "combined" else: self.profile_data.activate_on = "exclusive" def _handle_interaction(self, widget, action): """Handles interaction icons being pressed on the individual actions. :param widget the action widget on which an action was invoked :param action the type of action being invoked """ index = self._get_widget_index(widget) if index != -1: if index == 0 and self.profile_data.action_sets[0] is None: index = 1 self.profile_data.action_sets[index] = None self.container_modified.emit() def _get_window_title(self): """Returns the title to use for this container. :return title to use for the container """ if self.profile_data.is_valid(): return "Double Tap: ({}) / ({})".format( ", ".join([a.name for a in self.profile_data.action_sets[0]]), ", ".join([a.name for a in self.profile_data.action_sets[1]]) ) else: return "DoubleTap" class DoubleTapContainerFunctor(gremlin.base_classes.AbstractFunctor): """Executes the contents of the associated DoubleTap container.""" def __init__(self, container): super().__init__(container) self.single_tap = gremlin.execution_graph.ActionSetExecutionGraph( container.action_sets[0] ) self.double_tap = gremlin.execution_graph.ActionSetExecutionGraph( container.action_sets[1] ) self.delay = container.delay self.activate_on = container.activate_on self.start_time = 0 self.double_action_timer = None self.tap_type = None self.value_press = None self.event_press = None def process_event(self, event, value): # TODO: Currently this does not handle hat or axis events, however # virtual buttons created on those inputs is supported if not isinstance(value.current, bool): logging.getLogger("system").warning( "Invalid data type 
received in DoubleTap container: {}".format( type(event.value) ) ) return False # Copy state when input is pressed if value.current: self.value_press = copy.deepcopy(value) self.event_press = event.clone() # Execute double tap logic if value.current: # Second activation within the delay, i.e. second tap if (self.start_time + self.delay) > time.time(): # Prevent repeated double taps from repeated button presses self.start_time = 0 self.tap_type = "double" if self.activate_on == "exclusive": self.double_action_timer.cancel() # First acitvation within the delay, i.e. first tap else: self.start_time = time.time() self.tap_type = "single" if self.activate_on == "exclusive": self.double_action_timer = \ threading.Timer(self.delay, self._single_tap) self.double_action_timer.start() # Input is being released at this point elif self.double_action_timer and self.double_action_timer.is_alive(): # if releasing single tap before delay # we will want to send a short press and release self.double_action_timer.cancel() self.double_action_timer = threading.Timer( (self.start_time + self.delay) - time.time(), lambda: self._single_tap(event, value) ) self.double_action_timer.start() if self.tap_type == "double": self.double_tap.process_event(event, value) if self.activate_on == "combined": self.single_tap.process_event(event, value) elif self.activate_on != "exclusive": self.single_tap.process_event(event, value) def _single_tap(self, event_release=None, value_release=None): """Callback executed, when the delay expires.""" self.single_tap.process_event(self.event_press, self.value_press) if event_release: time.sleep(0.05) self.single_tap.process_event(event_release, value_release) class DoubleTapContainer(gremlin.base_classes.AbstractContainer): """A container with two actions which are triggered based on the delay between the taps. A single tap will run the first action while a double tap will run the second action. 
""" name = "Double Tap" tag = "double_tap" functor = DoubleTapContainerFunctor widget = DoubleTapContainerWidget input_types = [ gremlin.common.InputType.JoystickAxis, gremlin.common.InputType.JoystickButton, gremlin.common.InputType.JoystickHat, gremlin.common.InputType.Keyboard ] interaction_types = [ gremlin.ui.input_item.ActionSetView.Interactions.Edit, ] def __init__(self, parent=None): """Creates a new instance. :param parent the InputItem this container is linked to """ super().__init__(parent)<|fim▁hole|> self.delay = 0.5 self.activate_on = "exclusive" def _parse_xml(self, node): """Populates the container with the XML node's contents. :param node the XML node with which to populate the container """ super()._parse_xml(node) self.delay = gremlin.profile.safe_read(node, "delay", float, 0.5) self.activate_on = \ gremlin.profile.safe_read(node, "activate-on", str, "combined") def _generate_xml(self): """Returns an XML node representing this container's data. :return XML node representing the data of this container """ node = ElementTree.Element("container") node.set("type", DoubleTapContainer.tag) node.set("delay", str(self.delay)) node.set("activate-on", self.activate_on) for actions in self.action_sets: as_node = ElementTree.Element("action-set") for action in actions: as_node.append(action.to_xml()) node.append(as_node) return node def _is_container_valid(self): """Returns whether or not this container is configured properly. :return True if the container is configured properly, False otherwise """ return any(len(action_set) for action_set in self.action_sets) # Plugin definitions version = 1 name = "double_tap" create = DoubleTapContainer<|fim▁end|>
self.action_sets = [[], []]
<|file_name|>COLT2015.py<|end_file_name|><|fim▁begin|># coding=utf-8 """ This file is used to make a crawl """ import __init__ import os import re import urllib from utility import prgbar def get_html(url): """Get the html """ page = urllib.urlopen(url) html = page.read() return html <|fim▁hole|>def get_pdf(html): """ xxx""" reg = r'href="(.+?\.pdf)">pdf' pdfre = re.compile(reg) pdflist = re.findall(pdfre, html) dir_name = 'COLT2015' maxrows = len(pdflist) pbar = prgbar.ProgressBar(total=maxrows) if os.path.exists(dir_name) is False: os.mkdir(dir_name) for idx, pdfurl in enumerate(pdflist): filename = dir_name + '/' + pdfurl pbar.log('http://jmlr.org/proceedings/papers/v40/' + pdfurl) if os.path.exists(filename) is True: pbar.log('Exist') else: urllib.urlretrieve( 'http://jmlr.org/proceedings/papers/v40/' + pdfurl, filename) pbar.update(index=(idx + 1)) pbar.finish() if __name__ == '__main__': HTML = get_html("http://jmlr.org/proceedings/papers/v40/") print(get_pdf(HTML))<|fim▁end|>
<|file_name|>PLSVM.py<|end_file_name|><|fim▁begin|>from __future__ import division __author__ = 'wenqihe' import sys import random<|fim▁hole|> def __init__(self, feature_size, label_size, type_hierarchy, lambda_reg=0.1, max_iter=5000, threshold=0.5, batch_size=100): self._feature_size = feature_size self._label_size = label_size self._type_hierarchy = type_hierarchy self._weight = [[0 for col in range(feature_size)] for row in range(label_size)] for i in xrange(label_size): for j in xrange(feature_size): self._weight[i][j] = random.uniform(0, 1) self._lambda_reg = lambda_reg self._max_iter = max_iter self._threshold = threshold self._batch_size = batch_size def fit(self, train_x, train_y): """ :param train_x: list of list :param train_y: list of list :return: """ m = len(train_y) batch = int(math.ceil(m/self._batch_size)) for t in xrange(1, self._max_iter): eta_t = 1.0/(self._lambda_reg*t) dW = [[0 for col in range(self._feature_size)] for row in range(self._label_size)] for j in xrange(self._batch_size): i = random.randint(0, m-1) x = train_x[i] y = train_y[i] ny = [k for k in range(self._label_size) if k not in y] yi = self.find_max(y, x) nyi = self.find_max(ny, x) for feature in x: self._weight[yi][feature] = self._weight[yi][feature]*(1-eta_t*self._lambda_reg) + eta_t self._weight[nyi][feature] = self._weight[nyi][feature]*(1-eta_t*self._lambda_reg) - eta_t # self.update_weight(dW, eta_t, 1) sys.stdout.write('{0} iteration done.\r'.format(t)) sys.stdout.flush() def predict(self, x): labels = set() parent_mapping = self._type_hierarchy._type_hierarchy scores = [] max_index = 0 max_value = self.inner_prod(self._weight[0], x) scores.append(max_value) for i in xrange(1, self._label_size): temp = self.inner_prod(self._weight[i], x) scores.append(temp) if temp>max_value: max_index = i max_value = temp # print scores labels.add(max_index) # Add parent of max_index if any temp = max_index while temp in parent_mapping: labels.add(parent_mapping[temp]) temp = 
parent_mapping[temp] # add child of max_index if meeting threshold temp = max_index while temp != -1: max_sub_index = -1 max_sub_score = -sys.maxint for child in parent_mapping: # check the maximum subtype if parent_mapping[child] == temp: if child < self._label_size: # print child if max_sub_score < scores[child]: max_sub_index = child max_sub_score = scores[child] if max_sub_index != -1 and max_sub_score > self._threshold: labels.add(max_sub_index) temp = max_sub_index return labels def find_max(self, Y, x): random.shuffle(Y) y = Y[0] max_value = self.inner_prod(self._weight[y], x) for i in xrange(1, len(Y)): temp = self.inner_prod(self._weight[Y[i]], x) if temp > max_value: y = Y[i] max_value = temp return y def update_weight(self, dW, eta_t, m): for i in xrange(self._label_size): # L2 = 0 for j in xrange(self._feature_size): self._weight[i][j] = self._weight[i][j]*(1-eta_t*self._lambda_reg) + eta_t*dW[i][j]/m # L2 += self._weight[i][j] * self._weight[i][j] # if L2>0: # factor = min(1, 1/(math.sqrt(self._lambda_reg)*math.sqrt(L2))) # if factor < 1: # for j in xrange(self._feature_size): # self._weight[i][j] *= factor @staticmethod def inner_prod(weight, x): result = 0 for feature in x: result += weight[feature] return result @staticmethod def kernel(x1, x2): i1 = 0 i2 = 0 result = 0 while i1<len(x1) and i2<len(x2): if x1[i1] == x2[i2]: result += 1 i1 += 1 i2 += 1 elif x1[i1] < x2[i2]: i1 += 1 else: i2 += 1 return result<|fim▁end|>
import math class PLSVM:
<|file_name|>test_manager.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. <|fim▁hole|>import mock from keystone import catalog from keystone.common import manager from keystone.tests import unit class TestCreateLegacyDriver(unit.BaseTestCase): @mock.patch('oslo_log.versionutils.report_deprecated_feature') def test_class_is_properly_deprecated(self, mock_reporter): Driver = manager.create_legacy_driver(catalog.CatalogDriverV8) # NOTE(dstanek): I want to subvert the requirement for this # class to implement all of the abstract methods. Driver.__abstractmethods__ = set() impl = Driver() details = { 'as_of': 'Liberty', 'what': 'keystone.catalog.core.Driver', 'in_favor_of': 'keystone.catalog.core.CatalogDriverV8', 'remove_in': mock.ANY, } mock_reporter.assert_called_with(mock.ANY, mock.ANY, details) self.assertEqual('N', mock_reporter.call_args[0][2]['remove_in'][0]) self.assertIsInstance(impl, catalog.CatalogDriverV8)<|fim▁end|>
<|file_name|>0013_auto_20180305_1339.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.10.4 on 2018-03-05 05:39 from __future__ import unicode_literals from django.db import migrations, models <|fim▁hole|> dependencies = [ ('sponsors', '0012_sponsor_level_smallint'), ] operations = [ migrations.AlterField( model_name='sponsor', name='conference', field=models.SlugField(choices=[('pycontw-2016', 'PyCon Taiwan 2016'), ('pycontw-2017', 'PyCon Taiwan 2017'), ('pycontw-2018', 'PyCon Taiwan 2018')], default='pycontw-2018', verbose_name='conference'), ), ]<|fim▁end|>
class Migration(migrations.Migration):
<|file_name|>configureUrlQuery-test.js<|end_file_name|><|fim▁begin|>import configureUrlQuery from '../configureUrlQuery';<|fim▁hole|>import urlQueryConfig from '../urlQueryConfig'; it('updates the singleton query object', () => { configureUrlQuery({ test: 99 }); expect(urlQueryConfig.test).toBe(99); configureUrlQuery({ history: 123 }); expect(urlQueryConfig.history).toBe(123); expect(urlQueryConfig.test).toBe(99); }); it('does not break on undefined options', () => { configureUrlQuery(); expect(Object.keys(urlQueryConfig).length).toBeGreaterThan(0); }); it('configures entrySeparator and keyValSeparator global values', () => { expect(urlQueryConfig.entrySeparator).toBe('_'); expect(urlQueryConfig.keyValSeparator).toBe('-'); configureUrlQuery({ entrySeparator: '__' }); expect(urlQueryConfig.entrySeparator).toBe('__'); expect(urlQueryConfig.keyValSeparator).toBe('-'); configureUrlQuery({ keyValSeparator: '--' }); expect(urlQueryConfig.entrySeparator).toBe('__'); expect(urlQueryConfig.keyValSeparator).toBe('--'); // Reset so it does not effect other tests configureUrlQuery({ entrySeparator: '_', keyValSeparator: '-' }); });<|fim▁end|>
<|file_name|>Model.hpp<|end_file_name|><|fim▁begin|>#pragma once #include <vector> #ifdef __APPLE__ #include <OpenGL/gl3.h> #else #include <GL/glew.h> #endif #include <FLIGHT/Core/BB.hpp> #include "Shader.hpp" #include "Vertex.hpp" #include <array> #include <fstream> #include <iostream> #include <memory> #include <sstream> namespace FLIGHT { class Model { public: struct Binding { size_t numVertices; explicit Binding(const size_t numVertices) { this->numVertices = numVertices; } Binding(const Binding &) = delete; Binding(Binding && other) { numVertices = other.numVertices; } ~Binding() { glBindBuffer(GL_ARRAY_BUFFER, 0); } }; static std::shared_ptr<Model> LoadFromWavefront(const std::string & path); virtual Binding Bind(ShaderProgram & shader) = 0; virtual ~Model(); const AABB & GetAABB() const; protected: Model(const AABB & aabb); GLuint m_vbo; size_t m_vbLen; AABB m_aabb; }; class ModelP : public Model { friend class Model; static std::shared_ptr<Model> LoadFromWavefront(std::fstream &); public: ModelP(const std::vector<VertexP> & data, const AABB & aabb); Binding Bind(ShaderProgram & shader) override; }; class ModelPT : public Model { friend class Model;<|fim▁hole|>public: ModelPT(const std::vector<VertexPT> & data, const AABB & aabb); Binding Bind(ShaderProgram & shader) override; }; class ModelPN : public Model { friend class Model; static std::shared_ptr<Model> LoadFromWavefront(std::fstream &); public: ModelPN(const std::vector<VertexPN> & data, const AABB & aabb); Binding Bind(ShaderProgram & shader) override; }; class ModelPTN : public Model { friend class Model; static std::shared_ptr<Model> LoadFromWavefront(std::fstream &); public: ModelPTN(const std::vector<VertexPTN> & data, const AABB & aabb); Binding Bind(ShaderProgram & shader) override; }; }<|fim▁end|>
static std::shared_ptr<Model> LoadFromWavefront(std::fstream &);
<|file_name|>passphrase_recover.go<|end_file_name|><|fim▁begin|>// Copyright 2019 Keybase, Inc. All rights reserved. Use of // this source code is governed by the included BSD license. package engine import ( "fmt" "sort" "github.com/keybase/client/go/kbun" "github.com/keybase/client/go/libkb" keybase1 "github.com/keybase/client/go/protocol/keybase1" ) // PassphraseRecover is an engine that implements the "password recovery" flow, // where the user is shown instructions on how to either change their password // on other devices or allows them to change the password using a paper key. type PassphraseRecover struct { arg keybase1.RecoverPassphraseArg libkb.Contextified usernameFound bool } func NewPassphraseRecover(g *libkb.GlobalContext, arg keybase1.RecoverPassphraseArg) *PassphraseRecover { return &PassphraseRecover{ arg: arg, Contextified: libkb.NewContextified(g), } } // Name provides the name of the engine for the engine interface func (e *PassphraseRecover) Name() string { return "PassphraseRecover" } // Prereqs returns engine prereqs func (e *PassphraseRecover) Prereqs() Prereqs { return Prereqs{} } // RequiredUIs returns the required UIs. 
func (e *PassphraseRecover) RequiredUIs() []libkb.UIKind { return []libkb.UIKind{ libkb.LoginUIKind, libkb.SecretUIKind, } } // SubConsumers requires the other UI consumers of this engine func (e *PassphraseRecover) SubConsumers() []libkb.UIConsumer { return []libkb.UIConsumer{ &LoginWithPaperKey{}, } } // Run the engine func (e *PassphraseRecover) Run(mctx libkb.MetaContext) (err error) { defer mctx.Trace("PassphraseRecover#Run", &err)() // If no username was passed, ask for one if e.arg.Username == "" { res, err := mctx.UIs().LoginUI.GetEmailOrUsername(mctx.Ctx(), 0) if err != nil { return err } e.arg.Username = res } // Look up the passed username against the list of configured users if err := e.processUsername(mctx); err != nil { return err } // In the new flow we noop if we're already logged in if loggedIn, _ := isLoggedIn(mctx); loggedIn { mctx.Warning("Already logged in with unlocked device keys") return libkb.LoggedInError{} } mctx.Debug("No device keys available, proceeding with recovery") // Load the user by username ueng := newLoginLoadUser(mctx.G(), e.arg.Username) if err := RunEngine2(mctx, ueng); err != nil { return err } // Now we're taking that user info and evaluating our options ckf := ueng.User().GetComputedKeyFamily() if ckf == nil { return libkb.NewNotFoundError("Account missing key family") } // HasActiveKey rather than HasActiveDevice to handle PGP cases if !ckf.HasActiveKey() { // Go directly to password reset return e.resetPassword(mctx) } if !ckf.HasActiveDevice() { // No point in asking for device selection return e.suggestReset(mctx) } return e.chooseDevice(mctx, ckf) } func (e *PassphraseRecover) processUsername(mctx libkb.MetaContext) error { // Fetch usernames from user configs currentUsername, otherUsernames, err := mctx.G().GetAllUserNames() if err != nil { return err } usernamesMap := map[libkb.NormalizedUsername]struct{}{ currentUsername: {}, } for _, username := range otherUsernames { usernamesMap[username] = struct{}{} } var 
normalized kbun.NormalizedUsername if e.arg.Username != "" { normalized = libkb.NewNormalizedUsername(e.arg.Username) } else { normalized = currentUsername } e.arg.Username = normalized.String() // Check if the passed username is in the map _, ok := usernamesMap[normalized] e.usernameFound = ok return nil } func (e *PassphraseRecover) chooseDevice(mctx libkb.MetaContext, ckf *libkb.ComputedKeyFamily) (err error) { defer mctx.Trace("PassphraseRecover#chooseDevice", &err)() // Reorder the devices for the list devices := partitionDeviceList(ckf.GetAllActiveDevices()) sort.Sort(devices) // Choose an existing device expDevices := make([]keybase1.Device, 0, len(devices)) idMap := make(map[keybase1.DeviceID]libkb.DeviceWithDeviceNumber) for _, d := range devices { // Don't show paper keys if the user has not provisioned on this device if !e.usernameFound && d.Type == keybase1.DeviceTypeV2_PAPER { continue } expDevices = append(expDevices, *d.ProtExportWithDeviceNum()) idMap[d.ID] = d } id, err := mctx.UIs().LoginUI.ChooseDeviceToRecoverWith(mctx.Ctx(), keybase1.ChooseDeviceToRecoverWithArg{ Devices: expDevices, }) if err != nil { return err } // No device chosen, we're going into the reset flow if len(id) == 0 { // Go directly to reset return e.suggestReset(mctx) } mctx.Debug("user selected device %s", id) selected, ok := idMap[id] if !ok { return fmt.Errorf("selected device %s not in local device map", id) } mctx.Debug("device details: %+v", selected) // Roughly the same flow as in provisioning switch selected.Type { case keybase1.DeviceTypeV2_PAPER: return e.loginWithPaperKey(mctx) case keybase1.DeviceTypeV2_DESKTOP, keybase1.DeviceTypeV2_MOBILE: return e.explainChange(mctx, selected) default: return fmt.Errorf("unknown device type: %v", selected.Type) } } func (e *PassphraseRecover) resetPassword(mctx libkb.MetaContext) (err error) { enterReset, err := mctx.UIs().LoginUI.PromptResetAccount(mctx.Ctx(), keybase1.PromptResetAccountArg{ Prompt: 
keybase1.NewResetPromptDefault(keybase1.ResetPromptType_ENTER_RESET_PW), }) if err != nil { return err } if enterReset != keybase1.ResetPromptResponse_CONFIRM_RESET { // Flow cancelled return nil } // User wants a reset password email res, err := mctx.G().API.Post(mctx, libkb.APIArg{<|fim▁hole|> Args: libkb.HTTPArgs{ "email_or_username": libkb.S{Val: e.arg.Username}, }, AppStatusCodes: []int{libkb.SCOk, libkb.SCBadLoginUserNotFound}, }) if err != nil { return err } if res.AppStatus.Code == libkb.SCBadLoginUserNotFound { return libkb.NotFoundError{Msg: "User not found"} } // done if err := mctx.UIs().LoginUI.DisplayResetMessage(mctx.Ctx(), keybase1.DisplayResetMessageArg{ Kind: keybase1.ResetMessage_RESET_LINK_SENT, }); err != nil { return err } return nil } func (e *PassphraseRecover) suggestReset(mctx libkb.MetaContext) (err error) { enterReset, err := mctx.UIs().LoginUI.PromptResetAccount(mctx.Ctx(), keybase1.PromptResetAccountArg{ Prompt: keybase1.NewResetPromptDefault(keybase1.ResetPromptType_ENTER_FORGOT_PW), }) if err != nil { return err } if enterReset != keybase1.ResetPromptResponse_CONFIRM_RESET { // Cancel the engine as the user elected not to reset their account return nil } // We are certain the user will not know their password, so we can disable that prompt. 
eng := NewAccountReset(mctx.G(), e.arg.Username) eng.skipPasswordPrompt = true if err := eng.Run(mctx); err != nil { return err } // We're ignoring eng.ResetPending() as we've disabled reset completion return nil } func (e *PassphraseRecover) loginWithPaperKey(mctx libkb.MetaContext) (err error) { // First log in using the paper key loginEng := NewLoginWithPaperKey(mctx.G(), e.arg.Username) if err := RunEngine2(mctx, loginEng); err != nil { return err } if err := e.changePassword(mctx); err != nil { // Log out before returning if err2 := RunEngine2(mctx, NewLogout(libkb.LogoutOptions{KeepSecrets: false, Force: true})); err2 != nil { mctx.Warning("Unable to log out after password change failed: %v", err2) } return err } mctx.Debug("PassphraseRecover with paper key success, sending login notification") mctx.G().NotifyRouter.HandleLogin(mctx.Ctx(), e.arg.Username) mctx.Debug("PassphraseRecover with paper key success, calling login hooks") mctx.G().CallLoginHooks(mctx) return nil } func (e *PassphraseRecover) changePassword(mctx libkb.MetaContext) (err error) { // Once logged in, check if there are any server keys hskEng := NewHasServerKeys(mctx.G()) if err := RunEngine2(mctx, hskEng); err != nil { return err } if hskEng.GetResult().HasServerKeys { // Prompt the user explaining that they'll lose server keys proceed, err := mctx.UIs().LoginUI.PromptPassphraseRecovery(mctx.Ctx(), keybase1.PromptPassphraseRecoveryArg{ Kind: keybase1.PassphraseRecoveryPromptType_ENCRYPTED_PGP_KEYS, }) if err != nil { return err } if !proceed { return libkb.NewCanceledError("Password recovery canceled") } } // We either have no server keys or the user is OK with resetting them // Prompt the user for a new passphrase. 
passphrase, err := e.promptPassphrase(mctx) if err != nil { return err } // ppres.Passphrase contains our new password // Run passphrase change to finish the flow changeEng := NewPassphraseChange(mctx.G(), &keybase1.PassphraseChangeArg{ Passphrase: passphrase, Force: true, }) if err := RunEngine2(mctx, changeEng); err != nil { return err } // We have a new passphrase! return nil } func (e *PassphraseRecover) explainChange(mctx libkb.MetaContext, device libkb.DeviceWithDeviceNumber) (err error) { var name string if device.Description != nil { name = *device.Description } // The actual contents of the shown prompt will depend on the UI impl return mctx.UIs().LoginUI.ExplainDeviceRecovery(mctx.Ctx(), keybase1.ExplainDeviceRecoveryArg{ Name: name, Kind: device.Type.ToDeviceType(), }) } func (e *PassphraseRecover) promptPassphrase(mctx libkb.MetaContext) (string, error) { arg := libkb.DefaultPassphraseArg(mctx) arg.WindowTitle = "Pick a new passphrase" arg.Prompt = fmt.Sprintf("Pick a new strong passphrase (%d+ characters)", libkb.MinPassphraseLength) arg.Type = keybase1.PassphraseType_VERIFY_PASS_PHRASE ppres, err := libkb.GetKeybasePassphrase(mctx, mctx.UIs().SecretUI, arg) if err != nil { return "", err } return ppres.Passphrase, nil }<|fim▁end|>
Endpoint: "send-reset-pw", SessionType: libkb.APISessionTypeNONE,
<|file_name|>LogFileAccessManager.py<|end_file_name|><|fim▁begin|>#! /usr/bin/env python """ ############################################################################################# #<|fim▁hole|># Name: LogFileAccessManager.py # # @author: Nicholas Lemay # # @license: MetPX Copyright (C) 2004-2006 Environment Canada # MetPX comes with ABSOLUTELY NO WARRANTY; For details type see the file # named COPYING in the root of the source directory tree. # # Description : Utility class used to manage the access to the log files by the # the pickle updater. # # Note : If this file is to be modified, please run the main() method at the bottom of this # file to make sure everything still works properly. Feel free to add tests if needed. # # While using this class, you can either use only one file with all your entries # and give a different identifier to all of you entries, or you can use different # files. # # Using a single file however can be problematic if numerous process try to update # the file at the same time. # ############################################################################################# """ import os, sys, commands, time sys.path.insert(1, os.path.dirname( os.path.abspath(__file__) ) + '/../../') from pxStats.lib.StatsPaths import StatsPaths from pxStats.lib.CpickleWrapper import CpickleWrapper class LogFileAccessManager(object): def __init__( self, accessDictionary = None, accessFile = "" ): """ @summary: LogFileAccessManager constructor. @param accessArrays: @param accessFile: """ paths = StatsPaths() paths.setPaths() if accessFile =="": accessFile = paths.STATSLOGACCESS + "default" self.accessDictionary = accessDictionary or {} # Empty array to start with. self.accessFile = accessFile #File that contains the current file acces. if self.accessDictionary == {} and os.path.isfile( self.accessFile ): self.loadAccessFile() def saveAccessDictionary( self ): """ @summary: Saves the current accessDictionary into the accessfile. 
""" if not os.path.isdir( os.path.dirname( self.accessFile ) ): os.makedirs( os.path.dirname( self.accessFile ) ) CpickleWrapper.save( self.accessDictionary, self.accessFile ) def loadAccessFile(self): """ @summary: Loads the accessFile into the accessDictionary. """ self.accessDictionary = CpickleWrapper.load( self.accessFile ) def getLineAssociatedWith( self, identifier ): """ @param identifier: Identifier string of the following format: fileType_client/sourcename_machineName @return: returns the first line of the last file accessed by the identifier. If identifier has no associated line, the returned line will be "". """ line = "" try:#In case the key does not exist. line = self.accessDictionary[ identifier ][0] except:#Pass keyerror pass return line def getLastReadPositionAssociatedWith(self, identifier): """ @param identifier: Identifier string of the following format: fileType_client/sourcename_machineName @return: returns the last read position of the last file accessed by the identifier. If no position is associated with identifier will return 0. """ lastReadPositon = 0 try:#In case the key does not exist. lastReadPositon = self.accessDictionary[ identifier ][1] except:#Pass keyerror pass return lastReadPositon def getFirstLineFromFile(self, fileName): """ @summary: Reads the first line of a file and returns it. @param fileName: File from wich you want to know @return: The first line of the specified file. """ firstLine = "" if os.path.isfile( fileName ): fileHandle = open( fileName, "r") firstLine = fileHandle.readline() fileHandle.close() return firstLine def getFirstLineAndLastReadPositionAssociatedwith(self, identifier): """ @param identifier: Identifier string of the following format: fileType_client/sourcename_machineName @return : A tuple containing the first line of the last file read(in string format) and the last read position (int format). """ line = "" lastReadPositon = 0 try:#In case the key does not exist. 
line ,lastReadPositon = self.accessDictionary[ identifier ] except:#Pass keyerror pass return line, lastReadPositon def setFirstLineAssociatedwith(self, firstLine, identifier ): """ @summary: Simple setter that hides data structure implementation so that methods still work if implementation is ever to change. @param firstLine: First line to set. @param identifier:Identifier string of the following format: fileType_client/sourcename_machineName """ currentLastReadPosition = self.getLastReadPositionAssociatedWith(identifier) self.accessDictionary[ identifier ] = firstLine, currentLastReadPosition def setLastReadPositionAssociatedwith(self, lastReadPosition, identifier ): """ @summary: Simple setter that hides data structure implementation so that methods still work if implementation is ever to change. @param lastReadPosition: Position to set. @param identifier:Identifier string of the following format: fileType_client/sourcename_machineName """ currentFirstLine = self.getLineAssociatedWith(identifier) self.accessDictionary[ identifier ] = currentFirstLine, lastReadPosition def setFirstLineAndLastReadPositionAssociatedwith(self, firstLine, lastReadPosition, identifier ): """ @summary: Simple setter that hides data structure implementation so that methods still work if implementation is ever to change. @param firstLine: First line to set. @param lastReadPosition: Position to set. @param identifier:Identifier string of the following format: fileType_client/sourcename_machineName """ self.accessDictionary[ identifier ] = (firstLine, lastReadPosition) def isTheLastFileThatWasReadByThisIdentifier(self, fileName, identifier ): """ @summary : Returns whether or not(True or False ) the specified file was the last one read by the identifier. @param fileName: Name fo the file to be verified. 
@param identifier: Identifier string of the following format: fileType_client/sourcename_machineName @return: Returns whether or not(True or False ) the specified file was the last one read by the identifier. """ lastFileThatWasRead = False if os.path.isfile(fileName): lastLineRead = self.getLineAssociatedWith(identifier) filehandle = open( fileName, "r") firstLineOfTheFile = filehandle.readline() if lastLineRead == firstLineOfTheFile: lastFileThatWasRead = True filehandle.close() return lastFileThatWasRead def main(): """ @summary: Small test case to see if everything works out well. @note: IMPORTANT if you modifiy this file, run this method to make sure it still passes all the tests. If test are no longer valid, please modify accordingly. """ from LogFileAccessManager import LogFileAccessManager paths = StatsPaths() paths.setPaths() # # Create text file for testing. # testDirectory = paths.STATSDATA + "logFileAccessTestFolder/" if not os.path.isdir( testDirectory ) : os.makedirs(testDirectory) testTextfile = testDirectory + "testTextfile" fileHandle = open( testTextfile , 'w' ) old_stdout = sys.stdout #redirect standard output to the file sys.stdout = fileHandle for i in range(100): print "%s-A line written for testing." %i fileHandle.close() sys.stdout = old_stdout #resets standard output # #Read file like normal file and stop in the middle. # fileHandle = open( testTextfile , 'r' ) for i in range(50): fileHandle.readline() lastReadPosition = fileHandle.tell() fileHandle.close() # # Set LogFileAccessManager with the previous infos. # testFile = testDirectory + "testLFAMfile" lfam = LogFileAccessManager( accessFile = testFile ) firstLine = lfam.getFirstLineFromFile( testTextfile ) lfam.setFirstLineAndLastReadPositionAssociatedwith( firstLine, lastReadPosition, "testId" ) # # Unit-like test every method to make sure the result is what is expected. # Section for getters. # if firstLine != "0-A line written for testing.\n": print "getFirstLineFromFile is corrupted. 
Please repair " if lfam.getFirstLineAndLastReadPositionAssociatedwith("testId") != ("0-A line written for testing.\n",1540 ): print "getFirstLineAndLastReadPositionAssociatedwith is corrupted. Please repair." if lfam.getLastReadPositionAssociatedWith( "testId" ) != 1540: print "getLastReadPositionAssociatedWith is corrupted. Please repair." # # Section for testing Setters # lfam.setFirstLineAssociatedwith("firstLine", 'testId') if lfam.getLineAssociatedWith('testId') != 'firstLine': print "setFirstLineAssociatedwith is corrupted. Please repair." lfam.setLastReadPositionAssociatedwith( 18987, 'testId') if lfam.getLastReadPositionAssociatedWith('testId') != 18987: print "setLastReadPositionAssociatedwith is corrupted. Please repair." lfam.setFirstLineAndLastReadPositionAssociatedwith("testline2", 1285647, 'testId') if lfam.getFirstLineAndLastReadPositionAssociatedwith('testId') != ("testline2", 1285647): print "setFirstLineAndLastReadPositionAssociatedwith is corrupted. Please repair." lfam.saveAccessDictionary() lfam.loadAccessFile() if lfam.getFirstLineAndLastReadPositionAssociatedwith('testId') != ("testline2", 1285647): print "saveAccessDictionary and/or loadAccessFile is corrupted. Please repair." print "Testing done." if __name__ == '__main__': main()<|fim▁end|>
#
<|file_name|>test_leave_period.py<|end_file_name|><|fim▁begin|># Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and Contributors # See license.txt import unittest import frappe import erpnext test_dependencies = ["Employee", "Leave Type", "Leave Policy"] class TestLeavePeriod(unittest.TestCase): pass def create_leave_period(from_date, to_date, company=None): leave_period = frappe.db.get_value('Leave Period', dict(company=company or erpnext.get_default_company(), from_date=from_date, to_date=to_date, is_active=1), 'name') if leave_period: return frappe.get_doc("Leave Period", leave_period)<|fim▁hole|> "company": company or erpnext.get_default_company(), "from_date": from_date, "to_date": to_date, "is_active": 1 }).insert() return leave_period<|fim▁end|>
leave_period = frappe.get_doc({ "doctype": "Leave Period",
<|file_name|>selectssperspective.js<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2009 * Jan-Felix Schwarz * * Permission is hereby granted, free of charge, to any person obtaining a * copy of this software and associated documentation files (the "Software"), * to deal in the Software without restriction, including without limitation * the rights to use, copy, modify, merge, publish, distribute, sublicense, * and/or sell copies of the Software, and to permit persons to whom the * Software is furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER * DEALINGS IN THE SOFTWARE. 
**/ if (!ORYX.Plugins) { ORYX.Plugins = new Object(); } ORYX.Plugins.SelectStencilSetPerspective = { facade: undefined, extensions : undefined, perspectives: undefined, construct: function(facade) { this.facade = facade; var panel = new Ext.Panel({ cls:'selectssperspective', border: false, autoWidth:true, autoScroll:true }); var region = this.facade.addToRegion("west", panel); var jsonObject = this.facade.getStencilSetExtensionDefinition(); /* Determine available extensions */ this.extensions = {}; jsonObject.extensions.each(function(ext) { this.extensions[ext.namespace] = ext; }.bind(this)); /* Determine available extensions */ this.perspectives = {}; jsonObject.perspectives.each(function(per) { this.perspectives[per.namespace] = per; }.bind(this)); this.facade.getStencilSets().values().each((function(sset) { var validPerspectives = jsonObject.perspectives.findAll(function(perspective){ if(perspective.stencilset == sset.namespace()) return true; else return false; }); // If one perspective is defined, load this if (validPerspectives.size() === 1) { this.loadPerspective(validPerspectives.first().namespace); // If more than one perspective is defined, add a combobox and load the first one } else if (validPerspectives.size() > 1) { this.createPerspectivesCombobox(panel, sset, validPerspectives); } }).bind(this)); }, createPerspectivesCombobox: function(panel, stencilset, perspectives) { var lang = ORYX.I18N.Language.split("_").first(); var data = []; perspectives.each(function(perspective) { data.push([perspective.namespace, (perspective["title_"+lang]||perspective.title).unescapeHTML(), perspective["description_"+lang]||perspective.description]); }); var store = new Ext.data.SimpleStore({ fields: ['namespace', 'title', 'tooltip'], data: data }); var combobox = new Ext.form.ComboBox({ store : store, displayField : 'title', valueField : 'namespace', forceSelection : true, typeAhead : true, mode : 'local', allowBlank : false, autoWidth : true, triggerAction : 'all', 
emptyText : 'Select a perspective...', selectOnFocus : true, tpl : '<tpl for="."><div ext:qtip="{tooltip}" class="x-combo-list-item">{[(values.title||"").escapeHTML()]}</div></tpl>' }); //panel.on("resize", function(){combobox.setWidth(panel.body.getWidth())}); panel.add(combobox); panel.doLayout(); combobox.on('beforeselect', this.onSelect ,this) this.facade.registerOnEvent(ORYX.CONFIG.EVENT_LOADED, function(){ this.facade.getStencilSets().values().each(function(stencilset) { var ext = stencilset.extensions().values() if (ext.length > 0){ var persp = perspectives.find(function(perspective){ return (perspective.extensions && perspective.extensions.include(ext[0].namespace)) || // Check if there is the extension part of the extension in the perspectives (perspective.addExtensions && perspective.addExtensions.any(function(add){ return // OR Check if the namespace if part of the addExtension part (add.ifIsLoaded === stencilset.namespace() && add.add == ext[0].namespace) || (add.ifIsLoaded !== stencilset.namespace() && add["default"] === ext[0].namespace) // OR is some in the default }) ) }) if (!persp){ persp = perspectives.find(function(r){ return !(r.extensions instanceof Array) || r.extensions.length <= 0 }) } if (persp) { combobox.setValue(data[perspectives.indexOf(persp)][1]); throw $break; } } // Force to load extension combobox.setValue(data[0][1]); this.loadPerspective(data[0][0]); }.bind(this)); }.bind(this)) }, onSelect: function(combobox, record) { if (combobox.getValue() === record.get("namespace") || combobox.getValue() === record.get("title")){ return; } this.loadPerspective(record.json[0]); }, loadPerspective: function(ns){ // If there is no namespace if (!ns){ // unload all extensions this._loadExtensions([], [], true); return; } /* Get loaded stencil set extensions */ var stencilSets = this.facade.getStencilSets(); var loadedExtensions = new Object(); var perspective = this.perspectives[ns]; stencilSets.values().each(function(ss) { 
ss.extensions().values().each(function(extension) { if(this.extensions[extension.namespace]) loadedExtensions[extension.namespace] = extension; }.bind(this)); }.bind(this)); /* Determine extensions that are required for this perspective */ var addExtensions = new Array(); if(perspective.addExtensions||perspective.extensions) { [] .concat(this.perspectives[ns].addExtensions||[]) .concat(this.perspectives[ns].extensions||[]) .compact() .each(function(ext){ if(!ext.ifIsLoaded) { addExtensions.push(this.extensions[ext]); return; } if(loadedExtensions[ext.ifIsLoaded] && this.extensions[ext.add]) { addExtensions.push(this.extensions[ext.add]); } else { if(ext["default"] && this.extensions[ext["default"]]) { addExtensions.push(this.extensions[ext["default"]]); } } }.bind(this)); } /* Determine extension that are not allowed in this perspective */ /* Check if flag to remove all other extension is set */ if(this.perspectives[ns].removeAllExtensions) { window.setTimeout(function(){ this._loadExtensions(addExtensions, undefined, true); }.bind(this), 10); return; } /* Check on specific extensions */ var removeExtensions = new Array(); if(perspective.removeExtensions) { perspective.removeExtensions.each(function(ns){ if (loadedExtensions[ns]) removeExtensions.push(this.extensions[ns]); }.bind(this)); } if (perspective.extensions && !perspective.addExtensions && !perspective.removeExtensions) { var combined = [].concat(addExtensions).concat(removeExtensions).compact(); $H(loadedExtensions).each(function(extension){ var key = extension.key; if (!extension.value.includeAlways&&!combined.any(function(r){ return r.namespace == key })) { removeExtensions.push(this.extensions[key]); } }.bind(this)) } window.setTimeout(function(){ this._loadExtensions(addExtensions, removeExtensions, false); }.bind(this), 10); }, /* * Load all stencil set extensions specified in param extensions (key map: String -> Object) * Unload all other extensions (method copied from addssextension plugin) */ 
_loadExtensions: function(addExtensions, removeExtensions, removeAll) { var stencilsets = this.facade.getStencilSets(); var atLeastOne = false; // unload unselected extensions stencilsets.values().each(function(stencilset) { var unselected = stencilset.extensions().values().select(function(ext) { return addExtensions[ext.namespace] == undefined }); if(removeAll) { unselected.each(function(ext) { stencilset.removeExtension(ext.namespace); atLeastOne = true; }); } else { unselected.each(function(ext) { var remove = removeExtensions.find(function(remExt) { return ext.namespace === remExt.namespace; }); if(remove) { stencilset.removeExtension(ext.namespace); atLeastOne = true; } }); } }); <|fim▁hole|> // load selected extensions addExtensions.each(function(extension) { var stencilset = stencilsets[extension["extends"]]; if(stencilset) { // Load absolute if ((extension.definition || "").startsWith("/")) { stencilset.addExtension(extension.definition); // Load relative } else { stencilset.addExtension(ORYX.CONFIG.SS_EXTENSIONS_FOLDER + extension.definition); } atLeastOne = true; } }.bind(this)); if (atLeastOne) { stencilsets.values().each(function(stencilset) { this.facade.getRules().initializeRules(stencilset); }.bind(this)); this.facade.raiseEvent({ type: ORYX.CONFIG.EVENT_STENCIL_SET_LOADED }); var selection = this.facade.getSelection(); this.facade.setSelection(); this.facade.setSelection(selection); } } } ORYX.Plugins.SelectStencilSetPerspective = Clazz.extend(ORYX.Plugins.SelectStencilSetPerspective);<|fim▁end|>
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>registry = {} def register(model, fields, order='pk', filter=False, results=5): registry[str(model)] = (model, fields, results, order, filter)<|fim▁hole|>class LoopBreak(Exception): pass def search_for_string(search_string): search_string = search_string.lower() matches = [] for key in registry: model, fields, results, order, filter_by = registry[key] # partial application didn't seem sane in python ... so: if filter_by: if callable(filter_by): filter_by = filter_by() objects = model.objects.filter(filter_by) else: objects = model.objects.all() counter = 0 try: for object in objects.order_by(order): for field in fields: try: searchee = getattr(object, field) except AttributeError: pass if callable(searchee): searchee = searchee() if search_string in searchee.lower(): matches.append(object) counter += 1 if counter >= results: raise LoopBreak() except LoopBreak: pass return matches<|fim▁end|>